From f0ac1da95993614918269ee0984b4eaad2f45325 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Wed, 3 Jul 2024 10:29:47 +0200
Subject: [PATCH 001/136] fix windows path (#660) (#673)

## Changes
Replaced `pathlib.Path` with `pathlib.PurePosixPath` in
`databricks/sdk/mixins/files.py`; `PurePosixPath` always uses Linux-style path
separators regardless of the OS it is running on.
Fixes (#660)

## Tests
- [x] `make test` run locally
- [x] `make fmt` applied
- [ ] relevant integration tests applied

---
 Makefile                       |  4 ++++
 databricks/sdk/mixins/files.py | 16 ++++++++++++----
 tests/test_dbfs_mixins.py      | 30 ++++++++++++++++++++++++++++++
 3 files changed, 46 insertions(+), 4 deletions(-)

diff --git a/Makefile b/Makefile
index 9f6de61aa..eb8fe8397 100644
--- a/Makefile
+++ b/Makefile
@@ -1,6 +1,10 @@
 dev:
 	python3 -m venv .venv
+ifeq ($(OS), Windows_NT)
+	.venv\Scripts\activate
+else
 	. .venv/bin/activate
+endif
 	pip install '.[dev]'
 
 install:
diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 3295aef7a..1e109a1a7 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -3,6 +3,7 @@
 import base64
 import os
 import pathlib
+import platform
 import shutil
 import sys
 from abc import ABC, abstractmethod
@@ -266,8 +267,9 @@ def __repr__(self) -> str:
 
 class _Path(ABC):
 
-    def __init__(self, path: str):
-        self._path = pathlib.Path(str(path).replace('dbfs:', '').replace('file:', ''))
+    @abstractmethod
+    def __init__(self):
+        ...
 
     @property
     def is_local(self) -> bool:
@@ -327,6 +329,12 @@ def as_string(self) -> str:
 
 class _LocalPath(_Path):
 
+    def __init__(self, path: str):
+        if platform.system() == "Windows":
+            self._path = pathlib.Path(str(path).replace('file:///', '').replace('file:', ''))
+        else:
+            self._path = pathlib.Path(str(path).replace('file:', ''))
+
     def _is_local(self) -> bool:
        return True
 
@@ -393,7 +401,7 @@ def __repr__(self) -> str:
 class _VolumesPath(_Path):
 
     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
-        super().__init__(src)
+        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
         self._api = api
 
     def _is_local(self) -> bool:
@@ -462,7 +470,7 @@ def __repr__(self) -> str:
 class _DbfsPath(_Path):
 
     def __init__(self, api: files.DbfsAPI, src: str):
-        super().__init__(src)
+        self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
         self._api = api
 
     def _is_local(self) -> bool:
diff --git a/tests/test_dbfs_mixins.py b/tests/test_dbfs_mixins.py
index 427c445fd..6bbaca7a2 100644
--- a/tests/test_dbfs_mixins.py
+++ b/tests/test_dbfs_mixins.py
@@ -70,3 +70,33 @@ def test_fs_path_invalid(config):
     with pytest.raises(ValueError) as e:
         dbfs_ext._path('s3://path/to/file')
     assert 'unsupported scheme "s3"' in str(e.value)
+
+
+def test_dbfs_local_path_mkdir(config, tmp_path):
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient(config=config)
+    w.dbfs._path(f'file:{tmp_path}/test_dir').mkdir()
+    assert w.dbfs.exists(f'file:{tmp_path}/test_dir')
+
+
+def test_dbfs_exists(config, mocker):
+    from databricks.sdk import WorkspaceClient
+
+    get_status = mocker.patch('databricks.sdk.service.files.DbfsAPI.get_status', side_effect=NotFound())
+
+    client = WorkspaceClient(config=config)
+    client.dbfs.exists('/abc/def/ghi')
+
+    get_status.assert_called_with('/abc/def/ghi')
+
+
+def test_volume_exists(config, mocker):
+    from databricks.sdk import WorkspaceClient
+
+    get_metadata = mocker.patch('databricks.sdk.service.files.FilesAPI.get_metadata')
+
+    client = 
WorkspaceClient(config=config) + client.dbfs.exists('/Volumes/abc/def/ghi') + + get_metadata.assert_called_with('/Volumes/abc/def/ghi') From febf2fd5959fbab2ade2ef61d12797e72b69a76e Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Wed, 3 Jul 2024 10:31:53 +0200 Subject: [PATCH 002/136] Check trailing slash in host url (#681) ## Changes Added a check for trailing slash in the host url. Fixes (#661) ## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --------- Signed-off-by: Parth Bansal --- databricks/sdk/config.py | 19 +++++++++++++------ tests/test_config.py | 13 +++++++++++++ 2 files changed, 26 insertions(+), 6 deletions(-) diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index ca1c2cfc1..0802b831a 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -403,13 +403,20 @@ def attributes(cls) -> Iterable[ConfigAttribute]: def _fix_host_if_needed(self): if not self.host: return - # fix url to remove trailing slash + + # Add a default scheme if it's missing + if '://' not in self.host: + self.host = 'https://' + self.host + o = urllib.parse.urlparse(self.host) - if not o.hostname: - # only hostname is specified - self.host = f"https://{self.host}" - else: - self.host = f"{o.scheme}://{o.netloc}" + # remove trailing slash + path = o.path.rstrip('/') + # remove port if 443 + netloc = o.netloc + if o.port == 443: + netloc = netloc.split(':')[0] + + self.host = urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment)) def _set_inner_config(self, keyword_args: Dict[str, any]): for attr in self.attributes(): diff --git a/tests/test_config.py b/tests/test_config.py index 4b6c05638..fc6a29f1b 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,5 +1,7 @@ import platform +import pytest + from databricks.sdk.config import Config, with_product, with_user_agent_extra from databricks.sdk.version import __version__ @@ -20,6 +22,17 @@ def test_config_supports_legacy_credentials_provider(): assert c2._product_version == '1.2.3' +@pytest.mark.parametrize('host,expected', [("https://abc.def.ghi", "https://abc.def.ghi"), + ("https://abc.def.ghi/", "https://abc.def.ghi"), + ("abc.def.ghi", "https://abc.def.ghi"), + ("abc.def.ghi/", "https://abc.def.ghi"), + ("https://abc.def.ghi:443", "https://abc.def.ghi"), + ("abc.def.ghi:443", "https://abc.def.ghi")]) +def test_config_host_url_format_check(mocker, host, expected): + mocker.patch('databricks.sdk.config.Config.init_auth') + assert Config(host=host).host == expected + + def test_extra_and_upstream_user_agent(monkeypatch): class MockUname: From 6fd28ba58d16504fa83407f02b2c8a956079a16f Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Wed, 3 Jul 2024 16:48:52 +0200 Subject: [PATCH 003/136] Add Windows WorkFlow (#692) ## Changes Changed workflow such that tests will run on windows. 
## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- .github/workflows/push.yml | 18 ++++++++++++++++++ .github/workflows/test.yml | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index ef4b64486..10379d74e 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -32,6 +32,24 @@ jobs: env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + tests-ubuntu: + uses: ./.github/workflows/test.yml + strategy: + matrix: + pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] + with: + os: ubuntu-latest + pyVersion: ${{ matrix.pyVersion }} + + tests-windows: + uses: ./.github/workflows/test.yml + strategy: + matrix: + pyVersion: [ '3.9', '3.10', '3.11', '3.12' ] + with: + os: windows-latest + pyVersion: ${{ matrix.pyVersion }} + fmt: runs-on: ubuntu-latest diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 000000000..bb86e38a3 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,35 @@ +name: Test Workflow + +on: + workflow_call: + inputs: + os: + required: true + type: string + pyVersion: + required: true + type: string + +jobs: + test: + strategy: + fail-fast: false + runs-on: ${{ inputs.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Unshallow + run: git fetch --prune --unshallow + + - uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.pyVersion }} + + - name: Run tests + run: make dev install test + + - name: Publish test coverage + uses: codecov/codecov-action@v4 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} \ No newline at end of file From 9931e684b7126ada9c5940e6aac39f65756cc1e3 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Wed, 3 Jul 2024 16:58:20 +0200 Subject: [PATCH 004/136] Remove duplicate ubuntu tests (#693) ## Changes Remove duplicate ubuntu tests ## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- .github/workflows/push.yml | 25 ------------------------- 1 file changed, 25 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 10379d74e..297be4c8e 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -7,31 +7,6 @@ on: types: [checks_requested] jobs: - tests: - strategy: - fail-fast: false - matrix: - pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ] - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v2 - - - name: Unshallow - run: git fetch --prune --unshallow - - - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.pyVersion }} - - - name: Run tests - run: make dev install test - - - name: Publish test coverage - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - tests-ubuntu: uses: ./.github/workflows/test.yml strategy: From 7a49922224752654b2497e00cd5618cd9a7b2a17 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Thu, 4 Jul 2024 17:31:16 +0200 Subject: [PATCH 005/136] Support partners in SDK (#648) ## Changes Ports https://github.com/databricks/databricks-sdk-go/pull/925 to the Python SDK. Partners of Databricks need a mechanism to register themselves in libraries or applications that they write. In this way, requests made by users of those libraries will include sufficient information to link those requests to the original users. 
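For illustration, once this change lands, a partner library can attribute its requests like so (a minimal sketch using the new `useragent` module described below; `acme` is a hypothetical partner identifier):

```python
from databricks.sdk import WorkspaceClient, useragent

# Register the partner once, e.g. when the partner library is imported.
# The identifier is opaque to the SDK and only surfaces in the header.
useragent.with_partner("acme")

# Requests made through this client now carry `partner/acme` in their
# User-Agent header.
w = WorkspaceClient()
```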
This PR adds a new `useragent` module with functions to manipulate the user agent.
* `product()`: returns the globally configured product & version.
* `with_product(product: str, product_version: str)`: configure the global product & version.
* `extra()`: returns the globally configured extra user agent metadata.
* `with_extra(key: str, value: str)`: add an extra entry to the global extra user agent metadata.
* `with_partner(partner: str)`: add a partner to the global extra user agent metadata.
* `to_string(alternate_product_info: Optional[Tuple[str, str]] = None, other_info: Optional[List[Tuple[str, str]]] = None) -> str`: return the User-Agent header as a string.

One new function here is `with_partner`, which can be used by a partner to add partner information to the User-Agent header for requests made by the SDK. The new header entry will have the form `partner/<partner>`. The partner identifier is opaque for the SDK, but it must be alphanumeric.

This PR also removes the requirement that a user agent entry contain only a single copy of each key. This allows multiple partners to register in the same library or application.

In this PR, I've also refactored the user agent library to be more static, aligning it with the Go and Java SDKs. This makes it easier to maintain and ensures similar behavior between all 3 SDKs. Note that this SDK has extra functionality that doesn't exist in the Go and Java SDKs, namely config-level user agent info; that is preserved here.

## Tests
Unit tests were added to verify that the user agent contains all expected parts and supports multiple partners.

- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 README.md                   |  24 ++++++
 databricks/sdk/config.py    |  90 ++++++----------------
 databricks/sdk/useragent.py | 144 ++++++++++++++++++++++++++++++++++++
 tests/test_config.py        |  15 ++--
 tests/test_core.py          |  26 +++++++
 tests/test_user_agent.py    |  42 +++++++++++
 6 files changed, 262 insertions(+), 79 deletions(-)
 create mode 100644 databricks/sdk/useragent.py
 create mode 100644 tests/test_user_agent.py

diff --git a/README.md b/README.md
index dec52c8e4..9991c9cd0 100644
--- a/README.md
+++ b/README.md
@@ -30,6 +30,7 @@ The SDK's internal HTTP client is robust and handles failures on different level
 - [Long-running operations](#long-running-operations)
 - [Paginated responses](#paginated-responses)
 - [Single-sign-on with OAuth](#single-sign-on-sso-with-oauth)
+- [User Agent Request Attribution](#user-agent-request-attribution)
 - [Error handling](#error-handling)
 - [Logging](#logging)
 - [Integration with `dbutils`](#interaction-with-dbutils)
@@ -508,6 +509,29 @@ logging.info(f'Created new custom app: '
              f'--client_secret {custom_app.client_secret}')
 ```
 
+## User Agent Request Attribution
+
+The Databricks SDK for Python uses the `User-Agent` header to include request metadata along with each request. By default, this includes the version of the Python SDK, the version of the Python language used by your application, and the underlying operating system. To statically add additional metadata, you can use the `with_partner()` and `with_product()` functions in the `databricks.sdk.useragent` module. `with_partner()` can be used by partners to indicate that code using the Databricks SDK for Python should be attributed to a specific partner. Multiple partners can be registered at once. Partner names can contain any letter, digit, `.`, `-`, `_` or `+`.
+
+```python
+from databricks.sdk import useragent
+useragent.with_partner("partner-abc")
+useragent.with_partner("partner-xyz")
+```
+
+`with_product()` can be used to define the name and version of the product that is built with the Databricks SDK for Python. The product name has the same restrictions as the partner name above, and the product version must be a valid [SemVer](https://semver.org/). Subsequent calls to `with_product()` replace the original product with the new user-specified one.
+
+```python
+from databricks.sdk import useragent
+useragent.with_product("databricks-example-product", "1.2.0")
+```
+
+If both the `DATABRICKS_SDK_UPSTREAM` and `DATABRICKS_SDK_UPSTREAM_VERSION` environment variables are defined, these will also be included in the `User-Agent` header.
+
+If additional metadata needs to be specified that isn't already supported by the above interfaces, you can use the `with_user_agent_extra()` function to register arbitrary key-value pairs to include in the user agent. Multiple values associated with the same key are allowed. Keys have the same restrictions as the partner name above. Values must be either as described above or SemVer strings.
+
+Additional `User-Agent` information can be associated with different instances of `Config`. To add metadata to a specific instance of `Config`, use the `with_user_agent_extra()` method.
+
 ## Error handling
 
 The Databricks SDK for Python provides a robust error-handling mechanism that allows developers to catch and handle API errors. When an error occurs, the SDK will raise an exception that contains information about the error, such as the HTTP status code, error message, and error details. Developers can catch these exceptions and handle them appropriately in their code.
diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index 0802b831a..47d0ecc44 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -3,19 +3,18 @@
 import logging
 import os
 import pathlib
-import platform
 import sys
 import urllib.parse
-from typing import Dict, Iterable, List, Optional, Tuple
+from typing import Dict, Iterable, Optional
 
 import requests
 
+from . 
import useragent from .clock import Clock, RealClock from .credentials_provider import CredentialsStrategy, DefaultCredentials from .environments import (ALL_ENVS, AzureEnvironment, Cloud, DatabricksEnvironment, get_environment_for_hostname) from .oauth import OidcEndpoints, Token -from .version import __version__ logger = logging.getLogger('databricks.sdk') @@ -44,30 +43,14 @@ def __repr__(self) -> str: return f"" -_DEFAULT_PRODUCT_NAME = 'unknown' -_DEFAULT_PRODUCT_VERSION = '0.0.0' -_STATIC_USER_AGENT: Tuple[str, str, List[str]] = (_DEFAULT_PRODUCT_NAME, _DEFAULT_PRODUCT_VERSION, []) - - def with_product(product: str, product_version: str): """[INTERNAL API] Change the product name and version used in the User-Agent header.""" - global _STATIC_USER_AGENT - prev_product, prev_version, prev_other_info = _STATIC_USER_AGENT - logger.debug(f'Changing product from {prev_product}/{prev_version} to {product}/{product_version}') - _STATIC_USER_AGENT = product, product_version, prev_other_info + useragent.with_product(product, product_version) def with_user_agent_extra(key: str, value: str): """[INTERNAL API] Add extra metadata to the User-Agent header when developing a library.""" - global _STATIC_USER_AGENT - product_name, product_version, other_info = _STATIC_USER_AGENT - for item in other_info: - if item.startswith(f"{key}/"): - # ensure that we don't have duplicates - other_info.remove(item) - break - other_info.append(f"{key}/{value}") - _STATIC_USER_AGENT = product_name, product_version, other_info + useragent.with_extra(key, value) class Config: @@ -111,21 +94,12 @@ def __init__(self, # Deprecated. Use credentials_strategy instead. credentials_provider: CredentialsStrategy = None, credentials_strategy: CredentialsStrategy = None, - product=_DEFAULT_PRODUCT_NAME, - product_version=_DEFAULT_PRODUCT_VERSION, + product=None, + product_version=None, clock: Clock = None, **kwargs): self._header_factory = None self._inner = {} - # as in SDK for Go, pull information from global static user agent context, - # so that we can track additional metadata for mid-stream libraries, as well - # as for cases, when the downstream product is used as a library and is not - # configured with a proper product name and version. - static_product, static_version, _ = _STATIC_USER_AGENT - if product == _DEFAULT_PRODUCT_NAME: - product = static_product - if product_version == _DEFAULT_PRODUCT_VERSION: - product_version = static_version self._user_agent_other_info = [] if credentials_strategy and credentials_provider: raise ValueError( @@ -147,8 +121,7 @@ def __init__(self, self._fix_host_if_needed() self._validate() self.init_auth() - self._product = product - self._product_version = product_version + self._init_product(product, product_version) except ValueError as e: message = self.wrap_debug_info(str(e)) raise ValueError(message) from e @@ -260,47 +233,19 @@ def is_any_auth_configured(self) -> bool: @property def user_agent(self): """ Returns User-Agent header used by this SDK """ - py_version = platform.python_version() - os_name = platform.uname().system.lower() - - ua = [ - f"{self._product}/{self._product_version}", f"databricks-sdk-py/{__version__}", - f"python/{py_version}", f"os/{os_name}", f"auth/{self.auth_type}", - ] - if len(self._user_agent_other_info) > 0: - ua.append(' '.join(self._user_agent_other_info)) - # as in SDK for Go, pull information from global static user agent context, - # so that we can track additional metadata for mid-stream libraries. 
this value - # is shared across all instances of Config objects intentionally. - _, _, static_info = _STATIC_USER_AGENT - if len(static_info) > 0: - ua.append(' '.join(static_info)) - if len(self._upstream_user_agent) > 0: - ua.append(self._upstream_user_agent) - if 'DATABRICKS_RUNTIME_VERSION' in os.environ: - runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION'] - if runtime_version != '': - runtime_version = self._sanitize_header_value(runtime_version) - ua.append(f'runtime/{runtime_version}') - - return ' '.join(ua) - @staticmethod - def _sanitize_header_value(value: str) -> str: - value = value.replace(' ', '-') - value = value.replace('/', '-') - return value + # global user agent includes SDK version, product name & version, platform info, + # and global extra info. Config can have specific extra info associated with it, + # such as an override product, auth type, and other user-defined information. + return useragent.to_string(self._product_info, + [("auth", self.auth_type)] + self._user_agent_other_info) @property def _upstream_user_agent(self) -> str: - product = os.environ.get('DATABRICKS_SDK_UPSTREAM', None) - product_version = os.environ.get('DATABRICKS_SDK_UPSTREAM_VERSION', None) - if product is not None and product_version is not None: - return f"upstream/{product} upstream-version/{product_version}" - return "" + return " ".join(f"{k}/{v}" for k, v in useragent._get_upstream_user_agent_info()) def with_user_agent_extra(self, key: str, value: str) -> 'Config': - self._user_agent_other_info.append(f"{key}/{value}") + self._user_agent_other_info.append((key, value)) return self @property @@ -505,6 +450,13 @@ def init_auth(self): except ValueError as e: raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e + def _init_product(self, product, product_version): + if product is not None or product_version is not None: + default_product, default_version = useragent.product() + self._product_info = (product or default_product, product_version or default_version) + else: + self._product_info = None + def __repr__(self): return f'<{self.debug_string()}>' diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py new file mode 100644 index 000000000..df8761600 --- /dev/null +++ b/databricks/sdk/useragent.py @@ -0,0 +1,144 @@ +import copy +import logging +import os +import platform +import re +from typing import List, Optional, Tuple + +from .version import __version__ + +# Constants +RUNTIME_KEY = 'runtime' +CICD_KEY = 'cicd' +AUTH_KEY = 'auth' + +_product_name = "unknown" +_product_version = "0.0.0" + +logger = logging.getLogger("databricks.sdk.useragent") + +_extra = [] + +# Precompiled regex patterns +alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$') +semver_pattern = re.compile(r'^v?(\d+\.)?(\d+\.)?(\*|\d+)$') + + +def _match_alphanum(value): + if not alphanum_pattern.match(value): + raise ValueError(f"Invalid alphanumeric value: {value}") + + +def _match_semver(value): + if not semver_pattern.match(value): + raise ValueError(f"Invalid semantic version: {value}") + + +def _match_alphanum_or_semver(value): + if not alphanum_pattern.match(value) and not semver_pattern.match(value): + raise ValueError(f"Invalid value: {value}") + + +def product() -> Tuple[str, str]: + """Return the global product name and version that will be submitted to Databricks on every request.""" + return _product_name, _product_version + + +def with_product(name: str, version: str): + """Change the product name and version that will be submitted to Databricks on 
every request.""" + global _product_name, _product_version + _match_alphanum(name) + _match_semver(version) + logger.debug(f'Changing product from {_product_name}/{_product_version} to {name}/{version}') + _product_name = name + _product_version = version + + +def _reset_product(): + """[Internal API] Reset product name and version to the default values. + + Used for testing purposes only.""" + global _product_name, _product_version + _product_name = "unknown" + _product_version = "0.0.0" + + +def with_extra(key: str, value: str): + """Add extra metadata to all requests submitted to Databricks. + + User-specified extra metadata can be inserted into request headers to provide additional context to Databricks + about usage of different tools in the Databricks ecosystem. This can be useful for collecting telemetry about SDK + usage from tools that are built on top of the SDK. + """ + global _extra + _match_alphanum(key) + _match_alphanum_or_semver(value) + logger.debug(f'Adding {key}/{value} to User-Agent') + _extra.append((key, value)) + + +def extra() -> List[Tuple[str, str]]: + """Returns the current extra metadata that will be submitted to Databricks on every request.""" + return copy.deepcopy(_extra) + + +def _reset_extra(extra: List[Tuple[str, str]]): + """[INTERNAL API] Reset the extra metadata to a new list. + + Prefer using with_user_agent_extra instead of this method to avoid overwriting other information included in the + user agent.""" + global _extra + _extra = extra + + +def with_partner(partner: str): + """Adds the given partner to the metadata submitted to Databricks on every request.""" + with_extra("partner", partner) + + +def _get_upstream_user_agent_info() -> List[Tuple[str, str]]: + """[INTERNAL API] Return the upstream product and version if specified in the system environment.""" + product = os.getenv("DATABRICKS_SDK_UPSTREAM") + version = os.getenv("DATABRICKS_SDK_UPSTREAM_VERSION") + if not product or not version: + return [] + return [("upstream", product), ("upstream-version", version)] + + +def _get_runtime_info() -> List[Tuple[str, str]]: + """[INTERNAL API] Return the runtime version if running on Databricks.""" + if 'DATABRICKS_RUNTIME_VERSION' in os.environ: + runtime_version = os.environ['DATABRICKS_RUNTIME_VERSION'] + if runtime_version != '': + runtime_version = _sanitize_header_value(runtime_version) + return [('runtime', runtime_version)] + return [] + + +def _sanitize_header_value(value: str) -> str: + value = value.replace(' ', '-') + value = value.replace('/', '-') + return value + + +def to_string(alternate_product_info: Optional[Tuple[str, str]] = None, + other_info: Optional[List[Tuple[str, str]]] = None) -> str: + """Compute the full User-Agent header. + + The User-Agent header contains the product name, version, and other metadata that is submitted to Databricks on + every request. There are some static components that are included by default in every request, like the SDK version, + OS name, and Python version. 
Other components can be optionally overridden or augmented in DatabricksConfig, like + the product name, product version, and extra user-defined information.""" + base = [] + if alternate_product_info: + base.append(alternate_product_info) + else: + base.append((_product_name, _product_version)) + base.extend([("databricks-sdk-py", __version__), ("python", platform.python_version()), + ("os", platform.uname().system.lower()), ]) + if other_info: + base.extend(other_info) + base.extend(_extra) + base.extend(_get_upstream_user_agent_info()) + base.extend(_get_runtime_info()) + return " ".join(f"{k}/{v}" for k, v in base) diff --git a/tests/test_config.py b/tests/test_config.py index fc6a29f1b..4d3a0ebef 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,24 +2,17 @@ import pytest +from databricks.sdk import useragent from databricks.sdk.config import Config, with_product, with_user_agent_extra from databricks.sdk.version import __version__ from .conftest import noop_credentials -def test_config_copy_preserves_product_and_product_version(): - c = Config(credentials_strategy=noop_credentials, product='foo', product_version='1.2.3') - c2 = c.copy() - assert c2._product == 'foo' - assert c2._product_version == '1.2.3' - - def test_config_supports_legacy_credentials_provider(): c = Config(credentials_provider=noop_credentials, product='foo', product_version='1.2.3') c2 = c.copy() - assert c2._product == 'foo' - assert c2._product_version == '1.2.3' + assert c2._product_info == ('foo', '1.2.3') @pytest.mark.parametrize('host,expected', [("https://abc.def.ghi", "https://abc.def.ghi"), @@ -54,7 +47,7 @@ def system(self): assert config.user_agent == ( f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic" - f" test-extra-1/1 test-extra-2/2 upstream/upstream-product upstream-version/0.0.1" + " test-extra-1/1 test-extra-2/2 upstream/upstream-product upstream-version/0.0.1" " runtime/13.1-anything-else") with_product('some-product', '0.32.1') @@ -76,6 +69,8 @@ def test_config_copy_deep_copies_user_agent_other_info(config): assert "test/test2" in config_copy.user_agent assert "test/test2" not in config.user_agent + original_extra = useragent.extra() with_user_agent_extra("blueprint", "0.4.6") assert "blueprint/0.4.6" in config.user_agent assert "blueprint/0.4.6" in config_copy.user_agent + useragent._reset_extra(original_extra) diff --git a/tests/test_core.py b/tests/test_core.py index 2403d654b..eb2f6d954 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -2,6 +2,7 @@ import functools import os import pathlib +import platform import random import string import typing @@ -24,6 +25,7 @@ DatabricksEnvironment) from databricks.sdk.service.catalog import PermissionsChange from databricks.sdk.service.iam import AccessControlRequest +from databricks.sdk.version import __version__ from .clock import FakeClock from .conftest import noop_credentials @@ -179,6 +181,30 @@ def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, t assert databricks_cli(config) is not None +def test_extra_and_upstream_user_agent(monkeypatch): + + class MockUname: + + @property + def system(self): + return 'TestOS' + + monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0') + monkeypatch.setattr(platform, 'uname', MockUname) + monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product") + monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM_VERSION', "0.0.1") + monkeypatch.setenv('DATABRICKS_RUNTIME_VERSION', "13.1 anything/else") + + config = 
Config(host='http://localhost', username="something", password="something", product='test', + product_version='0.0.0') \ + .with_user_agent_extra('test-extra-1', '1') \ + .with_user_agent_extra('test-extra-2', '2') + + assert config.user_agent == ( + f"test/0.0.0 databricks-sdk-py/{__version__} python/3.0.0 os/testos auth/basic test-extra-1/1 test-extra-2/2" + " upstream/upstream-product upstream-version/0.0.1 runtime/13.1-anything-else") + + def test_config_copy_shallow_copies_credential_provider(): class TestCredentialsStrategy(CredentialsStrategy): diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py new file mode 100644 index 000000000..5083d9908 --- /dev/null +++ b/tests/test_user_agent.py @@ -0,0 +1,42 @@ +import pytest + +from databricks.sdk.version import __version__ + + +@pytest.fixture(scope="function") +def user_agent(): + from databricks.sdk import useragent + orig_product_name = useragent._product_name + orig_product_version = useragent._product_version + orig_extra = useragent._extra + + yield useragent + + useragent._product_name = orig_product_name + useragent._product_version = orig_product_version + useragent._extra = orig_extra + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent(user_agent): + user_agent._reset_product() + default = user_agent.to_string() + + assert 'unknown/0.0.0' in default + assert 'databricks-sdk-py/' + __version__ in default + assert 'os/' in default + assert 'python/' in default + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent_with_product(user_agent): + user_agent.with_product('test', '1.0.0') + assert 'test/1.0.0' in user_agent.to_string() + + +@pytest.mark.xdist_group(name="user_agent") +def test_user_agent_with_partner(user_agent): + user_agent.with_partner('test') + user_agent.with_partner('differenttest') + assert 'partner/test' in user_agent.to_string() + assert 'partner/differenttest' in user_agent.to_string() From 396d4f8754aa03f6f9ef4fedee17675c2e77a5c1 Mon Sep 17 00:00:00 2001 From: Parth Bansal Date: Fri, 5 Jul 2024 16:38:38 +0200 Subject: [PATCH 006/136] Fix auth tests for windows. (#697) ## Changes Fix auth tests for windows. 
- Added a PowerShell script, since the Bash script doesn't run on Windows
- Changed the 'COMSPEC' environment variable so that commands run in PowerShell
- Used 'USERPROFILE' instead of 'HOME', as it is the Windows equivalent of 'HOME'

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 tests/conftest.py               | 22 ++++++++++++-
 tests/test_auth.py              | 46 +++++++++++++--------------
 tests/test_auth_manual_tests.py | 22 ++++++-------
 tests/testdata/windows/az.ps1   | 56 +++++++++++++++++++++++++++++++++
 4 files changed, 111 insertions(+), 35 deletions(-)
 create mode 100644 tests/testdata/windows/az.ps1

diff --git a/tests/conftest.py b/tests/conftest.py
index 9d1c26b38..a7e520dc9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,6 @@
 import functools
 import os
+import platform
 
 import pytest as pytest
 from pyfakefs.fake_filesystem_unittest import Patcher
@@ -38,7 +39,11 @@ def wrapper(*args, **kwargs):
         with pytest.raises(ValueError) as info:
             func(*args, **kwargs)
         exception_str = str(info.value)
-        exception_str = exception_str.replace(__tests__ + '/', '')
+        if platform.system() == 'Windows':
+            exception_str = exception_str.replace(__tests__ + '\\', '')
+            exception_str = exception_str.replace('\\', '/')
+        else:
+            exception_str = exception_str.replace(__tests__ + '/', '')
         assert msg in exception_str
 
     return wrapper
@@ -57,3 +62,18 @@ def fake_fs():
         patcher.fs.add_real_directory(test_data_path)
 
         yield patcher.fs # This will return a fake file system
+
+
+def set_home(monkeypatch, path):
+    if platform.system() == 'Windows':
+        monkeypatch.setenv('USERPROFILE', __tests__ + path)
+    else:
+        monkeypatch.setenv('HOME', __tests__ + path)
+
+
+def set_az_path(monkeypatch):
+    if platform.system() == 'Windows':
+        monkeypatch.setenv('Path', __tests__ + "\\testdata\\windows\\")
+        monkeypatch.setenv('COMSPEC', 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe')
+    else:
+        monkeypatch.setenv('PATH', __tests__ + "/testdata:/bin")
diff --git a/tests/test_auth.py b/tests/test_auth.py
index 02535c39e..fd73378b2 100644
--- a/tests/test_auth.py
+++ b/tests/test_auth.py
@@ -2,7 +2,7 @@
 # In case of editing this file, make sure the change is propagated to all Databricks SDK codebases
 from databricks.sdk.core import Config
 
-from .conftest import __tests__, raises
+from .conftest import __tests__, raises, set_az_path, set_home
 
 default_auth_base_error_message = \
     "default auth: cannot configure default credentials, " \
@@ -121,19 +121,19 @@ def test_config_config_file(monkeypatch):
 
 @raises(f"{default_auth_base_error_message}. 
Config: host=https://x") def test_config_config_file_skip_default_profile_if_host_specified(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') cfg = Config(host='x') @raises(default_auth_base_error_message) def test_config_config_file_with_empty_default_profile_select_default(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata/empty_default') + set_home(monkeypatch, '/testdata/empty_default') Config() def test_config_config_file_with_empty_default_profile_select_abc(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'abc') - monkeypatch.setenv('HOME', __tests__ + '/testdata/empty_default') + set_home(monkeypatch, '/testdata/empty_default') cfg = Config() assert cfg.auth_type == 'pat' @@ -141,7 +141,7 @@ def test_config_config_file_with_empty_default_profile_select_abc(monkeypatch): def test_config_pat_from_databricks_cfg(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') cfg = Config() assert cfg.auth_type == 'pat' @@ -150,7 +150,7 @@ def test_config_pat_from_databricks_cfg(monkeypatch): def test_config_pat_from_databricks_cfg_dot_profile(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'pat.with.dot') - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') cfg = Config() assert cfg.auth_type == 'pat' @@ -161,7 +161,7 @@ def test_config_pat_from_databricks_cfg_dot_profile(monkeypatch): f"{default_auth_base_error_message}. Config: token=***, profile=nohost. Env: DATABRICKS_CONFIG_PROFILE") def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost') - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') Config() @@ -171,7 +171,7 @@ def test_config_pat_from_databricks_cfg_nohost_profile(monkeypatch): def test_config_config_profile_and_token(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost') monkeypatch.setenv('DATABRICKS_TOKEN', 'x') - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') Config() @@ -181,7 +181,7 @@ def test_config_config_profile_and_token(monkeypatch): def test_config_config_profile_and_password(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'nohost') monkeypatch.setenv('DATABRICKS_USERNAME', 'x') - monkeypatch.setenv('HOME', __tests__ + '/testdata') + set_home(monkeypatch, '/testdata') Config() @@ -194,8 +194,8 @@ def test_config_azure_pat(): def test_config_azure_cli_host(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' @@ -208,14 +208,14 @@ def test_config_azure_cli_host(monkeypatch): ) def test_config_azure_cli_host_fail(monkeypatch): monkeypatch.setenv('FAIL', 'yes') - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) cfg = Config(azure_workspace_resource_id='/sub/rg/ws') @raises(f"{default_auth_base_error_message}. 
Config: azure_workspace_resource_id=/sub/rg/ws") def test_config_azure_cli_host_az_not_installed(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') + set_home(monkeypatch, '/testdata/azure') monkeypatch.setenv('PATH', __tests__ + '/whatever') cfg = Config(azure_workspace_resource_id='/sub/rg/ws') @@ -224,14 +224,14 @@ def test_config_azure_cli_host_az_not_installed(monkeypatch): "validate: more than one authorization method configured: azure and pat. Config: token=***, azure_workspace_resource_id=/sub/rg/ws" ) def test_config_azure_cli_host_pat_conflict_with_config_file_present_without_default_profile(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) cfg = Config(token='x', azure_workspace_resource_id='/sub/rg/ws') def test_config_azure_cli_host_and_resource_id(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata') + set_az_path(monkeypatch) cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' @@ -241,8 +241,8 @@ def test_config_azure_cli_host_and_resource_id(monkeypatch): def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'justhost') - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' @@ -255,8 +255,8 @@ def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeyp ) def test_config_azure_and_password_conflict(monkeypatch): monkeypatch.setenv('DATABRICKS_USERNAME', 'x') - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') @@ -265,7 +265,7 @@ def test_config_azure_and_password_conflict(monkeypatch): ) def test_config_corrupt_config(monkeypatch): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'DEFAULT') - monkeypatch.setenv('HOME', __tests__ + '/testdata/corrupt') + set_home(monkeypatch, '/testdata/corrupt') Config() diff --git a/tests/test_auth_manual_tests.py b/tests/test_auth_manual_tests.py index 07250c532..e2874c427 100644 --- a/tests/test_auth_manual_tests.py +++ b/tests/test_auth_manual_tests.py @@ -1,11 +1,11 @@ from databricks.sdk.core import Config -from .conftest import __tests__ +from .conftest import set_az_path, set_home def test_azure_cli_workspace_header_present(monkeypatch): - monkeypatch.setenv('HOME', __tests__ + '/testdata/azure') - monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin') + set_home(monkeypatch, '/testdata/azure') + set_az_path(monkeypatch) resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', @@ -15,8 +15,8 @@ def test_azure_cli_workspace_header_present(monkeypatch): def test_azure_cli_user_with_management_access(monkeypatch): - monkeypatch.setenv('HOME', 
__tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
                  host='https://adb-123.4.azuredatabricks.net',
@@ -25,8 +25,8 @@
 
 
 def test_azure_cli_user_no_management_access(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     monkeypatch.setenv('FAIL_IF', 'https://management.core.windows.net/')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
@@ -36,8 +36,8 @@
 
 
 def test_azure_cli_fallback(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     monkeypatch.setenv('FAIL_IF', 'subscription')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
@@ -47,8 +47,8 @@
 
 
 def test_azure_cli_with_warning_on_stderr(monkeypatch):
-    monkeypatch.setenv('HOME', __tests__ + '/testdata/azure')
-    monkeypatch.setenv('PATH', __tests__ + '/testdata:/bin')
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
     monkeypatch.setenv('WARN', 'this is a warning')
     resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123'
     cfg = Config(auth_type='azure-cli',
diff --git a/tests/testdata/windows/az.ps1 b/tests/testdata/windows/az.ps1
new file mode 100644
index 000000000..4aa96adf5
--- /dev/null
+++ b/tests/testdata/windows/az.ps1
@@ -0,0 +1,56 @@
+#!/usr/bin/env pwsh
+
+param (
+    [string[]]$Args
+)
+
+if ($env:WARN) {
+    Write-Error "WARNING: $env:WARN"
+}
+
+if ($env:FAIL -eq "yes") {
+    Write-Error "This is just a failing script."
+    exit 1
+}
+
+if ($env:FAIL -eq "logout") {
+    Write-Error "No subscription found. Run 'az account set' to select a subscription."
+    exit 1
+}
+
+if ($env:FAIL -eq "corrupt") {
+    Write-Output "{accessToken: ..corrupt"
+    exit
+}
+
+foreach ($arg in $Args) {
+    if ($arg -eq $env:FAIL_IF) {
+        Write-Output "Failed"
+        exit 1
+    }
+}
+
+try {
+    $EXP = (Get-Date).AddSeconds($env:EXPIRE -as [int])
+} catch {
+    $expireString = $env:EXPIRE
+    $expireString = $expireString -replace "S", "seconds"
+    $expireString = $expireString -replace "M", "minutes"
+    $EXP = (Get-Date).AddSeconds($expireString -as [int])
+}
+
+if (-not $env:TF_AAD_TOKEN) {
+    $TF_AAD_TOKEN = "..."
+} else {
+    $TF_AAD_TOKEN = $env:TF_AAD_TOKEN
+}
+
+$expiresOn = $EXP.ToString("yyyy-MM-dd HH:mm:ss")
+
+Write-Output "{
+    `"accessToken`": `"$TF_AAD_TOKEN`",
+    `"expiresOn`": `"$expiresOn`",
+    `"subscription`": `"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`",
+    `"tenant`": `"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee`",
+    `"tokenType`": `"Bearer`"
+}"
From 881670f77d4080ce7fe8c6fedbb82d1563c17ee1 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 8 Jul 2024 16:59:30 +0200
Subject: [PATCH 007/136] Fix test_local_io for windows (#695)

## Changes
Fix `tests/integration/test_files.py::test_local_io` for Windows. 
This PR is part of fixing the tests for Windows.

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 tests/integration/test_dbutils.py | 7 +++++++
 tests/integration/test_files.py   | 6 +++++-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py
index a7e780eb3..e6e2a8668 100644
--- a/tests/integration/test_dbutils.py
+++ b/tests/integration/test_dbutils.py
@@ -51,6 +51,13 @@ def test_large_put(fs_and_base_path):
     assert output == ("test" * 20000)[:65536]
 
 
+def test_put_local_path(w, random, tmp_path):
+    to_write = random(1024 * 1024 * 2)
+    tmp_path = tmp_path / "tmp_file"
+    w.dbutils.fs.put(f'file:{tmp_path}', to_write, True)
+    assert w.dbutils.fs.head(f'file:{tmp_path}', 1024 * 1024 * 2) == to_write
+
+
 def test_cp_file(fs_and_base_path, random):
     fs, base_path = fs_and_base_path
     path = base_path + "/dbc_qa_file-" + random()
diff --git a/tests/integration/test_files.py b/tests/integration/test_files.py
index 35750ab75..7b9ede556 100644
--- a/tests/integration/test_files.py
+++ b/tests/integration/test_files.py
@@ -1,6 +1,7 @@
 import io
 import logging
 import pathlib
+import platform
 import time
 from typing import Callable, List, Tuple, Union
 
@@ -11,7 +12,10 @@
 
 
 def test_local_io(random):
-    dummy_file = f'/tmp/{random()}'
+    if platform.system() == 'Windows':
+        dummy_file = f'C:\\Windows\\Temp\\{random()}'
+    else:
+        dummy_file = f'/tmp/{random()}'
     to_write = random(1024 * 1024 * 2.5).encode()
     with open(dummy_file, 'wb') as f:
         written = f.write(to_write)
From 648d602ece1484ff06a7ae714f7eaaf506a0ff00 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 8 Jul 2024 17:08:43 +0200
Subject: [PATCH 008/136] Fix for cancelled workflow (#701)

## Changes
Fixed the workflows so that matrix jobs are no longer cancelled when another
matrix job fails (set `fail-fast: false`).

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 .github/workflows/push.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 297be4c8e..2a2737f16 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -10,6 +10,7 @@ jobs:
   tests-ubuntu:
     uses: ./.github/workflows/test.yml
     strategy:
+      fail-fast: false
       matrix:
         pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
     with:
@@ -19,6 +20,7 @@ jobs:
   tests-windows:
     uses: ./.github/workflows/test.yml
     strategy:
+      fail-fast: false
       matrix:
         pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
     with:
From 18c143f96b182855fa05bb5682db3c0f29c7f079 Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Mon, 8 Jul 2024 17:13:14 +0200
Subject: [PATCH 009/136] Fix test_core for windows (#702)

## Changes
Fix `test_core.py` for Windows. 
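At its core, the fix makes the CLI test helpers platform-aware. A condensed sketch of the helper this PR adjusts (the full diff follows):

```python
import pathlib
import platform


def write_small_dummy_executable(path: pathlib.Path) -> pathlib.Path:
    # Windows cannot execute a bare shell script on PATH, so the fake
    # Databricks CLI becomes a batch file named `databricks.exe` there.
    if platform.system() == "Windows":
        cli = path.joinpath("databricks.exe")
        cli.touch()
        cli.write_text('@echo off\necho "hello world"\n')
    else:
        cli = path.joinpath("databricks")
        cli.write_text('#!/bin/sh\necho "hello world"\n')
        cli.chmod(0o755)
    return cli
```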
## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- tests/test_core.py | 35 ++++++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 7 deletions(-) diff --git a/tests/test_core.py b/tests/test_core.py index eb2f6d954..057147159 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -23,6 +23,7 @@ databricks_cli) from databricks.sdk.environments import (ENVIRONMENTS, AzureEnvironment, Cloud, DatabricksEnvironment) +from databricks.sdk.oauth import Token from databricks.sdk.service.catalog import PermissionsChange from databricks.sdk.service.iam import AccessControlRequest from databricks.sdk.version import __version__ @@ -67,9 +68,14 @@ def test_databricks_cli_token_parse_expiry(date_string, expected): def write_small_dummy_executable(path: pathlib.Path): - cli = path.joinpath('databricks') - cli.write_text('#!/bin/sh\necho "hello world"\n') - cli.chmod(0o755) + if platform.system() == "Windows": + cli = path.joinpath('databricks.exe') + cli.touch() + cli.write_text('@echo off\necho "hello world"\n') + else: + cli = path.joinpath('databricks') + cli.write_text('#!/bin/sh\necho "hello world"\n') + cli.chmod(0o755) assert cli.stat().st_size < 1024 return cli @@ -133,9 +139,15 @@ def test_databricks_cli_token_source_installed_legacy_with_symlink(config, monke dir1.mkdir() dir2.mkdir() - (dir1 / "databricks").symlink_to(write_small_dummy_executable(dir2)) + if platform.system() == 'Windows': + (dir1 / "databricks.exe").symlink_to(write_small_dummy_executable(dir2)) + else: + (dir1 / "databricks").symlink_to(write_small_dummy_executable(dir2)) + + path = pathlib.Path(dir1) + path = str(path) + monkeypatch.setenv('PATH', path) - monkeypatch.setenv('PATH', dir1.as_posix()) with pytest.raises(FileNotFoundError, match="version <0.100.0 detected"): DatabricksCliTokenSource(config) @@ -175,10 +187,19 @@ def test_databricks_cli_credential_provider_installed_legacy(config, monkeypatch assert databricks_cli(config) == None -def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, tmp_path): +def test_databricks_cli_credential_provider_installed_new(config, monkeypatch, tmp_path, mocker): + get_mock = mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh', + return_value=Token(access_token='token', + token_type='Bearer', + expiry=datetime(2023, 5, 22, 0, 0, 0))) write_large_dummy_executable(tmp_path) - monkeypatch.setenv('PATH', str(os.pathsep).join([tmp_path.as_posix(), os.environ['PATH']])) + path = str(os.pathsep).join([tmp_path.as_posix(), os.environ['PATH']]) + path = pathlib.Path(path) + path = str(path) + monkeypatch.setenv('PATH', path) + assert databricks_cli(config) is not None + assert get_mock.call_count == 1 def test_extra_and_upstream_user_agent(monkeypatch): From 984490d502d66f207b81a87a8c3080a275a8302a Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Tue, 9 Jul 2024 14:45:44 +0200 Subject: [PATCH 010/136] [Internal] Improve Changelog by grouping changes (#703) ## Changes Improve Changelog by grouping changes and enforce tag in PRs ## Tests - [X] `make test` run locally - [X] `make fmt` applied - [ ] relevant integration tests applied - [X] Recreate old changelog ``` ## 0.30.0 ### Other Changes * Add Windows WorkFlow ([#692](https://github.com/databricks/databricks-sdk-py/pull/692)). * Check trailing slash in host url ([#681](https://github.com/databricks/databricks-sdk-py/pull/681)). * Fix auth tests for windows. 
([#697](https://github.com/databricks/databricks-sdk-py/pull/697)). * Remove duplicate ubuntu tests ([#693](https://github.com/databricks/databricks-sdk-py/pull/693)). * Support partners in SDK ([#648](https://github.com/databricks/databricks-sdk-py/pull/648)). * fix windows path ([#660](https://github.com/databricks/databricks-sdk-py/pull/660)) ([#673](https://github.com/databricks/databricks-sdk-py/pull/673)). ### API Changes: * Added [w.serving_endpoints_data_plane](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints_data_plane.html) workspace-level service. * Added `deploy()` and `start()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. * Added `batch_get()` method for [w.consumer_listings](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/consumer_listings.html) workspace-level service. * Added `batch_get()` method for [w.consumer_providers](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/consumer_providers.html) workspace-level service. * Added `create_schedule()`, `create_subscription()`, `delete_schedule()`, `delete_subscription()`, `get_schedule()`, `get_subscription()`, `list()`, `list_schedules()`, `list_subscriptions()` and `update_schedule()` methods for [w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview.html) workspace-level service. * Added `query_next_page()` method for [w.vector_search_indexes](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/vector_search_indexes.html) workspace-level service. * Added `databricks.sdk.service.serving.AppDeploymentMode`, `databricks.sdk.service.serving.ModelDataPlaneInfo` and `databricks.sdk.service.serving.StartAppRequest` dataclasses. * Added `databricks.sdk.service.catalog.CatalogIsolationMode` and `databricks.sdk.service.catalog.ListAccountStorageCredentialsResponse` dataclasses. * Added `databricks.sdk.service.dashboards.CreateScheduleRequest`, `databricks.sdk.service.dashboards.CreateSubscriptionRequest`, `databricks.sdk.service.dashboards.CronSchedule`, `databricks.sdk.service.dashboards.DashboardView`, `databricks.sdk.service.dashboards.DeleteScheduleRequest`, `any`, `databricks.sdk.service.dashboards.DeleteSubscriptionRequest`, `any`, `databricks.sdk.service.dashboards.GetScheduleRequest`, `databricks.sdk.service.dashboards.GetSubscriptionRequest`, `databricks.sdk.service.dashboards.ListDashboardsRequest`, `databricks.sdk.service.dashboards.ListDashboardsResponse`, `databricks.sdk.service.dashboards.ListSchedulesRequest`, `databricks.sdk.service.dashboards.ListSchedulesResponse`, `databricks.sdk.service.dashboards.ListSubscriptionsRequest`, `databricks.sdk.service.dashboards.ListSubscriptionsResponse`, `databricks.sdk.service.dashboards.Schedule`, `databricks.sdk.service.dashboards.SchedulePauseStatus`, `databricks.sdk.service.dashboards.Subscriber`, `databricks.sdk.service.dashboards.Subscription`, `databricks.sdk.service.dashboards.SubscriptionSubscriberDestination`, `databricks.sdk.service.dashboards.SubscriptionSubscriberUser` and `databricks.sdk.service.dashboards.UpdateScheduleRequest` dataclasses. * Added `databricks.sdk.service.jobs.PeriodicTriggerConfiguration` and `databricks.sdk.service.jobs.PeriodicTriggerConfigurationTimeUnit` dataclasses. 
* Added `databricks.sdk.service.marketplace.BatchGetListingsRequest`, `databricks.sdk.service.marketplace.BatchGetListingsResponse`, `databricks.sdk.service.marketplace.BatchGetProvidersRequest`, `databricks.sdk.service.marketplace.BatchGetProvidersResponse`, `databricks.sdk.service.marketplace.ProviderIconFile`, `databricks.sdk.service.marketplace.ProviderIconType` and `databricks.sdk.service.marketplace.ProviderListingSummaryInfo` dataclasses. * Added `databricks.sdk.service.oauth2.DataPlaneInfo` dataclass. * Added `databricks.sdk.service.vectorsearch.QueryVectorIndexNextPageRequest` dataclass. * Added `isolation_mode` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.ListCatalogsRequest`. * Added `next_page_token` field for `databricks.sdk.service.catalog.ListCatalogsResponse`. * Added `table_serving_url` field for `databricks.sdk.service.catalog.OnlineTable`. * Added `isolation_mode` field for `databricks.sdk.service.catalog.StorageCredentialInfo`. * Added `isolation_mode` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. * Added `isolation_mode` field for `databricks.sdk.service.catalog.UpdateStorageCredential`. * Added `termination_category` field for `databricks.sdk.service.jobs.ForEachTaskErrorMessageStats`. * Added `on_streaming_backlog_exceeded` field for `databricks.sdk.service.jobs.JobEmailNotifications`. * Added `environment_key` field for `databricks.sdk.service.jobs.RunTask`. * Added `environments` field for `databricks.sdk.service.jobs.SubmitRun`. * Added `dbt_task` and `environment_key` fields for `databricks.sdk.service.jobs.SubmitTask`. * Added `on_streaming_backlog_exceeded` field for `databricks.sdk.service.jobs.TaskEmailNotifications`. * Added `periodic` field for `databricks.sdk.service.jobs.TriggerSettings`. * Added `on_streaming_backlog_exceeded` field for `databricks.sdk.service.jobs.WebhookNotifications`. * Added `provider_summary` field for `databricks.sdk.service.marketplace.Listing`. * Added `service_principal_id` and `service_principal_name` fields for `databricks.sdk.service.serving.App`. * Added `mode` field for `databricks.sdk.service.serving.AppDeployment`. * Added `mode` field for `databricks.sdk.service.serving.CreateAppDeploymentRequest`. * Added `data_plane_info` field for `databricks.sdk.service.serving.ServingEndpointDetailed`. * Added `query_type` field for `databricks.sdk.service.vectorsearch.QueryVectorIndexRequest`. * Added `next_page_token` field for `databricks.sdk.service.vectorsearch.QueryVectorIndexResponse`. * Changed `list()` method for [a.account_storage_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_storage_credentials.html) account-level service to return `databricks.sdk.service.catalog.ListAccountStorageCredentialsResponse` dataclass. * Changed `isolation_mode` field for `databricks.sdk.service.catalog.CatalogInfo` to `databricks.sdk.service.catalog.CatalogIsolationMode` dataclass. * Changed `isolation_mode` field for `databricks.sdk.service.catalog.UpdateCatalog` to `databricks.sdk.service.catalog.CatalogIsolationMode` dataclass. * Removed `create_deployment()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. 
* Removed `condition_task`, `dbt_task`, `notebook_task`, `pipeline_task`, `python_wheel_task`, `run_job_task`, `spark_jar_task`, `spark_python_task`, `spark_submit_task` and `sql_task` fields for `databricks.sdk.service.jobs.SubmitRun`. OpenAPI SHA: 7437dabb9dadee402c1fc060df4c1ce8cc5369f0, Date: 2024-06-24 ``` --- .codegen.json | 1 + .codegen/changelog.md.tmpl | 44 ++++++++++++++++++++++++++++++----- .codegen/changelog_config.yml | 11 +++++++++ .github/workflows/push.yml | 19 +++++++++++++++ 4 files changed, 69 insertions(+), 6 deletions(-) create mode 100644 .codegen/changelog_config.yml diff --git a/.codegen.json b/.codegen.json index 25c666260..a1886bd80 100644 --- a/.codegen.json +++ b/.codegen.json @@ -1,5 +1,6 @@ { "formatter": "yapf -pri $FILENAMES && autoflake -i $FILENAMES && isort $FILENAMES", + "changelog_config": ".codegen/changelog_config.yml", "template_libraries": [ ".codegen/lib.tmpl" ], diff --git a/.codegen/changelog.md.tmpl b/.codegen/changelog.md.tmpl index 37bf395a7..c9f2e87c4 100644 --- a/.codegen/changelog.md.tmpl +++ b/.codegen/changelog.md.tmpl @@ -1,13 +1,17 @@ # Version changelog ## {{.Version}} +{{- range .GroupChanges}} -{{range .Changes -}} +### {{.Type.Message}} +{{range .Changes}} * {{.}}. -{{end}}{{- if .ApiChanges}} -API Changes: -{{range .ApiChanges}} - * {{.Action}} {{template "what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "what" .}}{{end}}{{end}}. +{{- end}} +{{end}} +{{if .ApiChanges}} +### API Changes: +{{range .ApiChanges.GroupDiff}} + * {{.Action}} {{template "group-what" .}}{{if .Extra}} {{.Extra}}{{with .Other}} {{template "other-what" .}}{{end}}{{end}}. {{- end}} OpenAPI SHA: {{.Sha}}, Date: {{.Changed}} @@ -20,7 +24,35 @@ Dependency updates: ## {{.PrevVersion}} -{{- define "what" -}} +{{- define "group-what" -}} +{{if gt (len .Changes) 1 -}} {{template "single-what" .Changes.First}}{{end -}} +{{range .Changes.Middle -}}, {{template "single-what" .}}{{end -}} +{{if gt (len .Changes) 1}} and {{end}}{{template "single-what" .Changes.Last}}{{template "suffix-what" .}} +{{- end -}} + +{{- define "single-what" -}} + {{if eq .X "package" -}} + `databricks.sdk.service.{{.Package.Name}}` + {{- else if eq .X "service" -}} + {{template "service" .Service}} + {{- else if eq .X "method" -}} + `{{.Method.SnakeName}}()` + {{- else if eq .X "entity" -}} + {{template "entity" .Entity}} + {{- else if eq .X "field" -}} + `{{.Field.SnakeName}}` + {{- end}} +{{- end -}} + +{{- define "suffix-what" -}} + {{if eq .Type "package" }} package{{if gt (len .Changes) 1}}s{{end}} + {{- else if eq .Type "method" }} method{{if gt (len .Changes) 1}}s{{end}} for {{template "service" .Parent.Service}} + {{- else if eq .Type "entity" }} dataclass{{if gt (len .Changes) 1}}es{{end}} + {{- else if eq .Type "field" }} field{{if gt (len .Changes) 1}}s{{end}} for {{template "entity" .Parent.Entity}} + {{- end}} +{{- end -}} + +{{- define "other-what" -}} {{if eq .X "package" -}} `databricks.sdk.service.{{.Package.Name}}` package {{- else if eq .X "service" -}} diff --git a/.codegen/changelog_config.yml b/.codegen/changelog_config.yml new file mode 100644 index 000000000..a35312305 --- /dev/null +++ b/.codegen/changelog_config.yml @@ -0,0 +1,11 @@ +change_types: + - message: New Features and Improvements + tag: "[Feature]" + - message: Bug Fixes + tag: "[Fix]" + - message: Documentation + tag: "[Doc]" + - message: Internal Changes + tag: "[Internal]" + # Default for messages without a tag + - message: Other Changes \ No newline at end of file diff --git 
a/.github/workflows/push.yml b/.github/workflows/push.yml index 2a2737f16..ff813bfa8 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -39,3 +39,22 @@ jobs: - name: Fail on differences run: git diff --exit-code + + commit-message: + runs-on: ubuntu-latest + if: ${{ github.event_name == 'pull_request' }} + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Validate Tag + run: | + TAG=$(echo ${{ github.event.pull_request.title }} | sed -ne 's/\[\(.*\)\].*/\1/p') + if grep -q "tag: \"[$TAG]\"" .codegen/changelog_config.yml; then + echo "Invalid or missing tag in commit message: [$TAG]" + exit 1 + else + echo "Valid tag found: [$TAG]" + fi \ No newline at end of file From ccf38b3b3777baf83c6f4e1cf2afc73fe6afc5ed Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Tue, 9 Jul 2024 16:55:26 +0200 Subject: [PATCH 011/136] [Internal] Add Release tag and Workflow fix (#704) ## Changes Add Release tag ## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- .codegen/changelog_config.yml | 3 +++ .github/workflows/push.yml | 6 +++--- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/.codegen/changelog_config.yml b/.codegen/changelog_config.yml index a35312305..ed2fe1046 100644 --- a/.codegen/changelog_config.yml +++ b/.codegen/changelog_config.yml @@ -7,5 +7,8 @@ change_types: tag: "[Doc]" - message: Internal Changes tag: "[Internal]" + # Does not appear in the Changelog. Only for PR validation. + - message: Release + tag: "[Release]" # Default for messages without a tag - message: Other Changes \ No newline at end of file diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index ff813bfa8..a61fee241 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -52,9 +52,9 @@ jobs: - name: Validate Tag run: | TAG=$(echo ${{ github.event.pull_request.title }} | sed -ne 's/\[\(.*\)\].*/\1/p') - if grep -q "tag: \"[$TAG]\"" .codegen/changelog_config.yml; then + if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then + echo "Valid tag found: [$TAG]" + else echo "Invalid or missing tag in commit message: [$TAG]" exit 1 - else - echo "Valid tag found: [$TAG]" fi \ No newline at end of file From 64629123b279dcd744a69b98f7fe315766f435ef Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Tue, 16 Jul 2024 09:27:05 +0200 Subject: [PATCH 012/136] [Internal] Move PR message validation to a separate workflow (#707) ## Changes Move PR message validation to a separate workflow ## Tests Updated title for this PR --- .github/workflows/message.yml | 25 +++++++++++++++++++++++++ .github/workflows/push.yml | 19 ------------------- 2 files changed, 25 insertions(+), 19 deletions(-) create mode 100644 .github/workflows/message.yml diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml new file mode 100644 index 000000000..4632b80c5 --- /dev/null +++ b/.github/workflows/message.yml @@ -0,0 +1,25 @@ +name: Validate Commit Message + +on: + pull_request: + types: [opened, synchronize, edited] + +jobs: + + validate: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Validate Tag + run: | + TAG=$(echo ${{ github.event.pull_request.title }} | sed -ne 's/\[\(.*\)\].*/\1/p') + if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then + echo "Valid tag found: [$TAG]" + else + echo "Invalid or missing tag in commit message: [$TAG]" + exit 1 + fi \ No newline at end of file 
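The validation that `message.yml` performs is easier to follow outside of shell quoting. Below is a rough Python equivalent of the same check; the tag list is transcribed from `.codegen/changelog_config.yml` as of #704, and `validate_pr_title` is an illustrative name, not something that exists in this repository. Note that #704 also had to escape the brackets in the `grep` pattern: unescaped, `[$TAG]` is a regex character class that matches any single character of the tag.

```python
import re

# Allowed tags, mirroring .codegen/changelog_config.yml after #704.
KNOWN_TAGS = {"[Feature]", "[Fix]", "[Doc]", "[Internal]", "[Release]"}

def validate_pr_title(title: str) -> bool:
    # Mirrors `sed -ne 's/\[\(.*\)\].*/\1/p'`: capture the bracketed tag
    # (greedy, like sed; titles are expected to contain a single [Tag] prefix).
    match = re.search(r"\[(.*)\]", title)
    if match is None:
        return False  # no tag at all: sed prints nothing, grep finds nothing
    # Mirrors `grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml`.
    return f"[{match.group(1)}]" in KNOWN_TAGS

assert validate_pr_title("[Internal] Add Release tag and Workflow fix (#704)")
assert not validate_pr_title("Add DataPlane support (#700)")
```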
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index a61fee241..2a2737f16 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -39,22 +39,3 @@ jobs: - name: Fail on differences run: git diff --exit-code - - commit-message: - runs-on: ubuntu-latest - if: ${{ github.event_name == 'pull_request' }} - steps: - - name: Checkout - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Validate Tag - run: | - TAG=$(echo ${{ github.event.pull_request.title }} | sed -ne 's/\[\(.*\)\].*/\1/p') - if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then - echo "Valid tag found: [$TAG]" - else - echo "Invalid or missing tag in commit message: [$TAG]" - exit 1 - fi \ No newline at end of file From 3009a6bfc0e86438dcf042bb48b32140a5f658e1 Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Tue, 16 Jul 2024 10:21:36 +0200 Subject: [PATCH 013/136] [Feature] Add DataPlane support (#700) ## Changes Add DataPlane support ## Tests - [X] `make test` run locally - [X] `make fmt` applied - [ ] relevant integration tests applied - [X] Manual test against staging workspace (prod workspaces don't support DataPlane APIs) --- .codegen/__init__.py.tmpl | 40 ++++-- .codegen/service.py.tmpl | 70 ++++++++--- databricks/sdk/__init__.py | 12 +- databricks/sdk/core.py | 25 ++-- databricks/sdk/data_plane.py | 65 ++++++++++ databricks/sdk/service/serving.py | 118 ++++++++++++++++++ databricks/sdk/service/sql.py | 1 + docs/dbdataclasses/sql.rst | 2 + docs/workspace/catalog/endpoints.rst | 35 ++++++ docs/workspace/serving/index.rst | 3 +- .../serving/serving_endpoints_data_plane.rst | 59 +++++++++ tests/test_data_plane.py | 59 +++++++++ 12 files changed, 453 insertions(+), 36 deletions(-) create mode 100644 databricks/sdk/data_plane.py create mode 100644 docs/workspace/catalog/endpoints.rst create mode 100644 docs/workspace/serving/serving_endpoints_data_plane.rst create mode 100644 tests/test_data_plane.py diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl index d5b83e3f2..572b50490 100644 --- a/.codegen/__init__.py.tmpl +++ b/.codegen/__init__.py.tmpl @@ -5,8 +5,8 @@ from databricks.sdk.credentials_provider import CredentialsStrategy from databricks.sdk.mixins.files import DbfsExt from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.workspace import WorkspaceExt -{{- range .Services}} {{if not .IsDataPlane}} -from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}{{end}} +{{- range .Services}} +from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}} from databricks.sdk.service.provisioning import Workspace from databricks.sdk import azure @@ -61,8 +61,20 @@ class WorkspaceClient: self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} - self._{{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} + {{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + + {{- range .Services}} + {{- if and (not .IsAccounts) (not .HasParent)}} + {{- if .IsDataPlane}} + self._{{.SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) + {{- else if .HasDataPlaneAPI}} + self._{{.SnakeName}} = {{.SnakeName}} + {{- else}} + self._{{.SnakeName}} = {{template "api" .}}(self._api_client) + {{- end -}} + {{- 
end -}} + {{end}} @property def config(self) -> client.Config: @@ -76,7 +88,7 @@ class WorkspaceClient: def dbutils(self) -> dbutils.RemoteDbUtils: return self._dbutils - {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) (not .IsDataPlane)}} + {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent)}} @property def {{.SnakeName}}(self) -> {{template "api" .}}: {{if .Description}}"""{{.Summary}}"""{{end}} @@ -117,8 +129,20 @@ class AccountClient: self._config = config.copy() self._api_client = client.ApiClient(self._config) - {{- range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} - self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + {{- range .Services}}{{if and .IsAccounts (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}} + {{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}} + + {{- range .Services}} + {{- if and .IsAccounts (not .HasParent)}} + {{- if .IsDataPlane}} + self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}}) + {{- else if .HasDataPlaneAPI}} + self._{{(.TrimPrefix "account").SnakeName}} = {{(.TrimPrefix "account").SnakeName}} + {{- else}} + self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client) + {{- end -}} + {{- end -}} + {{end}} @property def config(self) -> client.Config: @@ -128,7 +152,7 @@ class AccountClient: def api_client(self) -> client.ApiClient: return self._api_client - {{- range .Services}}{{if and .IsAccounts (not .HasParent) (not .IsDataPlane)}} + {{- range .Services}}{{if and .IsAccounts (not .HasParent)}} @property def {{(.TrimPrefix "account").SnakeName}}(self) -> {{template "api" .}}:{{if .Description}} """{{.Summary}}"""{{end}} diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl index 39892b43c..643b1f33a 100644 --- a/.codegen/service.py.tmpl +++ b/.codegen/service.py.tmpl @@ -8,8 +8,12 @@ from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO import time import random import logging +import requests + +from ..data_plane import DataPlaneService from ..errors import OperationTimeout, OperationFailed from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter +from ..oauth import Token _LOG = logging.getLogger('databricks.sdk') @@ -100,12 +104,16 @@ class {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:{{if .Descriptio {{- end -}} {{- end -}} -{{range .Services}} {{if not .IsDataPlane}} +{{range .Services}} class {{.PascalName}}API:{{if .Description}} """{{.Comment " " 110}}""" {{end}} - def __init__(self, api_client): + def __init__(self, api_client{{if .IsDataPlane}}, control_plane{{end}}): self._api = api_client + {{if .IsDataPlane -}} + self._control_plane = control_plane + self._data_plane_service = DataPlaneService() + {{end -}} {{range .Subservices}} self._{{.SnakeName}} = {{.PascalName}}API(self._api){{end}} @@ -183,6 +191,9 @@ class {{.PascalName}}API:{{if .Description}} {{if .Request -}} {{template "method-serialize" .}} {{- end}} + {{- if .Service.IsDataPlane}} + {{template "data-plane" .}} + {{- end}} {{template "method-headers" . }} {{if .Response.HasHeaderField -}} {{template "method-response-headers" . 
}} @@ -195,7 +206,27 @@ class {{.PascalName}}API:{{if .Description}} return self.{{template "safe-snake-name" .}}({{range $i, $x := .Request.Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" .}}={{template "safe-snake-name" .}}{{end}}).result(timeout=timeout) {{end}} {{end -}} -{{end}} +{{- end}} + +{{define "data-plane" -}} + def info_getter(): + response = self._control_plane.{{.Service.DataPlaneInfoMethod.SnakeName}}( + {{- range .Service.DataPlaneInfoMethod.Request.Fields }} + {{.SnakeName}} = {{.SnakeName}}, + {{- end}} + ) + if response.{{(index .DataPlaneInfoFields 0).SnakeName}} is None: + raise Exception("Resource does not support direct Data Plane access") + return response{{range .DataPlaneInfoFields}}.{{.SnakeName}}{{end}} + + get_params = [{{- range .Service.DataPlaneInfoMethod.Request.Fields }}{{.SnakeName}},{{end}}] + data_plane_details = self._data_plane_service.get_data_plane_details('{{.SnakeName}}', get_params, info_getter, self._api.get_oauth_token) + token = data_plane_details.token + + def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: + authorization = f"{token.token_type} {token.access_token}" + r.headers["Authorization"] = authorization + return r {{- end}} {{define "method-parameters" -}} @@ -325,19 +356,26 @@ class {{.PascalName}}API:{{if .Description}} {{- end}} {{define "method-do" -}} -self._api.do('{{.Verb}}', - {{ template "path" . }} - {{if .Request}} - {{- if .Request.HasQueryField}}, query=query{{end}} - {{- if .Request.MapValue}}, body=contents - {{- else if .Request.HasJsonField}}, body=body{{end}} - {{end}} - , headers=headers - {{if .Response.HasHeaderField -}} - , response_headers=response_headers - {{- end}} - {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }} - {{- if .IsResponseByteStream }}, raw=True{{ end }}) + self._api.do('{{.Verb}}', + {{- if .Service.IsDataPlane -}} + url=data_plane_details.endpoint_url + {{- else -}} + {{ template "path" . 
}} + {{- end -}} + {{if .Request}} + {{- if .Request.HasQueryField}}, query=query{{end}} + {{- if .Request.MapValue}}, body=contents + {{- else if .Request.HasJsonField}}, body=body{{end}} + {{end}} + , headers=headers + {{if .Response.HasHeaderField -}} + , response_headers=response_headers + {{- end}} + {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }} + {{- if .Service.IsDataPlane -}} + ,auth=auth + {{- end -}} + {{- if .IsResponseByteStream }}, raw=True{{ end }}) {{- end}} {{- define "path" -}} diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 05c95fb6f..8485efbac 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -55,7 +55,8 @@ NetworksAPI, PrivateAccessAPI, StorageAPI, VpcEndpointsAPI, Workspace, WorkspacesAPI) -from databricks.sdk.service.serving import AppsAPI, ServingEndpointsAPI +from databricks.sdk.service.serving import (AppsAPI, ServingEndpointsAPI, + ServingEndpointsDataPlaneAPI) from databricks.sdk.service.settings import (AccountIpAccessListsAPI, AccountSettingsAPI, AutomaticClusterUpdateAPI, @@ -162,6 +163,7 @@ def __init__(self, self._config = config.copy() self._dbutils = _make_dbutils(self._config) self._api_client = client.ApiClient(self._config) + serving_endpoints = ServingEndpointsAPI(self._api_client) self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) self._alerts = AlertsAPI(self._api_client) self._apps = AppsAPI(self._api_client) @@ -226,7 +228,8 @@ def __init__(self, self._schemas = SchemasAPI(self._api_client) self._secrets = SecretsAPI(self._api_client) self._service_principals = ServicePrincipalsAPI(self._api_client) - self._serving_endpoints = ServingEndpointsAPI(self._api_client) + self._serving_endpoints = serving_endpoints + self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints) self._settings = SettingsAPI(self._api_client) self._shares = SharesAPI(self._api_client) self._statement_execution = StatementExecutionAPI(self._api_client) @@ -577,6 +580,11 @@ def serving_endpoints(self) -> ServingEndpointsAPI: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints.""" return self._serving_endpoints + @property + def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI: + """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service.""" + return self._serving_endpoints_data_plane + @property def settings(self) -> SettingsAPI: """Workspace Settings API allows users to manage settings at the workspace level.""" diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py index cacbad908..b686bd7fd 100644 --- a/databricks/sdk/core.py +++ b/databricks/sdk/core.py @@ -133,31 +133,36 @@ def get_oauth_token(self, auth_details: str) -> Token: def do(self, method: str, - path: str, + path: str = None, + url: str = None, query: dict = None, headers: dict = None, body: dict = None, raw: bool = False, files=None, data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None, response_headers: List[str] = None) -> Union[dict, BinaryIO]: - # Remove extra `/` from path for Files API - # Once we've fixed the OpenAPI spec, we can remove this - path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path) if headers is None: headers = {} + if url is None: + # Remove extra `/` from path for Files API + # Once we've fixed the OpenAPI spec, 
we can remove this + path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path) + url = f"{self._cfg.host}{path}" headers['User-Agent'] = self._user_agent_base retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds), is_retryable=self._is_retryable, clock=self._cfg.clock) response = retryable(self._perform)(method, - path, + url, query=query, headers=headers, body=body, raw=raw, files=files, - data=data) + data=data, + auth=auth) resp = dict() for header in response_headers if response_headers else []: @@ -239,20 +244,22 @@ def _parse_retry_after(cls, response: requests.Response) -> Optional[int]: def _perform(self, method: str, - path: str, + url: str, query: dict = None, headers: dict = None, body: dict = None, raw: bool = False, files=None, - data=None): + data=None, + auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None): response = self._session.request(method, - f"{self._cfg.host}{path}", + url, params=self._fix_query_string(query), json=body, headers=headers, files=files, data=data, + auth=auth, stream=raw, timeout=self._http_timeout_seconds) try: diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py new file mode 100644 index 000000000..6f6ddf80c --- /dev/null +++ b/databricks/sdk/data_plane.py @@ -0,0 +1,65 @@ +import threading +from dataclasses import dataclass +from typing import Callable, List + +from databricks.sdk.oauth import Token +from databricks.sdk.service.oauth2 import DataPlaneInfo + + +@dataclass +class DataPlaneDetails: + """ + Contains details required to query a DataPlane endpoint. + """ + endpoint_url: str + """URL used to query the endpoint through the DataPlane.""" + token: Token + """Token to query the DataPlane endpoint.""" + + +class DataPlaneService: + """Helper class to fetch and manage DataPlane details.""" + + def __init__(self): + self._data_plane_info = {} + self._tokens = {} + self._lock = threading.Lock() + + def get_data_plane_details(self, method: str, params: List[str], info_getter: Callable[[], DataPlaneInfo], + refresh: Callable[[str], Token]): + """Get and cache information required to query a Data Plane endpoint using the provided methods. + + Returns a cached DataPlaneDetails if the details have already been fetched previously and are still valid. + If not, it uses the provided functions to fetch the details. + + :param method: method name. Used to construct a unique key for the cache. + :param params: path params used in the "get" operation which uniquely determine the object. Used to construct a unique key for the cache. + :param info_getter: function which returns the DataPlaneInfo. It will only be called if the information is not already present in the cache. + :param refresh: function to refresh the token. It will only be called if the token is missing or expired. 
+ """ + all_elements = params.copy() + all_elements.insert(0, method) + map_key = "/".join(all_elements) + info = self._data_plane_info.get(map_key) + if not info: + self._lock.acquire() + try: + info = self._data_plane_info.get(map_key) + if not info: + info = info_getter() + self._data_plane_info[map_key] = info + finally: + self._lock.release() + + token = self._tokens.get(map_key) + if not token or not token.valid: + self._lock.acquire() + token = self._tokens.get(map_key) + try: + if not token or not token.valid: + token = refresh(info.authorization_details) + self._tokens[map_key] = token + finally: + self._lock.release() + + return DataPlaneDetails(endpoint_url=info.endpoint_url, token=token) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 6c39c598d..0f3d00de9 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -10,6 +10,9 @@ from enum import Enum from typing import Any, BinaryIO, Callable, Dict, Iterator, List, Optional +import requests + +from ..data_plane import DataPlaneService from ..errors import OperationFailed from ._internal import Wait, _enum, _from_dict, _repeated_dict @@ -3335,3 +3338,118 @@ def update_permissions( body=body, headers=headers) return ServingEndpointPermissions.from_dict(res) + + +class ServingEndpointsDataPlaneAPI: + """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving + endpoints service.""" + + def __init__(self, api_client, control_plane): + self._api = api_client + self._control_plane = control_plane + self._data_plane_service = DataPlaneService() + + def query(self, + name: str, + *, + dataframe_records: Optional[List[Any]] = None, + dataframe_split: Optional[DataframeSplitInput] = None, + extra_params: Optional[Dict[str, str]] = None, + input: Optional[Any] = None, + inputs: Optional[Any] = None, + instances: Optional[List[Any]] = None, + max_tokens: Optional[int] = None, + messages: Optional[List[ChatMessage]] = None, + n: Optional[int] = None, + prompt: Optional[Any] = None, + stop: Optional[List[str]] = None, + stream: Optional[bool] = None, + temperature: Optional[float] = None) -> QueryEndpointResponse: + """Query a serving endpoint. + + :param name: str + The name of the serving endpoint. This field is required. + :param dataframe_records: List[Any] (optional) + Pandas Dataframe input in the records orientation. + :param dataframe_split: :class:`DataframeSplitInput` (optional) + Pandas Dataframe input in the split orientation. + :param extra_params: Dict[str,str] (optional) + The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & + foundation model__ serving endpoints. This is a map of strings and should only be used with other + external/foundation model query fields. + :param input: Any (optional) + The input string (or array of strings) field used ONLY for __embeddings external & foundation + model__ serving endpoints and is the only field (along with extra_params if needed) used by + embeddings queries. + :param inputs: Any (optional) + Tensor-based input in columnar format. + :param instances: List[Any] (optional) + Tensor-based input in row format. + :param max_tokens: int (optional) + The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is an integer and should only be used with other chat/completions query fields. 
+ :param messages: List[:class:`ChatMessage`] (optional) + The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a + map of strings and should only be used with other chat query fields. + :param n: int (optional) + The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation + model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be + used with other chat/completions query fields. + :param prompt: Any (optional) + The prompt string (or array of strings) field used ONLY for __completions external & foundation + model__ serving endpoints and should only be used with other completions query fields. + :param stop: List[str] (optional) + The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ + serving endpoints. This is a list of strings and should only be used with other chat/completions + query fields. + :param stream: bool (optional) + The stream field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is a boolean defaulting to false and should only be used with other chat/completions + query fields. + :param temperature: float (optional) + The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with + other chat/completions query fields. + + :returns: :class:`QueryEndpointResponse` + """ + body = {} + if dataframe_records is not None: body['dataframe_records'] = [v for v in dataframe_records] + if dataframe_split is not None: body['dataframe_split'] = dataframe_split.as_dict() + if extra_params is not None: body['extra_params'] = extra_params + if input is not None: body['input'] = input + if inputs is not None: body['inputs'] = inputs + if instances is not None: body['instances'] = [v for v in instances] + if max_tokens is not None: body['max_tokens'] = max_tokens + if messages is not None: body['messages'] = [v.as_dict() for v in messages] + if n is not None: body['n'] = n + if prompt is not None: body['prompt'] = prompt + if stop is not None: body['stop'] = [v for v in stop] + if stream is not None: body['stream'] = stream + if temperature is not None: body['temperature'] = temperature + + def info_getter(): + response = self._control_plane.get(name=name, ) + if response.data_plane_info is None: + raise Exception("Resource does not support direct Data Plane access") + return response.data_plane_info.query_info + + get_params = [name, ] + data_plane_details = self._data_plane_service.get_data_plane_details('query', get_params, info_getter, + self._api.get_oauth_token) + token = data_plane_details.token + + def auth(r: requests.PreparedRequest) -> requests.PreparedRequest: + authorization = f"{token.token_type} {token.access_token}" + r.headers["Authorization"] = authorization + return r + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + response_headers = ['served-model-name', ] + res = self._api.do('POST', + url=data_plane_details.endpoint_url, + body=body, + headers=headers, + response_headers=response_headers, + auth=auth) + return QueryEndpointResponse.from_dict(res) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index fa7f93f6e..b363ab7d2 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -360,6 +360,7 @@ def from_dict(cls, d: Dict[str, any]) -> 
ChannelInfo: class ChannelName(Enum): + """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index adf3ced56..fe1469a30 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -64,6 +64,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ChannelName + Name of the channel + .. py:attribute:: CHANNEL_NAME_CURRENT :value: "CHANNEL_NAME_CURRENT" diff --git a/docs/workspace/catalog/endpoints.rst b/docs/workspace/catalog/endpoints.rst new file mode 100644 index 000000000..8c6efba40 --- /dev/null +++ b/docs/workspace/catalog/endpoints.rst @@ -0,0 +1,35 @@ +``w.endpoints``: Online Endpoints +================================= +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: EndpointsAPI + + Endpoints are used to connect to PG clusters. + + .. py:method:: create( [, endpoint: Optional[Endpoint]]) -> Endpoint + + Create an Endpoint. + + :param endpoint: :class:`Endpoint` (optional) + Endpoint + + :returns: :class:`Endpoint` + + + .. py:method:: delete(name: str) + + Delete an Endpoint. + + :param name: str + + + + + .. py:method:: get(name: str) -> Endpoint + + Get an Endpoint. + + :param name: str + + :returns: :class:`Endpoint` + \ No newline at end of file diff --git a/docs/workspace/serving/index.rst b/docs/workspace/serving/index.rst index ce3d216ff..1d0bdf7fc 100644 --- a/docs/workspace/serving/index.rst +++ b/docs/workspace/serving/index.rst @@ -8,4 +8,5 @@ Use real-time inference for machine learning :maxdepth: 1 apps - serving_endpoints \ No newline at end of file + serving_endpoints + serving_endpoints_data_plane \ No newline at end of file diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst new file mode 100644 index 000000000..8fb09e7ff --- /dev/null +++ b/docs/workspace/serving/serving_endpoints_data_plane.rst @@ -0,0 +1,59 @@ +``w.serving_endpoints_data_plane``: Serving endpoints DataPlane +=============================================================== +.. currentmodule:: databricks.sdk.service.serving + +.. py:class:: ServingEndpointsDataPlaneAPI + + Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving + endpoints service. + + .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse + + Query a serving endpoint. + + :param name: str + The name of the serving endpoint. This field is required. + :param dataframe_records: List[Any] (optional) + Pandas Dataframe input in the records orientation. + :param dataframe_split: :class:`DataframeSplitInput` (optional) + Pandas Dataframe input in the split orientation. + :param extra_params: Dict[str,str] (optional) + The extra parameters field used ONLY for __completions, chat,__ and __embeddings external & + foundation model__ serving endpoints. This is a map of strings and should only be used with other + external/foundation model query fields. 
+ :param input: Any (optional) + The input string (or array of strings) field used ONLY for __embeddings external & foundation + model__ serving endpoints and is the only field (along with extra_params if needed) used by + embeddings queries. + :param inputs: Any (optional) + Tensor-based input in columnar format. + :param instances: List[Any] (optional) + Tensor-based input in row format. + :param max_tokens: int (optional) + The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is an integer and should only be used with other chat/completions query fields. + :param messages: List[:class:`ChatMessage`] (optional) + The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a + map of strings and should only be used with other chat query fields. + :param n: int (optional) + The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation + model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be + used with other chat/completions query fields. + :param prompt: Any (optional) + The prompt string (or array of strings) field used ONLY for __completions external & foundation + model__ serving endpoints and should only be used with other completions query fields. + :param stop: List[str] (optional) + The stop sequences field used ONLY for __completions__ and __chat external & foundation model__ + serving endpoints. This is a list of strings and should only be used with other chat/completions + query fields. + :param stream: bool (optional) + The stream field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is a boolean defaulting to false and should only be used with other chat/completions + query fields. + :param temperature: float (optional) + The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving + endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with + other chat/completions query fields. 
+ + :returns: :class:`QueryEndpointResponse` + \ No newline at end of file diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py new file mode 100644 index 000000000..a74658964 --- /dev/null +++ b/tests/test_data_plane.py @@ -0,0 +1,59 @@ +from datetime import datetime, timedelta + +from databricks.sdk.data_plane import DataPlaneService +from databricks.sdk.oauth import Token +from databricks.sdk.service.oauth2 import DataPlaneInfo + +info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url") + +token = Token(access_token="token", token_type="type", expiry=datetime.now() + timedelta(hours=1)) + + +class MockRefresher: + + def __init__(self, expected: str): + self._expected = expected + + def __call__(self, auth_details: str) -> Token: + assert self._expected == auth_details + return token + + +def throw_exception(): + raise Exception("Expected value to be cached") + + +def test_not_cached(): + data_plane = DataPlaneService() + res = data_plane.get_data_plane_details("method", ["params"], lambda: info, + lambda a: MockRefresher(info.authorization_details).__call__(a)) + assert res.endpoint_url == info.endpoint_url + assert res.token == token + + +def test_token_expired(): + expired = Token(access_token="expired", token_type="type", expiry=datetime.now() + timedelta(hours=-1)) + data_plane = DataPlaneService() + data_plane._tokens["method/params"] = expired + res = data_plane.get_data_plane_details("method", ["params"], lambda: info, + lambda a: MockRefresher(info.authorization_details).__call__(a)) + assert res.endpoint_url == info.endpoint_url + assert res.token == token + + +def test_info_cached(): + data_plane = DataPlaneService() + data_plane._data_plane_info["method/params"] = info + res = data_plane.get_data_plane_details("method", ["params"], throw_exception, + lambda a: MockRefresher(info.authorization_details).__call__(a)) + assert res.endpoint_url == info.endpoint_url + assert res.token == token + + +def test_token_cached(): + data_plane = DataPlaneService() + data_plane._data_plane_info["method/params"] = info + data_plane._tokens["method/params"] = token + res = data_plane.get_data_plane_details("method", ["params"], throw_exception, throw_exception) + assert res.endpoint_url == info.endpoint_url + assert res.token == token From eace94d66b7f782836e8a7134d3e48ef21e55326 Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Wed, 17 Jul 2024 09:31:47 +0200 Subject: [PATCH 014/136] [Internal] Trigger the validate workflow in the merge queue (#709) ## Changes Trigger the validate workflow in the merge queue --- .github/workflows/message.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml index 4632b80c5..0c0c09a39 100644 --- a/.github/workflows/message.yml +++ b/.github/workflows/message.yml @@ -3,11 +3,16 @@ name: Validate Commit Message on: pull_request: types: [opened, synchronize, edited] + merge_group: + types: [checks_requested] jobs: - validate: runs-on: ubuntu-latest + # GitHub required checks are shared between PRs and the Merge Queue. + # Since there is no PR title on Merge Queue, we need to trigger and + # skip this test for Merge Queue to succeed. 
+    if: github.event_name == 'pull_request'
     steps:
       - name: Checkout
         uses: actions/checkout@v3
         with:
           fetch-depth: 0

From f5c5f484716fd5d2bc3e91eb87009ac22359992d Mon Sep 17 00:00:00 2001
From: hectorcast-db
Date: Thu, 18 Jul 2024 09:50:46 +0200
Subject: [PATCH 015/136] [Internal] Fix processing of `quoted` titles (#712)

## Changes
Fix processing of `quoted` titles. Follows GitHub's recommendations for
mitigating script injection attacks:
https://docs.github.com/en/actions/security-guides/security-hardening-for-github-actions#good-practices-for-mitigating-script-injection-attacks

## Tests
This PR

---
 .github/workflows/message.yml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml
index 0c0c09a39..057556895 100644
--- a/.github/workflows/message.yml
+++ b/.github/workflows/message.yml
@@ -20,8 +20,10 @@ jobs:
           fetch-depth: 0
 
       - name: Validate Tag
+        env:
+          TITLE: ${{ github.event.pull_request.title }}
         run: |
-          TAG=$(echo ${{ github.event.pull_request.title }} | sed -ne 's/\[\(.*\)\].*/\1/p')
+          TAG=$(echo "$TITLE" | sed -ne 's/\[\(.*\)\].*/\1/p')
           if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then
             echo "Valid tag found: [$TAG]"
           else

From b0750ebb718e41e9e14cfbf76ddb4569642f75ee Mon Sep 17 00:00:00 2001
From: Miles Yucht
Date: Thu, 18 Jul 2024 13:39:12 +0200
Subject: [PATCH 016/136] [Fix] Infer Azure tenant ID if not set (#638)

## Changes
Port of https://github.com/databricks/databricks-sdk-go/pull/910 to the
Python SDK.

In order to use Azure U2M or M2M authentication with the Databricks SDK,
users must request a token from the Entra ID instance that the underlying
workspace or account belongs to, as Databricks rejects requests to
workspaces with a token from a different Entra ID tenant. However, with
Azure CLI auth, it is possible that a user is logged into multiple tenants
at the same time.

Currently, the SDK uses the subscription ID from the configured Azure
Resource ID for the workspace when issuing the `az account get-access-token`
command. However, when users don't specify the resource ID, the SDK simply
fetches a token for the user's active subscription. If the active
subscription is in a different tenant than the workspace, users will see an
error such as:

```
io.jsonwebtoken.IncorrectClaimException: Expected iss claim to be: https://sts.windows.net/72f988bf-86f1-41af-91ab-2d7cd011db47/, but was: https://sts.windows.net/e3fe3f22-4b98-4c04-82cc-d8817d1b17da/
```

This PR modifies the Azure CLI and Azure service principal credential
providers to attempt to load the workspace's tenant ID before
authenticating, if it is not already provided. Currently, there is no
unauthenticated endpoint from which the tenant ID can be fetched directly.
However, the tenant ID is indirectly exposed via the redirect URL used when
logging into a workspace. This PR fetches the tenant ID from that endpoint
and configures it if it is not already set. The tenant ID is fetched
lazily, and only in the auth methods that need it, which prevents
unnecessary requests when these Azure credential providers are not used.

## Tests
Unit tests check that the tenant ID is fetched automatically if it is not
specified for an Azure workspace when authenticating with a client ID and
secret or with the CLI.
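For illustration, the redirect-based lookup described above fits in a few lines. This sketch mirrors the `load_azure_tenant_id` helper added to `config.py` in this patch, with logging and edge cases reduced to returning `None`; the standalone function name is ours, not the SDK's:

```python
from typing import Optional
import urllib.parse

import requests

def infer_azure_tenant_id(host: str) -> Optional[str]:
    # The login page redirects to
    # https://login.microsoftonline.com/<tenant>/oauth2/authorize?...
    # (the domain varies by Azure cloud), so the tenant ID is the first
    # path segment of the Location header.
    resp = requests.get(f"{host}/aad/auth", allow_redirects=False)
    location = resp.headers.get("Location")
    if resp.status_code // 100 != 3 or location is None:
        return None  # not a redirect: the tenant ID cannot be inferred
    path_segments = urllib.parse.urlparse(location).path.split("/")
    return path_segments[1] if len(path_segments) >= 2 else None

# e.g. infer_azure_tenant_id("https://adb-123.4.azuredatabricks.net")
```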
- [x] `make test` run locally - [x] `make fmt` applied - [x] relevant integration tests applied --- databricks/sdk/config.py | 27 +++++++++++++++++ databricks/sdk/credentials_provider.py | 30 +++++++++--------- tests/conftest.py | 13 ++++++++ tests/test_auth.py | 9 ++++-- tests/test_auth_manual_tests.py | 15 ++++++--- tests/test_config.py | 42 +++++++++++++++++++++++++- 6 files changed, 112 insertions(+), 24 deletions(-) diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index 47d0ecc44..28d57ad42 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -363,6 +363,33 @@ def _fix_host_if_needed(self): self.host = urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment)) + def load_azure_tenant_id(self): + """[Internal] Load the Azure tenant ID from the Azure Databricks login page. + + If the tenant ID is already set, this method does nothing.""" + if not self.is_azure or self.azure_tenant_id is not None or self.host is None: + return + login_url = f'{self.host}/aad/auth' + logger.debug(f'Loading tenant ID from {login_url}') + resp = requests.get(login_url, allow_redirects=False) + if resp.status_code // 100 != 3: + logger.debug( + f'Failed to get tenant ID from {login_url}: expected status code 3xx, got {resp.status_code}') + return + entra_id_endpoint = resp.headers.get('Location') + if entra_id_endpoint is None: + logger.debug(f'No Location header in response from {login_url}') + return + # The Location header has the following form: https://login.microsoftonline.com//oauth2/authorize?... + # The domain may change depending on the Azure cloud (e.g. login.microsoftonline.us for US Government cloud). + url = urllib.parse.urlparse(entra_id_endpoint) + path_segments = url.path.split('/') + if len(path_segments) < 2: + logger.debug(f'Invalid path in Location header: {url.path}') + return + self.azure_tenant_id = path_segments[1] + logger.debug(f'Loaded tenant ID: {self.azure_tenant_id}') + def _set_inner_config(self, keyword_args: Dict[str, any]): for attr in self.attributes(): if attr.name not in keyword_args: diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py index 50c2eee89..cfdf80e0d 100644 --- a/databricks/sdk/credentials_provider.py +++ b/databricks/sdk/credentials_provider.py @@ -233,8 +233,7 @@ def _ensure_host_present(cfg: 'Config', token_source_for: Callable[[str], TokenS cfg.host = f"https://{resp.json()['properties']['workspaceUrl']}" -@oauth_credentials_strategy('azure-client-secret', - ['is_azure', 'azure_client_id', 'azure_client_secret', 'azure_tenant_id']) +@oauth_credentials_strategy('azure-client-secret', ['is_azure', 'azure_client_id', 'azure_client_secret']) def azure_service_principal(cfg: 'Config') -> CredentialsProvider: """ Adds refreshed Azure Active Directory (AAD) Service Principal OAuth tokens to every request, while automatically resolving different Azure environment endpoints. 
""" @@ -248,6 +247,7 @@ def token_source_for(resource: str) -> TokenSource: use_params=True) _ensure_host_present(cfg, token_source_for) + cfg.load_azure_tenant_id() logger.info("Configured AAD token for Service Principal (%s)", cfg.azure_client_id) inner = token_source_for(cfg.effective_azure_login_app_id) cloud = token_source_for(cfg.arm_environment.service_management_endpoint) @@ -432,11 +432,13 @@ def refresh(self) -> Token: class AzureCliTokenSource(CliTokenSource): """ Obtain the token granted by `az login` CLI command """ - def __init__(self, resource: str, subscription: str = ""): + def __init__(self, resource: str, subscription: Optional[str] = None, tenant: Optional[str] = None): cmd = ["az", "account", "get-access-token", "--resource", resource, "--output", "json"] - if subscription != "": + if subscription is not None: cmd.append("--subscription") cmd.append(subscription) + if tenant: + cmd.extend(["--tenant", tenant]) super().__init__(cmd=cmd, token_type_field='tokenType', access_token_field='accessToken', @@ -464,8 +466,10 @@ def is_human_user(self) -> bool: @staticmethod def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': subscription = AzureCliTokenSource.get_subscription(cfg) - if subscription != "": - token_source = AzureCliTokenSource(resource, subscription) + if subscription is not None: + token_source = AzureCliTokenSource(resource, + subscription=subscription, + tenant=cfg.azure_tenant_id) try: # This will fail if the user has access to the workspace, but not to the subscription # itself. @@ -475,25 +479,26 @@ def for_resource(cfg: 'Config', resource: str) -> 'AzureCliTokenSource': except OSError: logger.warning("Failed to get token for subscription. Using resource only token.") - token_source = AzureCliTokenSource(resource) + token_source = AzureCliTokenSource(resource, subscription=None, tenant=cfg.azure_tenant_id) token_source.token() return token_source @staticmethod - def get_subscription(cfg: 'Config') -> str: + def get_subscription(cfg: 'Config') -> Optional[str]: resource = cfg.azure_workspace_resource_id if resource is None or resource == "": - return "" + return None components = resource.split('/') if len(components) < 3: logger.warning("Invalid azure workspace resource ID") - return "" + return None return components[2] @credentials_strategy('azure-cli', ['is_azure']) def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: """ Adds refreshed OAuth token granted by `az login` command to every request. """ + cfg.load_azure_tenant_id() token_source = None mgmt_token_source = None try: @@ -517,11 +522,6 @@ def azure_cli(cfg: 'Config') -> Optional[CredentialsProvider]: _ensure_host_present(cfg, lambda resource: AzureCliTokenSource.for_resource(cfg, resource)) logger.info("Using Azure CLI authentication with AAD tokens") - if not cfg.is_account_client and AzureCliTokenSource.get_subscription(cfg) == "": - logger.warning( - "azure_workspace_resource_id field not provided. " - "It is recommended to specify this field in the Databricks configuration to avoid authentication errors." 
- ) def inner() -> Dict[str, str]: token = token_source.token() diff --git a/tests/conftest.py b/tests/conftest.py index a7e520dc9..0f415ecf1 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -77,3 +77,16 @@ def set_az_path(monkeypatch): monkeypatch.setenv('COMSPEC', 'C:\\Windows\\System32\\WindowsPowerShell\\v1.0\\powershell.exe') else: monkeypatch.setenv('PATH', __tests__ + "/testdata:/bin") + + +@pytest.fixture +def mock_tenant(requests_mock): + + def stub_tenant_request(host, tenant_id="test-tenant-id"): + mock = requests_mock.get( + f'https://{host}/aad/auth', + status_code=302, + headers={'Location': f'https://login.microsoftonline.com/{tenant_id}/oauth2/authorize'}) + return mock + + return stub_tenant_request diff --git a/tests/test_auth.py b/tests/test_auth.py index fd73378b2..cd8f3cfc1 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -193,9 +193,10 @@ def test_config_azure_pat(): assert cfg.is_azure -def test_config_azure_cli_host(monkeypatch): +def test_config_azure_cli_host(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' @@ -229,9 +230,10 @@ def test_config_azure_cli_host_pat_conflict_with_config_file_present_without_def cfg = Config(token='x', azure_workspace_resource_id='/sub/rg/ws') -def test_config_azure_cli_host_and_resource_id(monkeypatch): +def test_config_azure_cli_host_and_resource_id(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' @@ -239,10 +241,11 @@ def test_config_azure_cli_host_and_resource_id(monkeypatch): assert cfg.is_azure -def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch): +def test_config_azure_cli_host_and_resource_i_d_configuration_precedence(monkeypatch, mock_tenant): monkeypatch.setenv('DATABRICKS_CONFIG_PROFILE', 'justhost') set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') cfg = Config(host='https://adb-123.4.azuredatabricks.net', azure_workspace_resource_id='/sub/rg/ws') assert cfg.auth_type == 'azure-cli' diff --git a/tests/test_auth_manual_tests.py b/tests/test_auth_manual_tests.py index e2874c427..34aa3a9c2 100644 --- a/tests/test_auth_manual_tests.py +++ b/tests/test_auth_manual_tests.py @@ -3,9 +3,10 @@ from .conftest import set_az_path, set_home -def test_azure_cli_workspace_header_present(monkeypatch): +def test_azure_cli_workspace_header_present(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', @@ -14,9 +15,10 @@ def test_azure_cli_workspace_header_present(monkeypatch): assert cfg.authenticate()['X-Databricks-Azure-Workspace-Resource-Id'] == resource_id -def test_azure_cli_user_with_management_access(monkeypatch): +def test_azure_cli_user_with_management_access(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') resource_id = 
'/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', @@ -24,9 +26,10 @@ def test_azure_cli_user_with_management_access(monkeypatch): assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate() -def test_azure_cli_user_no_management_access(monkeypatch): +def test_azure_cli_user_no_management_access(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') monkeypatch.setenv('FAIL_IF', 'https://management.core.windows.net/') resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', @@ -35,9 +38,10 @@ def test_azure_cli_user_no_management_access(monkeypatch): assert 'X-Databricks-Azure-SP-Management-Token' not in cfg.authenticate() -def test_azure_cli_fallback(monkeypatch): +def test_azure_cli_fallback(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') monkeypatch.setenv('FAIL_IF', 'subscription') resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', @@ -46,9 +50,10 @@ def test_azure_cli_fallback(monkeypatch): assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate() -def test_azure_cli_with_warning_on_stderr(monkeypatch): +def test_azure_cli_with_warning_on_stderr(monkeypatch, mock_tenant): set_home(monkeypatch, '/testdata/azure') set_az_path(monkeypatch) + mock_tenant('adb-123.4.azuredatabricks.net') monkeypatch.setenv('WARN', 'this is a warning') resource_id = '/subscriptions/123/resourceGroups/abc/providers/Microsoft.Databricks/workspaces/abc123' cfg = Config(auth_type='azure-cli', diff --git a/tests/test_config.py b/tests/test_config.py index 4d3a0ebef..4bab85cf1 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -1,3 +1,4 @@ +import os import platform import pytest @@ -6,7 +7,9 @@ from databricks.sdk.config import Config, with_product, with_user_agent_extra from databricks.sdk.version import __version__ -from .conftest import noop_credentials +from .conftest import noop_credentials, set_az_path + +__tests__ = os.path.dirname(__file__) def test_config_supports_legacy_credentials_provider(): @@ -74,3 +77,40 @@ def test_config_copy_deep_copies_user_agent_other_info(config): assert "blueprint/0.4.6" in config.user_agent assert "blueprint/0.4.6" in config_copy.user_agent useragent._reset_extra(original_extra) + + +def test_load_azure_tenant_id_404(requests_mock, monkeypatch): + set_az_path(monkeypatch) + mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=404) + cfg = Config(host="https://abc123.azuredatabricks.net") + assert cfg.azure_tenant_id is None + assert mock.called_once + + +def test_load_azure_tenant_id_no_location_header(requests_mock, monkeypatch): + set_az_path(monkeypatch) + mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=302) + cfg = Config(host="https://abc123.azuredatabricks.net") + assert cfg.azure_tenant_id is None + assert mock.called_once + + +def test_load_azure_tenant_id_unparsable_location_header(requests_mock, monkeypatch): + set_az_path(monkeypatch) + mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', + status_code=302, + headers={'Location': 'https://unexpected-location'}) + cfg = 
Config(host="https://abc123.azuredatabricks.net") + assert cfg.azure_tenant_id is None + assert mock.called_once + + +def test_load_azure_tenant_id_happy_path(requests_mock, monkeypatch): + set_az_path(monkeypatch) + mock = requests_mock.get( + 'https://abc123.azuredatabricks.net/aad/auth', + status_code=302, + headers={'Location': 'https://login.microsoftonline.com/tenant-id/oauth2/authorize'}) + cfg = Config(host="https://abc123.azuredatabricks.net") + assert cfg.azure_tenant_id == 'tenant-id' + assert mock.called_once From 6a9c534e39e8bb447725d6cdf3612549849f0bdd Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Mon, 22 Jul 2024 15:57:22 +0200 Subject: [PATCH 017/136] [Internal] Update OpenAPI spec (#715) ## Changes Update OpenAPI spec ## Tests - [X] `make test` run locally - [X] `make fmt` applied - [x] relevant integration tests applied --- .codegen/_openapi_sha | 2 +- .codegen/service.py.tmpl | 4 +- databricks/sdk/__init__.py | 65 +- databricks/sdk/service/billing.py | 689 ++++-- databricks/sdk/service/catalog.py | 51 +- databricks/sdk/service/compute.py | 182 +- databricks/sdk/service/dashboards.py | 598 ++++- databricks/sdk/service/iam.py | 144 +- databricks/sdk/service/jobs.py | 7 +- databricks/sdk/service/marketplace.py | 124 +- databricks/sdk/service/oauth2.py | 197 +- databricks/sdk/service/serving.py | 240 +- databricks/sdk/service/settings.py | 427 +++- databricks/sdk/service/sharing.py | 1 + databricks/sdk/service/sql.py | 2931 ++++++++++++++++++++----- tests/integration/test_sql.py | 2 +- tests/test_core.py | 4 +- 17 files changed, 4520 insertions(+), 1148 deletions(-) diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index c4b47ca14..ed18d818d 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -7437dabb9dadee402c1fc060df4c1ce8cc5369f0 \ No newline at end of file +37e2bbe0cbcbbbe78a06a018d4fab06314a26a40 \ No newline at end of file diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl index 643b1f33a..b4e6b1dc9 100644 --- a/.codegen/service.py.tmpl +++ b/.codegen/service.py.tmpl @@ -295,7 +295,7 @@ class {{.PascalName}}API:{{if .Description}} {{if .NeedsOffsetDedupe -}} # deduplicate items that may have been added during iteration seen = set() - {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.0/clusters/events")) }} + {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) }} query['{{.Pagination.Offset.Name}}'] = {{- if eq .Pagination.Increment 1 -}} 1 @@ -321,7 +321,7 @@ class {{.PascalName}}API:{{if .Description}} if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']: return {{if or (eq "GET" .Verb) (eq "HEAD" .Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}'] - {{- else if eq .Path "/api/2.0/clusters/events" -}} + {{- else if eq .Path "/api/2.1/clusters/events" -}} if 'next_page' not in json or not json['next_page']: return body = json['next_page'] diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 8485efbac..7603678e3 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -27,7 +27,7 @@ InstancePoolsAPI, InstanceProfilesAPI, LibrariesAPI, PolicyFamiliesAPI) -from databricks.sdk.service.dashboards import LakeviewAPI +from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI from databricks.sdk.service.iam import (AccountAccessControlAPI, AccountAccessControlProxyAPI, @@ 
-68,6 +68,7 @@ EsmEnablementAccountAPI, IpAccessListsAPI, NetworkConnectivityAPI, + NotificationDestinationsAPI, PersonalComputeAPI, RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, @@ -75,11 +76,13 @@ from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI, RecipientActivationAPI, RecipientsAPI, SharesAPI) -from databricks.sdk.service.sql import (AlertsAPI, DashboardsAPI, - DashboardWidgetsAPI, DataSourcesAPI, - DbsqlPermissionsAPI, QueriesAPI, +from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI, + DashboardsAPI, DashboardWidgetsAPI, + DataSourcesAPI, DbsqlPermissionsAPI, + QueriesAPI, QueriesLegacyAPI, QueryHistoryAPI, QueryVisualizationsAPI, + QueryVisualizationsLegacyAPI, StatementExecutionAPI, WarehousesAPI) from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI, VectorSearchIndexesAPI) @@ -166,6 +169,7 @@ def __init__(self, serving_endpoints = ServingEndpointsAPI(self._api_client) self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client) self._alerts = AlertsAPI(self._api_client) + self._alerts_legacy = AlertsLegacyAPI(self._api_client) self._apps = AppsAPI(self._api_client) self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client) self._catalogs = CatalogsAPI(self._api_client) @@ -190,6 +194,7 @@ def __init__(self, self._external_locations = ExternalLocationsAPI(self._api_client) self._files = FilesAPI(self._api_client) self._functions = FunctionsAPI(self._api_client) + self._genie = GenieAPI(self._api_client) self._git_credentials = GitCredentialsAPI(self._api_client) self._global_init_scripts = GlobalInitScriptsAPI(self._api_client) self._grants = GrantsAPI(self._api_client) @@ -203,6 +208,7 @@ def __init__(self, self._metastores = MetastoresAPI(self._api_client) self._model_registry = ModelRegistryAPI(self._api_client) self._model_versions = ModelVersionsAPI(self._api_client) + self._notification_destinations = NotificationDestinationsAPI(self._api_client) self._online_tables = OnlineTablesAPI(self._api_client) self._permission_migration = PermissionMigrationAPI(self._api_client) self._permissions = PermissionsAPI(self._api_client) @@ -219,8 +225,10 @@ def __init__(self, self._providers = ProvidersAPI(self._api_client) self._quality_monitors = QualityMonitorsAPI(self._api_client) self._queries = QueriesAPI(self._api_client) + self._queries_legacy = QueriesLegacyAPI(self._api_client) self._query_history = QueryHistoryAPI(self._api_client) self._query_visualizations = QueryVisualizationsAPI(self._api_client) + self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client) self._recipient_activation = RecipientActivationAPI(self._api_client) self._recipients = RecipientsAPI(self._api_client) self._registered_models = RegisteredModelsAPI(self._api_client) @@ -270,6 +278,11 @@ def alerts(self) -> AlertsAPI: """The alerts API can be used to perform CRUD operations on alerts.""" return self._alerts + @property + def alerts_legacy(self) -> AlertsLegacyAPI: + """The alerts API can be used to perform CRUD operations on alerts.""" + return self._alerts_legacy + @property def apps(self) -> AppsAPI: """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on.""" @@ -390,6 +403,11 @@ def functions(self) -> FunctionsAPI: """Functions implement User-Defined Functions (UDFs) in Unity Catalog.""" return self._functions + @property + def genie(self) -> GenieAPI: + 
"""Genie provides a no-code experience for business users, powered by AI/BI.""" + return self._genie + @property def git_credentials(self) -> GitCredentialsAPI: """Registers personal access token for Databricks to do operations on behalf of the user.""" @@ -455,6 +473,11 @@ def model_versions(self) -> ModelVersionsAPI: """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog.""" return self._model_versions + @property + def notification_destinations(self) -> NotificationDestinationsAPI: + """The notification destinations API lets you programmatically manage a workspace's notification destinations.""" + return self._notification_destinations + @property def online_tables(self) -> OnlineTablesAPI: """Online tables provide lower latency and higher QPS access to data from Delta tables.""" @@ -462,7 +485,7 @@ def online_tables(self) -> OnlineTablesAPI: @property def permission_migration(self) -> PermissionMigrationAPI: - """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.""" + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx.""" return self._permission_migration @property @@ -527,19 +550,29 @@ def quality_monitors(self) -> QualityMonitorsAPI: @property def queries(self) -> QueriesAPI: - """These endpoints are used for CRUD operations on query definitions.""" + """The queries API can be used to perform CRUD operations on queries.""" return self._queries + @property + def queries_legacy(self) -> QueriesLegacyAPI: + """These endpoints are used for CRUD operations on query definitions.""" + return self._queries_legacy + @property def query_history(self) -> QueryHistoryAPI: - """Access the history of queries through SQL warehouses.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT.""" return self._query_history @property def query_visualizations(self) -> QueryVisualizationsAPI: - """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace.""" return self._query_visualizations + @property + def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI: + """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace.""" + return self._query_visualizations_legacy + @property def recipient_activation(self) -> RecipientActivationAPI: """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`.""" @@ -742,7 +775,6 @@ def __init__(self, self._api_client = client.ApiClient(self._config) self._access_control = AccountAccessControlAPI(self._api_client) self._billable_usage = BillableUsageAPI(self._api_client) - self._budgets = BudgetsAPI(self._api_client) self._credentials = CredentialsAPI(self._api_client) self._custom_app_integration = CustomAppIntegrationAPI(self._api_client) self._encryption_keys = EncryptionKeysAPI(self._api_client) @@ -765,6 +797,7 @@ def __init__(self, self._vpc_endpoints = VpcEndpointsAPI(self._api_client) self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) self._workspaces = WorkspacesAPI(self._api_client) + self._budgets = BudgetsAPI(self._api_client) @property def 
config(self) -> client.Config: @@ -784,11 +817,6 @@ def billable_usage(self) -> BillableUsageAPI: """This API allows you to download billable usage logs for the specified account and date range.""" return self._billable_usage - @property - def budgets(self) -> BudgetsAPI: - """These APIs manage budget configuration including notifications for exceeding a budget for a period.""" - return self._budgets - @property def credentials(self) -> CredentialsAPI: """These APIs manage credential configurations for this workspace.""" @@ -796,7 +824,7 @@ def credentials(self) -> CredentialsAPI: @property def custom_app_integration(self) -> CustomAppIntegrationAPI: - """These APIs enable administrators to manage custom oauth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" + """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" return self._custom_app_integration @property @@ -851,7 +879,7 @@ def private_access(self) -> PrivateAccessAPI: @property def published_app_integration(self) -> PublishedAppIntegrationAPI: - """These APIs enable administrators to manage published oauth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" + """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" return self._published_app_integration @property @@ -899,6 +927,11 @@ def workspaces(self) -> WorkspacesAPI: """These APIs manage workspaces for this account.""" return self._workspaces + @property + def budgets(self) -> BudgetsAPI: + """These APIs manage budget configurations for this account.""" + return self._budgets + def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient: """Constructs a ``WorkspaceClient`` for the given workspace. diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index 1d4a773c6..d2ef50bc3 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -15,204 +15,372 @@ @dataclass -class Budget: - """Budget configuration to be created.""" +class ActionConfiguration: + action_configuration_id: Optional[str] = None + """Databricks action configuration ID.""" - name: str - """Human-readable name of the budget.""" + action_type: Optional[ActionConfigurationType] = None + """The type of the action.""" - period: str - """Period length in years, months, weeks and/or days. Examples: `1 month`, `30 days`, `1 year, 2 - months, 1 week, 2 days`""" + target: Optional[str] = None + """Target for the action. 
For example, an email address.""" - start_date: str - """Start date of the budget period calculation.""" + def as_dict(self) -> dict: + """Serializes the ActionConfiguration into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_configuration_id is not None: + body['action_configuration_id'] = self.action_configuration_id + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target + return body - target_amount: str - """Target amount of the budget per period in USD.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration: + """Deserializes the ActionConfiguration from a dictionary.""" + return cls(action_configuration_id=d.get('action_configuration_id', None), + action_type=_enum(d, 'action_type', ActionConfigurationType), + target=d.get('target', None)) - filter: str - """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches - this expression will be counted in this budget. - - Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace - * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag - name'` - tag of the cluster - - Supported comparison operators: * `=` - equal * `!=` - not equal - - Supported logical operators: `AND`, `OR`. - - Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my - value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR - sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`""" - alerts: Optional[List[BudgetAlert]] = None +class ActionConfigurationType(Enum): + + EMAIL_NOTIFICATION = 'EMAIL_NOTIFICATION' + + +@dataclass +class AlertConfiguration: + action_configurations: Optional[List[ActionConfiguration]] = None + """Configured actions for this alert. These define what happens when an alert enters a triggered + state.""" + + alert_configuration_id: Optional[str] = None + """Databricks alert configuration ID.""" + + quantity_threshold: Optional[str] = None + """The threshold for the budget alert to determine if it is in a triggered state. The number is + evaluated based on `quantity_type`.""" + + quantity_type: Optional[AlertConfigurationQuantityType] = None + """The way to calculate cost for this budget alert. 
This is what `quantity_threshold` is measured + in.""" - end_date: Optional[str] = None - """Optional end date of the budget.""" + time_period: Optional[AlertConfigurationTimePeriod] = None + """The time window of usage data for the budget.""" + + trigger_type: Optional[AlertConfigurationTriggerType] = None + """The evaluation method to determine when this budget alert is in a triggered state.""" def as_dict(self) -> dict: - """Serializes the Budget into a dictionary suitable for use as a JSON request body.""" + """Serializes the AlertConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts] - if self.end_date is not None: body['end_date'] = self.end_date - if self.filter is not None: body['filter'] = self.filter - if self.name is not None: body['name'] = self.name - if self.period is not None: body['period'] = self.period - if self.start_date is not None: body['start_date'] = self.start_date - if self.target_amount is not None: body['target_amount'] = self.target_amount + if self.action_configurations: + body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.alert_configuration_id is not None: + body['alert_configuration_id'] = self.alert_configuration_id + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> Budget: - """Deserializes the Budget from a dictionary.""" - return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert), - end_date=d.get('end_date', None), - filter=d.get('filter', None), - name=d.get('name', None), - period=d.get('period', None), - start_date=d.get('start_date', None), - target_amount=d.get('target_amount', None)) + def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration: + """Deserializes the AlertConfiguration from a dictionary.""" + return cls(action_configurations=_repeated_dict(d, 'action_configurations', ActionConfiguration), + alert_configuration_id=d.get('alert_configuration_id', None), + quantity_threshold=d.get('quantity_threshold', None), + quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), + time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), + trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + + +class AlertConfigurationQuantityType(Enum): + + LIST_PRICE_DOLLARS_USD = 'LIST_PRICE_DOLLARS_USD' + + +class AlertConfigurationTimePeriod(Enum): + + MONTH = 'MONTH' + + +class AlertConfigurationTriggerType(Enum): + + CUMULATIVE_SPENDING_EXCEEDED = 'CUMULATIVE_SPENDING_EXCEEDED' @dataclass -class BudgetAlert: - email_notifications: Optional[List[str]] = None - """List of email addresses to be notified when budget percentage is exceeded in the given period.""" +class BudgetConfiguration: + account_id: Optional[str] = None + """Databricks account ID.""" + + alert_configurations: Optional[List[AlertConfiguration]] = None + """Alerts to configure when this budget is in a triggered state. 
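As a side note on the generated model layer: the alert dataclasses above compose by nesting, and each generated dataclass round-trips losslessly through `as_dict`/`from_dict` (enums are serialized via `.value` and re-parsed with `_enum`, as the hunks above show). A minimal sketch using only names introduced in this patch; the threshold and email address are placeholders:

```python
from databricks.sdk.service.billing import (ActionConfiguration,
                                            ActionConfigurationType,
                                            AlertConfiguration,
                                            AlertConfigurationQuantityType,
                                            AlertConfigurationTimePeriod,
                                            AlertConfigurationTriggerType)

# Email a recipient once cumulative monthly spend exceeds $100 (list price).
alert = AlertConfiguration(
    quantity_threshold='100',
    quantity_type=AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
    time_period=AlertConfigurationTimePeriod.MONTH,
    trigger_type=AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
    action_configurations=[
        ActionConfiguration(action_type=ActionConfigurationType.EMAIL_NOTIFICATION,
                            target='finops@example.com')
    ])

# Dataclass equality is field-wise, so the dictionary round-trip is exact.
assert AlertConfiguration.from_dict(alert.as_dict()) == alert
```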
Budgets must have exactly one + alert configuration.""" + + budget_configuration_id: Optional[str] = None + """Databricks budget configuration ID.""" + + create_time: Optional[int] = None + """Creation time of this budget configuration.""" + + display_name: Optional[str] = None + """Human-readable name of budget configuration. Max Length: 128""" - min_percentage: Optional[int] = None - """Percentage of the target amount used in the currect period that will trigger a notification.""" + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. All + provided filters must be matched for usage to be included.""" + + update_time: Optional[int] = None + """Update time of this budget configuration.""" def as_dict(self) -> dict: - """Serializes the BudgetAlert into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfiguration into a dictionary suitable for use as a JSON request body.""" body = {} - if self.email_notifications: body['email_notifications'] = [v for v in self.email_notifications] - if self.min_percentage is not None: body['min_percentage'] = self.min_percentage + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetAlert: - """Deserializes the BudgetAlert from a dictionary.""" - return cls(email_notifications=d.get('email_notifications', None), - min_percentage=d.get('min_percentage', None)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration: + """Deserializes the BudgetConfiguration from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), + budget_configuration_id=d.get('budget_configuration_id', None), + create_time=d.get('create_time', None), + display_name=d.get('display_name', None), + filter=_from_dict(d, 'filter', BudgetConfigurationFilter), + update_time=d.get('update_time', None)) @dataclass -class BudgetList: - """List of budgets.""" +class BudgetConfigurationFilter: + tags: Optional[List[BudgetConfigurationFilterTagClause]] = None + """A list of tag keys and values that will limit the budget to usage that includes those specific + custom tags. 
Tags are case-sensitive and should be entered exactly as they appear in your usage + data.""" - budgets: Optional[List[BudgetWithStatus]] = None + workspace_id: Optional[BudgetConfigurationFilterWorkspaceIdClause] = None + """If provided, usage must match with the provided Databricks workspace IDs.""" def as_dict(self) -> dict: - """Serializes the BudgetList into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfigurationFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] + if self.tags: body['tags'] = [v.as_dict() for v in self.tags] + if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetList: - """Deserializes the BudgetList from a dictionary.""" - return cls(budgets=_repeated_dict(d, 'budgets', BudgetWithStatus)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter: + """Deserializes the BudgetConfigurationFilter from a dictionary.""" + return cls(tags=_repeated_dict(d, 'tags', BudgetConfigurationFilterTagClause), + workspace_id=_from_dict(d, 'workspace_id', BudgetConfigurationFilterWorkspaceIdClause)) @dataclass -class BudgetWithStatus: - """Budget configuration with daily status.""" +class BudgetConfigurationFilterClause: + operator: Optional[BudgetConfigurationFilterOperator] = None - alerts: Optional[List[BudgetAlert]] = None + values: Optional[List[str]] = None - budget_id: Optional[str] = None + def as_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterClause into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] + return body - creation_time: Optional[str] = None + @classmethod + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause: + """Deserializes the BudgetConfigurationFilterClause from a dictionary.""" + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), + values=d.get('values', None)) - end_date: Optional[str] = None - """Optional end date of the budget.""" - filter: Optional[str] = None - """SQL-like filter expression with workspaceId, SKU and tag. Usage in your account that matches - this expression will be counted in this budget. - - Supported properties on left-hand side of comparison: * `workspaceId` - the ID of the workspace - * `sku` - SKU of the cluster, e.g. `STANDARD_ALL_PURPOSE_COMPUTE` * `tag.tagName`, `tag.'tag - name'` - tag of the cluster - - Supported comparison operators: * `=` - equal * `!=` - not equal - - Supported logical operators: `AND`, `OR`. - - Examples: * `workspaceId=123 OR (sku='STANDARD_ALL_PURPOSE_COMPUTE' AND tag.'my tag'='my - value')` * `workspaceId!=456` * `sku='STANDARD_ALL_PURPOSE_COMPUTE' OR - sku='PREMIUM_ALL_PURPOSE_COMPUTE'` * `tag.name1='value1' AND tag.name2='value2'`""" +class BudgetConfigurationFilterOperator(Enum): + + IN = 'IN' - name: Optional[str] = None - """Human-readable name of the budget.""" - period: Optional[str] = None - """Period length in years, months, weeks and/or days. 
Examples: `1 month`, `30 days`, `1 year, 2 - months, 1 week, 2 days`""" +@dataclass +class BudgetConfigurationFilterTagClause: + key: Optional[str] = None + + value: Optional[BudgetConfigurationFilterClause] = None + + def as_dict(self) -> dict: + """Serializes the BudgetConfigurationFilterTagClause into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.key is not None: body['key'] = self.key + if self.value: body['value'] = self.value.as_dict() + return body - start_date: Optional[str] = None - """Start date of the budget period calculation.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause: + """Deserializes the BudgetConfigurationFilterTagClause from a dictionary.""" + return cls(key=d.get('key', None), value=_from_dict(d, 'value', BudgetConfigurationFilterClause)) - status_daily: Optional[List[BudgetWithStatusStatusDailyItem]] = None - """Amount used in the budget for each day (noncumulative).""" - target_amount: Optional[str] = None - """Target amount of the budget per period in USD.""" +@dataclass +class BudgetConfigurationFilterWorkspaceIdClause: + operator: Optional[BudgetConfigurationFilterOperator] = None - update_time: Optional[str] = None + values: Optional[List[int]] = None def as_dict(self) -> dict: - """Serializes the BudgetWithStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a dictionary suitable for use as a JSON request body.""" body = {} - if self.alerts: body['alerts'] = [v.as_dict() for v in self.alerts] - if self.budget_id is not None: body['budget_id'] = self.budget_id - if self.creation_time is not None: body['creation_time'] = self.creation_time - if self.end_date is not None: body['end_date'] = self.end_date - if self.filter is not None: body['filter'] = self.filter - if self.name is not None: body['name'] = self.name - if self.period is not None: body['period'] = self.period - if self.start_date is not None: body['start_date'] = self.start_date - if self.status_daily: body['status_daily'] = [v.as_dict() for v in self.status_daily] - if self.target_amount is not None: body['target_amount'] = self.target_amount - if self.update_time is not None: body['update_time'] = self.update_time + if self.operator is not None: body['operator'] = self.operator.value + if self.values: body['values'] = [v for v in self.values] return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatus: - """Deserializes the BudgetWithStatus from a dictionary.""" - return cls(alerts=_repeated_dict(d, 'alerts', BudgetAlert), - budget_id=d.get('budget_id', None), - creation_time=d.get('creation_time', None), - end_date=d.get('end_date', None), - filter=d.get('filter', None), - name=d.get('name', None), - period=d.get('period', None), - start_date=d.get('start_date', None), - status_daily=_repeated_dict(d, 'status_daily', BudgetWithStatusStatusDailyItem), - target_amount=d.get('target_amount', None), - update_time=d.get('update_time', None)) + def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause: + """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary.""" + return cls(operator=_enum(d, 'operator', BudgetConfigurationFilterOperator), + values=d.get('values', None)) @dataclass -class BudgetWithStatusStatusDailyItem: - amount: Optional[str] = None - """Amount used in this day in USD.""" +class CreateBudgetConfigurationBudget: + account_id: Optional[str] = None 
+ """Databricks account ID.""" + + alert_configurations: Optional[List[CreateBudgetConfigurationBudgetAlertConfigurations]] = None + """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one + alert configuration.""" - date: Optional[str] = None + display_name: Optional[str] = None + """Human-readable name of budget configuration. Max Length: 128""" + + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. All + provided filters must be matched for usage to be included.""" def as_dict(self) -> dict: - """Serializes the BudgetWithStatusStatusDailyItem into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.amount is not None: body['amount'] = self.amount - if self.date is not None: body['date'] = self.date + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> BudgetWithStatusStatusDailyItem: - """Deserializes the BudgetWithStatusStatusDailyItem from a dictionary.""" - return cls(amount=d.get('amount', None), date=d.get('date', None)) + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget: + """Deserializes the CreateBudgetConfigurationBudget from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', + CreateBudgetConfigurationBudgetAlertConfigurations), + display_name=d.get('display_name', None), + filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) + + +@dataclass +class CreateBudgetConfigurationBudgetActionConfigurations: + action_type: Optional[ActionConfigurationType] = None + """The type of the action.""" + + target: Optional[str] = None + """Target for the action. For example, an email address.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_type is not None: body['action_type'] = self.action_type.value + if self.target is not None: body['target'] = self.target + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations: + """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary.""" + return cls(action_type=_enum(d, 'action_type', ActionConfigurationType), target=d.get('target', None)) + + +@dataclass +class CreateBudgetConfigurationBudgetAlertConfigurations: + action_configurations: Optional[List[CreateBudgetConfigurationBudgetActionConfigurations]] = None + """Configured actions for this alert. These define what happens when an alert enters a triggered + state.""" + + quantity_threshold: Optional[str] = None + """The threshold for the budget alert to determine if it is in a triggered state. 
The number is + evaluated based on `quantity_type`.""" + + quantity_type: Optional[AlertConfigurationQuantityType] = None + """The way to calculate cost for this budget alert. This is what `quantity_threshold` is measured + in.""" + + time_period: Optional[AlertConfigurationTimePeriod] = None + """The time window of usage data for the budget.""" + + trigger_type: Optional[AlertConfigurationTriggerType] = None + """The evaluation method to determine when this budget alert is in a triggered state.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.action_configurations: + body['action_configurations'] = [v.as_dict() for v in self.action_configurations] + if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold + if self.quantity_type is not None: body['quantity_type'] = self.quantity_type.value + if self.time_period is not None: body['time_period'] = self.time_period.value + if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations: + """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary.""" + return cls(action_configurations=_repeated_dict(d, 'action_configurations', + CreateBudgetConfigurationBudgetActionConfigurations), + quantity_threshold=d.get('quantity_threshold', None), + quantity_type=_enum(d, 'quantity_type', AlertConfigurationQuantityType), + time_period=_enum(d, 'time_period', AlertConfigurationTimePeriod), + trigger_type=_enum(d, 'trigger_type', AlertConfigurationTriggerType)) + + +@dataclass +class CreateBudgetConfigurationRequest: + budget: CreateBudgetConfigurationBudget + """Properties of the new budget configuration.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: body['budget'] = self.budget.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest: + """Deserializes the CreateBudgetConfigurationRequest from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', CreateBudgetConfigurationBudget)) + + +@dataclass +class CreateBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + """The created budget configuration.""" + + def as_dict(self) -> dict: + """Serializes the CreateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: body['budget'] = self.budget.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse: + """Deserializes the CreateBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) @dataclass @@ -316,16 +484,16 @@ def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams: @dataclass -class DeleteResponse: +class DeleteBudgetConfigurationResponse: def as_dict(self) -> dict: - """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: - """Deserializes 
the DeleteResponse from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse: + """Deserializes the DeleteBudgetConfigurationResponse from a dictionary.""" return cls() @@ -361,6 +529,44 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: return cls(contents=d.get('contents', None)) +@dataclass +class GetBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + + def as_dict(self) -> dict: + """Serializes the GetBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budget: body['budget'] = self.budget.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse: + """Deserializes the GetBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) + + +@dataclass +class ListBudgetConfigurationsResponse: + budgets: Optional[List[BudgetConfiguration]] = None + + next_page_token: Optional[str] = None + """Token which can be sent as `page_token` to retrieve the next page of results. If this field is + omitted, there are no subsequent budgets.""" + + def as_dict(self) -> dict: + """Serializes the ListBudgetConfigurationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.budgets: body['budgets'] = [v.as_dict() for v in self.budgets] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse: + """Deserializes the ListBudgetConfigurationsResponse from a dictionary.""" + return cls(budgets=_repeated_dict(d, 'budgets', BudgetConfiguration), + next_page_token=d.get('next_page_token', None)) + + class LogDeliveryConfigStatus(Enum): """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the @@ -586,81 +792,110 @@ def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse: @dataclass -class UpdateLogDeliveryConfigurationStatusRequest: - status: LogDeliveryConfigStatus - """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). - Defaults to `ENABLED`. You can [enable or disable the - configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration - is not supported, so disable a log delivery configuration that is no longer needed.""" +class UpdateBudgetConfigurationBudget: + account_id: Optional[str] = None + """Databricks account ID.""" - log_delivery_configuration_id: Optional[str] = None - """Databricks log delivery configuration ID""" + alert_configurations: Optional[List[AlertConfiguration]] = None + """Alerts to configure when this budget is in a triggered state. Budgets must have exactly one + alert configuration.""" + + budget_configuration_id: Optional[str] = None + """Databricks budget configuration ID.""" + + display_name: Optional[str] = None + """Human-readable name of budget configuration. Max Length: 128""" + + filter: Optional[BudgetConfigurationFilter] = None + """Configured filters for this budget. These are applied to your account's usage to limit the scope + of what is considered for this budget. Leave empty to include all usage for this account. 
All + provided filters must be matched for usage to be included.""" def as_dict(self) -> dict: - """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationBudget into a dictionary suitable for use as a JSON request body.""" body = {} - if self.log_delivery_configuration_id is not None: - body['log_delivery_configuration_id'] = self.log_delivery_configuration_id - if self.status is not None: body['status'] = self.status.value + if self.account_id is not None: body['account_id'] = self.account_id + if self.alert_configurations: + body['alert_configurations'] = [v.as_dict() for v in self.alert_configurations] + if self.budget_configuration_id is not None: + body['budget_configuration_id'] = self.budget_configuration_id + if self.display_name is not None: body['display_name'] = self.display_name + if self.filter: body['filter'] = self.filter.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest: - """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" - return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), - status=_enum(d, 'status', LogDeliveryConfigStatus)) + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget: + """Deserializes the UpdateBudgetConfigurationBudget from a dictionary.""" + return cls(account_id=d.get('account_id', None), + alert_configurations=_repeated_dict(d, 'alert_configurations', AlertConfiguration), + budget_configuration_id=d.get('budget_configuration_id', None), + display_name=d.get('display_name', None), + filter=_from_dict(d, 'filter', BudgetConfigurationFilter)) @dataclass -class UpdateResponse: +class UpdateBudgetConfigurationRequest: + budget: UpdateBudgetConfigurationBudget + """The updated budget. 
This will overwrite the budget specified by the budget ID.""" + + budget_id: Optional[str] = None + """The Databricks budget configuration ID.""" def as_dict(self) -> dict: - """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationRequest into a dictionary suitable for use as a JSON request body.""" body = {} + if self.budget: body['budget'] = self.budget.as_dict() + if self.budget_id is not None: body['budget_id'] = self.budget_id return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: - """Deserializes the UpdateResponse from a dictionary.""" - return cls() + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest: + """Deserializes the UpdateBudgetConfigurationRequest from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', UpdateBudgetConfigurationBudget), + budget_id=d.get('budget_id', None)) @dataclass -class WrappedBudget: - budget: Budget - """Budget configuration to be created.""" - - budget_id: Optional[str] = None - """Budget ID""" +class UpdateBudgetConfigurationResponse: + budget: Optional[BudgetConfiguration] = None + """The updated budget.""" def as_dict(self) -> dict: - """Serializes the WrappedBudget into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.budget: body['budget'] = self.budget.as_dict() - if self.budget_id is not None: body['budget_id'] = self.budget_id return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> WrappedBudget: - """Deserializes the WrappedBudget from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', Budget), budget_id=d.get('budget_id', None)) + def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse: + """Deserializes the UpdateBudgetConfigurationResponse from a dictionary.""" + return cls(budget=_from_dict(d, 'budget', BudgetConfiguration)) @dataclass -class WrappedBudgetWithStatus: - budget: BudgetWithStatus - """Budget configuration with daily status.""" +class UpdateLogDeliveryConfigurationStatusRequest: + status: LogDeliveryConfigStatus + """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). + Defaults to `ENABLED`. You can [enable or disable the + configuration](#operation/patch-log-delivery-config-status) later. 
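Enabling or disabling in practice goes through this request shape. A sketch assuming account-level credentials; `patch_status` is the `LogDeliveryAPI` method that consumes it (the method body is not shown in this hunk), and the configuration ID is a placeholder:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import LogDeliveryConfigStatus

a = AccountClient()  # picks up account host/credentials from the environment

# Log delivery configurations cannot be deleted, only disabled.
a.log_delivery.patch_status(
    log_delivery_configuration_id='01234567-89ab-cdef-0123-456789abcdef',
    status=LogDeliveryConfigStatus.DISABLED)
```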
Deletion of a configuration + is not supported, so disable a log delivery configuration that is no longer needed.""" + + log_delivery_configuration_id: Optional[str] = None + """Databricks log delivery configuration ID""" def as_dict(self) -> dict: - """Serializes the WrappedBudgetWithStatus into a dictionary suitable for use as a JSON request body.""" + """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.budget: body['budget'] = self.budget.as_dict() + if self.log_delivery_configuration_id is not None: + body['log_delivery_configuration_id'] = self.log_delivery_configuration_id + if self.status is not None: body['status'] = self.status.value return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> WrappedBudgetWithStatus: - """Deserializes the WrappedBudgetWithStatus from a dictionary.""" - return cls(budget=_from_dict(d, 'budget', BudgetWithStatus)) + def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest: + """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary.""" + return cls(log_delivery_configuration_id=d.get('log_delivery_configuration_id', None), + status=_enum(d, 'status', LogDeliveryConfigStatus)) @dataclass @@ -767,39 +1002,42 @@ def download(self, class BudgetsAPI: - """These APIs manage budget configuration including notifications for exceeding a budget for a period. They - can also retrieve the status of each budget.""" + """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your + account. You can set up budgets to either track account-wide spending, or apply filters to track the + spending of specific teams, projects, or workspaces.""" def __init__(self, api_client): self._api = api_client - def create(self, budget: Budget) -> WrappedBudgetWithStatus: - """Create a new budget. + def create(self, budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse: + """Create new budget. - Creates a new budget in the specified account. + Create a new budget configuration for an account. For full details, see + https://docs.databricks.com/en/admin/account-settings/budgets.html. - :param budget: :class:`Budget` - Budget configuration to be created. + :param budget: :class:`CreateBudgetConfigurationBudget` + Properties of the new budget configuration. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`CreateBudgetConfigurationResponse` """ body = {} if budget is not None: body['budget'] = budget.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', - f'/api/2.0/accounts/{self._api.account_id}/budget', + f'/api/2.1/accounts/{self._api.account_id}/budgets', body=body, headers=headers) - return WrappedBudgetWithStatus.from_dict(res) + return CreateBudgetConfigurationResponse.from_dict(res) def delete(self, budget_id: str): """Delete budget. - Deletes the budget specified by its UUID. + Deletes a budget configuration for an account. Both account and budget configuration are specified by + ID. This cannot be undone. :param budget_id: str - Budget ID + The Databricks budget configuration ID. 
""" @@ -807,63 +1045,78 @@ def delete(self, budget_id: str): headers = {'Accept': 'application/json', } self._api.do('DELETE', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', headers=headers) - def get(self, budget_id: str) -> WrappedBudgetWithStatus: - """Get budget and its status. + def get(self, budget_id: str) -> GetBudgetConfigurationResponse: + """Get budget. - Gets the budget specified by its UUID, including noncumulative status for each day that the budget is - configured to include. + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str - Budget ID + The Databricks budget configuration ID. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`GetBudgetConfigurationResponse` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', headers=headers) - return WrappedBudgetWithStatus.from_dict(res) + return GetBudgetConfigurationResponse.from_dict(res) - def list(self) -> Iterator[BudgetWithStatus]: + def list(self, *, page_token: Optional[str] = None) -> Iterator[BudgetConfiguration]: """Get all budgets. - Gets all budgets associated with this account, including noncumulative status for each day that the - budget is configured to include. + Gets all budgets associated with this account. + + :param page_token: str (optional) + A page token received from a previous get all budget configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. - :returns: Iterator over :class:`BudgetWithStatus` + :returns: Iterator over :class:`BudgetConfiguration` """ + query = {} + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', f'/api/2.0/accounts/{self._api.account_id}/budget', headers=headers) - parsed = BudgetList.from_dict(json).budgets - return parsed if parsed is not None else [] - - def update(self, budget_id: str, budget: Budget): + while True: + json = self._api.do('GET', + f'/api/2.1/accounts/{self._api.account_id}/budgets', + query=query, + headers=headers) + if 'budgets' in json: + for v in json['budgets']: + yield BudgetConfiguration.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, budget_id: str, + budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse: """Modify budget. - Modifies a budget in this account. Budget properties are completely overwritten. + Updates a budget configuration for an account. Both account and budget configuration are specified by + ID. :param budget_id: str - Budget ID - :param budget: :class:`Budget` - Budget configuration to be created. - + The Databricks budget configuration ID. + :param budget: :class:`UpdateBudgetConfigurationBudget` + The updated budget. This will overwrite the budget specified by the budget ID. 
+ :returns: :class:`UpdateBudgetConfigurationResponse` """ body = {} if budget is not None: body['budget'] = budget.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('PATCH', - f'/api/2.0/accounts/{self._api.account_id}/budget/{budget_id}', - body=body, - headers=headers) + res = self._api.do('PUT', + f'/api/2.1/accounts/{self._api.account_id}/budgets/{budget_id}', + body=body, + headers=headers) + return UpdateBudgetConfigurationResponse.from_dict(res) class LogDeliveryAPI: diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index e6456bc01..c6da9b8c5 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -2418,6 +2418,13 @@ class FunctionParameterType(Enum): PARAM = 'PARAM' +class GetBindingsSecurableType(Enum): + + CATALOG = 'catalog' + EXTERNAL_LOCATION = 'external_location' + STORAGE_CREDENTIAL = 'storage_credential' + + @dataclass class GetMetastoreSummaryResponse: cloud: Optional[str] = None @@ -3742,7 +3749,6 @@ class OnlineTableState(Enum): ONLINE_CONTINUOUS_UPDATE = 'ONLINE_CONTINUOUS_UPDATE' ONLINE_NO_PENDING_UPDATE = 'ONLINE_NO_PENDING_UPDATE' ONLINE_PIPELINE_FAILED = 'ONLINE_PIPELINE_FAILED' - ONLINE_TABLE_STATE_UNSPECIFIED = 'ONLINE_TABLE_STATE_UNSPECIFIED' ONLINE_TRIGGERED_UPDATE = 'ONLINE_TRIGGERED_UPDATE' ONLINE_UPDATING_PIPELINE_RESOURCES = 'ONLINE_UPDATING_PIPELINE_RESOURCES' PROVISIONING = 'PROVISIONING' @@ -3935,6 +3941,7 @@ class Privilege(Enum): CREATE_VIEW = 'CREATE_VIEW' CREATE_VOLUME = 'CREATE_VOLUME' EXECUTE = 'EXECUTE' + MANAGE = 'MANAGE' MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST' MODIFY = 'MODIFY' READ_FILES = 'READ_FILES' @@ -4849,6 +4856,13 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse: return cls() +class UpdateBindingsSecurableType(Enum): + + CATALOG = 'catalog' + EXTERNAL_LOCATION = 'external_location' + STORAGE_CREDENTIAL = 'storage_credential' + + @dataclass class UpdateCatalog: comment: Optional[str] = None @@ -5492,8 +5506,8 @@ class UpdateWorkspaceBindingsParameters: securable_name: Optional[str] = None """The name of the securable.""" - securable_type: Optional[str] = None - """The type of the securable.""" + securable_type: Optional[UpdateBindingsSecurableType] = None + """The type of the securable to bind to a workspace.""" def as_dict(self) -> dict: """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body.""" @@ -5501,7 +5515,7 @@ def as_dict(self) -> dict: if self.add: body['add'] = [v.as_dict() for v in self.add] if self.remove: body['remove'] = [v.as_dict() for v in self.remove] if self.securable_name is not None: body['securable_name'] = self.securable_name - if self.securable_type is not None: body['securable_type'] = self.securable_type + if self.securable_type is not None: body['securable_type'] = self.securable_type.value return body @classmethod @@ -5510,7 +5524,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters: return cls(add=_repeated_dict(d, 'add', WorkspaceBinding), remove=_repeated_dict(d, 'remove', WorkspaceBinding), securable_name=d.get('securable_name', None), - securable_type=d.get('securable_type', None)) + securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType)) @dataclass @@ -8172,7 +8186,7 @@ def create(self, res = self._api.do('POST', '/api/2.1/unity-catalog/schemas', body=body, headers=headers) return SchemaInfo.from_dict(res) - def delete(self, full_name: str): + def delete(self, full_name: 
str, *, force: Optional[bool] = None): """Delete a schema. Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an @@ -8180,13 +8194,17 @@ def delete(self, full_name: str): :param full_name: str Full name of the schema. + :param force: bool (optional) + Force deletion even if the schema is not empty. """ + query = {} + if force is not None: query['force'] = force headers = {'Accept': 'application/json', } - self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', headers=headers) + self._api.do('DELETE', f'/api/2.1/unity-catalog/schemas/{full_name}', query=query, headers=headers) def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> SchemaInfo: """Get a schema. @@ -9172,7 +9190,7 @@ class WorkspaceBindingsAPI: the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - Securables that support binding: - catalog""" + Securable types that support binding: - catalog - storage_credential - external_location""" def __init__(self, api_client): self._api = api_client @@ -9196,14 +9214,15 @@ def get(self, name: str) -> CurrentWorkspaceBindings: headers=headers) return CurrentWorkspaceBindings.from_dict(res) - def get_bindings(self, securable_type: str, securable_name: str) -> WorkspaceBindingsResponse: + def get_bindings(self, securable_type: GetBindingsSecurableType, + securable_name: str) -> WorkspaceBindingsResponse: """Get securable workspace bindings. Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`GetBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. @@ -9213,7 +9232,7 @@ def get_bindings(self, securable_type: str, securable_name: str) -> WorkspaceBin headers = {'Accept': 'application/json', } res = self._api.do('GET', - f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', headers=headers) return WorkspaceBindingsResponse.from_dict(res) @@ -9248,7 +9267,7 @@ def update(self, return CurrentWorkspaceBindings.from_dict(res) def update_bindings(self, - securable_type: str, + securable_type: UpdateBindingsSecurableType, securable_name: str, *, add: Optional[List[WorkspaceBinding]] = None, @@ -9258,8 +9277,8 @@ def update_bindings(self, Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`UpdateBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. 
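The catalog changes above alter two call signatures; a sketch of both, assuming a configured `WorkspaceClient` and placeholder securable names and workspace IDs:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (GetBindingsSecurableType,
                                            UpdateBindingsSecurableType,
                                            WorkspaceBinding)

w = WorkspaceClient()

# Schemas can now be force-deleted even when they are not empty.
w.schemas.delete(full_name='main.sandbox', force=True)

# Workspace bindings take a closed enum instead of a free-form string; the
# enum's .value is what lands in the request path.
w.workspace_bindings.get_bindings(
    securable_type=GetBindingsSecurableType.EXTERNAL_LOCATION,
    securable_name='my_external_location')

w.workspace_bindings.update_bindings(
    securable_type=UpdateBindingsSecurableType.EXTERNAL_LOCATION,
    securable_name='my_external_location',
    add=[WorkspaceBinding(workspace_id=1234567890)])
```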
:param add: List[:class:`WorkspaceBinding`] (optional) @@ -9275,7 +9294,7 @@ def update_bindings(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('PATCH', - f'/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', body=body, headers=headers) return WorkspaceBindingsResponse.from_dict(res) diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 4e6a02152..bbfda7891 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -4461,11 +4461,8 @@ class Library: """Specification of a CRAN library to be installed as part of the library""" egg: Optional[str] = None - """URI of the egg library to install. Supported URIs include Workspace paths, Unity Catalog Volumes - paths, and S3 URIs. For example: `{ "egg": "/Workspace/path/to/library.egg" }`, `{ "egg" : - "/Volumes/path/to/library.egg" }` or `{ "egg": "s3://my-bucket/library.egg" }`. If S3 is used, - please make sure the cluster has read access on the library. You may need to launch the cluster - with an IAM role to access the S3 URI.""" + """Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is + not supported in Databricks Runtime 14.0 and above.""" jar: Optional[str] = None """URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes @@ -4603,21 +4600,103 @@ def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse: return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) +@dataclass +class ListClustersFilterBy: + cluster_sources: Optional[List[ClusterSource]] = None + """The source of cluster creation.""" + + cluster_states: Optional[List[State]] = None + """The current state of the clusters.""" + + is_pinned: Optional[bool] = None + """Whether the clusters are pinned or not.""" + + policy_id: Optional[str] = None + """The ID of the cluster policy used to create the cluster if applicable.""" + + def as_dict(self) -> dict: + """Serializes the ListClustersFilterBy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_sources: body['cluster_sources'] = [v.value for v in self.cluster_sources] + if self.cluster_states: body['cluster_states'] = [v.value for v in self.cluster_states] + if self.is_pinned is not None: body['is_pinned'] = self.is_pinned + if self.policy_id is not None: body['policy_id'] = self.policy_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy: + """Deserializes the ListClustersFilterBy from a dictionary.""" + return cls(cluster_sources=_repeated_enum(d, 'cluster_sources', ClusterSource), + cluster_states=_repeated_enum(d, 'cluster_states', State), + is_pinned=d.get('is_pinned', None), + policy_id=d.get('policy_id', None)) + + @dataclass class ListClustersResponse: clusters: Optional[List[ClusterDetails]] = None """""" + next_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the next page of results. If the value is + "", it means no further results for the request.""" + + prev_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the previous page of results. 
If the + value is "", it means no further results for the request.""" + def as_dict(self) -> dict: """Serializes the ListClustersResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse: """Deserializes the ListClustersResponse from a dictionary.""" - return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails)) + return cls(clusters=_repeated_dict(d, 'clusters', ClusterDetails), + next_page_token=d.get('next_page_token', None), + prev_page_token=d.get('prev_page_token', None)) + + +@dataclass +class ListClustersSortBy: + direction: Optional[ListClustersSortByDirection] = None + """The direction to sort by.""" + + field: Optional[ListClustersSortByField] = None + """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest + precedence: cluster state, pinned or unpinned, then cluster name.""" + + def as_dict(self) -> dict: + """Serializes the ListClustersSortBy into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.direction is not None: body['direction'] = self.direction.value + if self.field is not None: body['field'] = self.field.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy: + """Deserializes the ListClustersSortBy from a dictionary.""" + return cls(direction=_enum(d, 'direction', ListClustersSortByDirection), + field=_enum(d, 'field', ListClustersSortByField)) + + +class ListClustersSortByDirection(Enum): + """The direction to sort by.""" + + ASC = 'ASC' + DESC = 'DESC' + + +class ListClustersSortByField(Enum): + """The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest + precedence: cluster state, pinned or unpinned, then cluster name.""" + + CLUSTER_NAME = 'CLUSTER_NAME' + DEFAULT = 'DEFAULT' @dataclass @@ -6174,9 +6253,8 @@ class ClustersAPI: restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters - terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep - an all-purpose cluster configuration even after it has been terminated for more than 30 days, an + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. 
To + keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list.""" def __init__(self, api_client): @@ -6263,7 +6341,7 @@ def change_owner(self, cluster_id: str, owner_username: str): if owner_username is not None: body['owner_username'] = owner_username headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/change-owner', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/change-owner', body=body, headers=headers) def create(self, spark_version: str, @@ -6462,7 +6540,7 @@ def create(self, if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/create', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/create', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=CreateClusterResponse.from_dict(op_response), cluster_id=op_response['cluster_id']) @@ -6546,7 +6624,7 @@ def delete(self, cluster_id: str) -> Wait[ClusterDetails]: if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/delete', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/delete', body=body, headers=headers) return Wait(self.wait_get_cluster_terminated, response=DeleteClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -6756,7 +6834,7 @@ def edit(self, if workload_type is not None: body['workload_type'] = workload_type.as_dict() headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/edit', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/edit', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=EditClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -6867,7 +6945,7 @@ def events(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } while True: - json = self._api.do('POST', '/api/2.0/clusters/events', body=body, headers=headers) + json = self._api.do('POST', '/api/2.1/clusters/events', body=body, headers=headers) if 'events' in json: for v in json['events']: yield ClusterEvent.from_dict(v) @@ -6891,7 +6969,7 @@ def get(self, cluster_id: str) -> ClusterDetails: if cluster_id is not None: query['cluster_id'] = cluster_id headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/get', query=query, headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/get', query=query, headers=headers) return ClusterDetails.from_dict(res) def get_permission_levels(self, cluster_id: str) -> GetClusterPermissionLevelsResponse: @@ -6928,33 +7006,46 @@ def get_permissions(self, cluster_id: str) -> ClusterPermissions: res = self._api.do('GET', f'/api/2.0/permissions/clusters/{cluster_id}', headers=headers) return ClusterPermissions.from_dict(res) - def list(self, *, can_use_client: Optional[str] = None) -> Iterator[ClusterDetails]: - """List all clusters. 
- - Return information about all pinned clusters, active clusters, up to 200 of the most recently - terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job - clusters in the past 30 days. - - For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in - the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1 - pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently - terminated job clusters. - - :param can_use_client: str (optional) - Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS. - No input for this field will get all clusters in the workspace without filtering on its supported - client + def list(self, + *, + filter_by: Optional[ListClustersFilterBy] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + sort_by: Optional[ListClustersSortBy] = None) -> Iterator[ClusterDetails]: + """List clusters. + + Return information about all pinned and active clusters, and all clusters terminated within the last + 30 days. Clusters terminated prior to this period are not included. + + :param filter_by: :class:`ListClustersFilterBy` (optional) + Filters to apply to the list of clusters. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + Use next_page_token or prev_page_token returned from the previous request to list the next or + previous page of clusters respectively. + :param sort_by: :class:`ListClustersSortBy` (optional) + Sort the list of clusters by a specific criteria. :returns: Iterator over :class:`ClusterDetails` """ query = {} - if can_use_client is not None: query['can_use_client'] = can_use_client + if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if sort_by is not None: query['sort_by'] = sort_by.as_dict() headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.0/clusters/list', query=query, headers=headers) - parsed = ListClustersResponse.from_dict(json).clusters - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/clusters/list', query=query, headers=headers) + if 'clusters' in json: + for v in json['clusters']: + yield ClusterDetails.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def list_node_types(self) -> ListNodeTypesResponse: """List node types. 
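Taken together with the filter and sort types above, the rewritten `list()` becomes a transparent paginator: it re-issues the GET with `page_token` taken from each response's `next_page_token` until the token is empty. A caller-side sketch, assuming the usual `w.clusters` accessor (the filter values are arbitrary):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import (ListClustersFilterBy,
                                            ListClustersSortBy,
                                            ListClustersSortByField, State)

w = WorkspaceClient()

filter_by = ListClustersFilterBy(cluster_states=[State.TERMINATED])
sort_by = ListClustersSortBy(field=ListClustersSortByField.CLUSTER_NAME)

# list() now follows next_page_token internally (see the while-loop above),
# so callers simply iterate over ClusterDetails across all pages.
for cluster in w.clusters.list(filter_by=filter_by,
                               sort_by=sort_by,
                               page_size=50):
    print(cluster.cluster_id, cluster.cluster_name)
```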
@@ -6966,7 +7057,7 @@ def list_node_types(self) -> ListNodeTypesResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-node-types', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/list-node-types', headers=headers) return ListNodeTypesResponse.from_dict(res) def list_zones(self) -> ListAvailableZonesResponse: @@ -6980,7 +7071,7 @@ def list_zones(self) -> ListAvailableZonesResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/list-zones', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/list-zones', headers=headers) return ListAvailableZonesResponse.from_dict(res) def permanent_delete(self, cluster_id: str): @@ -7001,7 +7092,7 @@ def permanent_delete(self, cluster_id: str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/permanent-delete', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/permanent-delete', body=body, headers=headers) def pin(self, cluster_id: str): """Pin cluster. @@ -7018,7 +7109,7 @@ def pin(self, cluster_id: str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/pin', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/pin', body=body, headers=headers) def resize(self, cluster_id: str, @@ -7055,7 +7146,7 @@ def resize(self, if num_workers is not None: body['num_workers'] = num_workers headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/resize', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/resize', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=ResizeClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7089,7 +7180,7 @@ def restart(self, cluster_id: str, *, restart_user: Optional[str] = None) -> Wai if restart_user is not None: body['restart_user'] = restart_user headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/restart', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/restart', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=RestartClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7134,7 +7225,7 @@ def spark_versions(self) -> GetSparkVersionsResponse: headers = {'Accept': 'application/json', } - res = self._api.do('GET', '/api/2.0/clusters/spark-versions', headers=headers) + res = self._api.do('GET', '/api/2.1/clusters/spark-versions', headers=headers) return GetSparkVersionsResponse.from_dict(res) def start(self, cluster_id: str) -> Wait[ClusterDetails]: @@ -7158,7 +7249,7 @@ def start(self, cluster_id: str) -> Wait[ClusterDetails]: if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - op_response = self._api.do('POST', '/api/2.0/clusters/start', body=body, headers=headers) + op_response = self._api.do('POST', '/api/2.1/clusters/start', body=body, headers=headers) return Wait(self.wait_get_cluster_running, response=StartClusterResponse.from_dict(op_response), cluster_id=cluster_id) @@ -7182,7 +7273,7 @@ def unpin(self, cluster_id: 
str): if cluster_id is not None: body['cluster_id'] = cluster_id headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('POST', '/api/2.0/clusters/unpin', body=body, headers=headers) + self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers) def update_permissions( self, @@ -7209,7 +7300,8 @@ def update_permissions( class CommandExecutionAPI: - """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.""" + """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API + only supports (classic) all-purpose clusters. Serverless compute is not supported.""" def __init__(self, api_client): self._api = api_client diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index b24d03183..bf571dd49 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -3,14 +3,20 @@ from __future__ import annotations import logging +import random +import time from dataclasses import dataclass +from datetime import timedelta from enum import Enum -from typing import Dict, Iterator, List, Optional +from typing import Callable, Dict, Iterator, List, Optional -from ._internal import _enum, _from_dict, _repeated_dict +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict _LOG = logging.getLogger('databricks.sdk') +from databricks.sdk.service import sql + # all definitions in this file are in alphabetical order @@ -233,6 +239,242 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse: return cls() +@dataclass +class GenieAttachment: + """Genie AI Response""" + + query: Optional[QueryAttachment] = None + + text: Optional[TextAttachment] = None + + def as_dict(self) -> dict: + """Serializes the GenieAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.query: body['query'] = self.query.as_dict() + if self.text: body['text'] = self.text.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieAttachment: + """Deserializes the GenieAttachment from a dictionary.""" + return cls(query=_from_dict(d, 'query', QueryAttachment), text=_from_dict(d, 'text', TextAttachment)) + + +@dataclass +class GenieConversation: + id: str + """Conversation ID""" + + space_id: str + """Genie space ID""" + + user_id: int + """ID of the user who created the conversation""" + + title: str + """Conversation title""" + + created_timestamp: Optional[int] = None + """Timestamp when the message was created""" + + last_updated_timestamp: Optional[int] = None + """Timestamp when the message was last updated""" + + def as_dict(self) -> dict: + """Serializes the GenieConversation into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.space_id is not None: body['space_id'] = self.space_id + if self.title is not None: body['title'] = self.title + if self.user_id is not None: body['user_id'] = self.user_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieConversation: + """Deserializes the GenieConversation from a dictionary.""" + return cls(created_timestamp=d.get('created_timestamp', None), + id=d.get('id', 
None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + space_id=d.get('space_id', None), + title=d.get('title', None), + user_id=d.get('user_id', None)) + + +@dataclass +class GenieCreateConversationMessageRequest: + content: str + """User message content.""" + + conversation_id: Optional[str] = None + """The ID associated with the conversation.""" + + space_id: Optional[str] = None + """The ID associated with the Genie space where the conversation is started.""" + + def as_dict(self) -> dict: + """Serializes the GenieCreateConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.space_id is not None: body['space_id'] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest: + """Deserializes the GenieCreateConversationMessageRequest from a dictionary.""" + return cls(content=d.get('content', None), + conversation_id=d.get('conversation_id', None), + space_id=d.get('space_id', None)) + + +@dataclass +class GenieGetMessageQueryResultResponse: + statement_response: Optional[sql.StatementResponse] = None + """SQL Statement Execution response. See [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) for more details.""" + + def as_dict(self) -> dict: + """Serializes the GenieGetMessageQueryResultResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.statement_response: body['statement_response'] = self.statement_response.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse: + """Deserializes the GenieGetMessageQueryResultResponse from a dictionary.""" + return cls(statement_response=_from_dict(d, 'statement_response', sql.StatementResponse)) + + +@dataclass +class GenieMessage: + id: str + """Message ID""" + + space_id: str + """Genie space ID""" + + conversation_id: str + """Conversation ID""" + + content: str + """User message content""" + + attachments: Optional[List[GenieAttachment]] = None + """AI produced response to the message""" + + created_timestamp: Optional[int] = None + """Timestamp when the message was created""" + + error: Optional[MessageError] = None + """Error message if AI failed to respond to the message""" + + last_updated_timestamp: Optional[int] = None + """Timestamp when the message was last updated""" + + query_result: Optional[Result] = None + """The result of SQL query if the message has a query attachment""" + + status: Optional[MessageStatus] = None + """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data + sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * + `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling + [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a + response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message + processing is completed. Results are in the `attachments` field. Get the SQL query result by + calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message + has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user + needs to execute the query again. 
* `CANCELLED`: Message has been cancelled.""" + + user_id: Optional[int] = None + """ID of the user who created the message""" + + def as_dict(self) -> dict: + """Serializes the GenieMessage into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.attachments: body['attachments'] = [v.as_dict() for v in self.attachments] + if self.content is not None: body['content'] = self.content + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp + if self.error: body['error'] = self.error.as_dict() + if self.id is not None: body['id'] = self.id + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query_result: body['query_result'] = self.query_result.as_dict() + if self.space_id is not None: body['space_id'] = self.space_id + if self.status is not None: body['status'] = self.status.value + if self.user_id is not None: body['user_id'] = self.user_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieMessage: + """Deserializes the GenieMessage from a dictionary.""" + return cls(attachments=_repeated_dict(d, 'attachments', GenieAttachment), + content=d.get('content', None), + conversation_id=d.get('conversation_id', None), + created_timestamp=d.get('created_timestamp', None), + error=_from_dict(d, 'error', MessageError), + id=d.get('id', None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + query_result=_from_dict(d, 'query_result', Result), + space_id=d.get('space_id', None), + status=_enum(d, 'status', MessageStatus), + user_id=d.get('user_id', None)) + + +@dataclass +class GenieStartConversationMessageRequest: + content: str + """The text of the message that starts the conversation.""" + + space_id: Optional[str] = None + """The ID associated with the Genie space where you want to start a conversation.""" + + def as_dict(self) -> dict: + """Serializes the GenieStartConversationMessageRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + if self.space_id is not None: body['space_id'] = self.space_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest: + """Deserializes the GenieStartConversationMessageRequest from a dictionary.""" + return cls(content=d.get('content', None), space_id=d.get('space_id', None)) + + +@dataclass +class GenieStartConversationResponse: + message_id: str + """Message ID""" + + conversation_id: str + """Conversation ID""" + + conversation: Optional[GenieConversation] = None + + message: Optional[GenieMessage] = None + + def as_dict(self) -> dict: + """Serializes the GenieStartConversationResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.conversation: body['conversation'] = self.conversation.as_dict() + if self.conversation_id is not None: body['conversation_id'] = self.conversation_id + if self.message: body['message'] = self.message.as_dict() + if self.message_id is not None: body['message_id'] = self.message_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse: + """Deserializes the GenieStartConversationResponse from a dictionary.""" + return cls(conversation=_from_dict(d, 'conversation', GenieConversation), + conversation_id=d.get('conversation_id', None), + 
message=_from_dict(d, 'message', GenieMessage), + message_id=d.get('message_id', None)) + + class LifecycleState(Enum): ACTIVE = 'ACTIVE' @@ -305,6 +547,88 @@ def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse: subscriptions=_repeated_dict(d, 'subscriptions', Subscription)) +@dataclass +class MessageError: + error: Optional[str] = None + + type: Optional[MessageErrorType] = None + + def as_dict(self) -> dict: + """Serializes the MessageError into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.error is not None: body['error'] = self.error + if self.type is not None: body['type'] = self.type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MessageError: + """Deserializes the MessageError from a dictionary.""" + return cls(error=d.get('error', None), type=_enum(d, 'type', MessageErrorType)) + + +class MessageErrorType(Enum): + + BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION = 'BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION' + CHAT_COMPLETION_CLIENT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_EXCEPTION' + CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION = 'CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION' + CHAT_COMPLETION_NETWORK_EXCEPTION = 'CHAT_COMPLETION_NETWORK_EXCEPTION' + CONTENT_FILTER_EXCEPTION = 'CONTENT_FILTER_EXCEPTION' + CONTEXT_EXCEEDED_EXCEPTION = 'CONTEXT_EXCEEDED_EXCEPTION' + COULD_NOT_GET_UC_SCHEMA_EXCEPTION = 'COULD_NOT_GET_UC_SCHEMA_EXCEPTION' + DEPLOYMENT_NOT_FOUND_EXCEPTION = 'DEPLOYMENT_NOT_FOUND_EXCEPTION' + FUNCTIONS_NOT_AVAILABLE_EXCEPTION = 'FUNCTIONS_NOT_AVAILABLE_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_EXCEPTION' + FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = 'FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION' + FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = 'FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION' + GENERIC_CHAT_COMPLETION_EXCEPTION = 'GENERIC_CHAT_COMPLETION_EXCEPTION' + GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = 'GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION' + GENERIC_SQL_EXEC_API_CALL_EXCEPTION = 'GENERIC_SQL_EXEC_API_CALL_EXCEPTION' + ILLEGAL_PARAMETER_DEFINITION_EXCEPTION = 'ILLEGAL_PARAMETER_DEFINITION_EXCEPTION' + INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION' + INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION = 'INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION' + INVALID_CHAT_COMPLETION_JSON_EXCEPTION = 'INVALID_CHAT_COMPLETION_JSON_EXCEPTION' + INVALID_COMPLETION_REQUEST_EXCEPTION = 'INVALID_COMPLETION_REQUEST_EXCEPTION' + INVALID_FUNCTION_CALL_EXCEPTION = 'INVALID_FUNCTION_CALL_EXCEPTION' + INVALID_TABLE_IDENTIFIER_EXCEPTION = 'INVALID_TABLE_IDENTIFIER_EXCEPTION' + LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION' + MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION' + MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION' + NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION' + RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION' + RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION' + REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION' + RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION' + SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION' + TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION' + TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION' + TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION' + 
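    # These members mirror the Genie service's failure modes. A handler would
    # typically compare `MessageError.type` against them once a message ends in
    # the FAILED status, for example:
    #   if msg.error and msg.error.type == MessageErrorType.SQL_EXECUTION_EXCEPTION:
    #       handle_sql_failure(msg)  # hypothetical handler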
UNEXPECTED_REPLY_PROCESS_EXCEPTION = 'UNEXPECTED_REPLY_PROCESS_EXCEPTION' + UNKNOWN_AI_MODEL = 'UNKNOWN_AI_MODEL' + WAREHOUSE_ACCESS_MISSING_EXCEPTION = 'WAREHOUSE_ACCESS_MISSING_EXCEPTION' + WAREHOUSE_NOT_FOUND_EXCEPTION = 'WAREHOUSE_NOT_FOUND_EXCEPTION' + + +class MessageStatus(Enum): + """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data + sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * + `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling + [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a + response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message + processing is completed. Results are in the `attachments` field. Get the SQL query result by + calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message + has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user + needs to execute the query again. * `CANCELLED`: Message has been cancelled.""" + + ASKING_AI = 'ASKING_AI' + CANCELLED = 'CANCELLED' + COMPLETED = 'COMPLETED' + EXECUTING_QUERY = 'EXECUTING_QUERY' + FAILED = 'FAILED' + FETCHING_METADATA = 'FETCHING_METADATA' + QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED' + SUBMITTED = 'SUBMITTED' + + @dataclass class MigrateDashboardRequest: source_dashboard_id: str @@ -392,6 +716,72 @@ def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard: warehouse_id=d.get('warehouse_id', None)) +@dataclass +class QueryAttachment: + description: Optional[str] = None + """Description of the query""" + + instruction_id: Optional[str] = None + """If the query was created on an instruction (trusted asset) we link to the id""" + + instruction_title: Optional[str] = None + """Always store the title next to the id in case the original instruction title changes or the + instruction is deleted.""" + + last_updated_timestamp: Optional[int] = None + """Time when the user updated the query last""" + + query: Optional[str] = None + """AI generated SQL query""" + + title: Optional[str] = None + """Name of the query""" + + def as_dict(self) -> dict: + """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.instruction_id is not None: body['instruction_id'] = self.instruction_id + if self.instruction_title is not None: body['instruction_title'] = self.instruction_title + if self.last_updated_timestamp is not None: + body['last_updated_timestamp'] = self.last_updated_timestamp + if self.query is not None: body['query'] = self.query + if self.title is not None: body['title'] = self.title + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryAttachment: + """Deserializes the QueryAttachment from a dictionary.""" + return cls(description=d.get('description', None), + instruction_id=d.get('instruction_id', None), + instruction_title=d.get('instruction_title', None), + last_updated_timestamp=d.get('last_updated_timestamp', None), + query=d.get('query', None), + title=d.get('title', None)) + + +@dataclass +class Result: + row_count: Optional[int] = None + """Row count of the result""" + + statement_id: Optional[str] = None + """Statement Execution API statement id. 
Use [Get status, manifest, and result first + chunk](:method:statementexecution/getstatement) to get the full result data.""" + + def as_dict(self) -> dict: + """Serializes the Result into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.row_count is not None: body['row_count'] = self.row_count + if self.statement_id is not None: body['statement_id'] = self.statement_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Result: + """Deserializes the Result from a dictionary.""" + return cls(row_count=d.get('row_count', None), statement_id=d.get('statement_id', None)) + + @dataclass class Schedule: cron_schedule: CronSchedule @@ -565,6 +955,23 @@ def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser: return cls(user_id=d.get('user_id', None)) +@dataclass +class TextAttachment: + content: Optional[str] = None + """AI generated message""" + + def as_dict(self) -> dict: + """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.content is not None: body['content'] = self.content + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TextAttachment: + """Deserializes the TextAttachment from a dictionary.""" + return cls(content=d.get('content', None)) + + @dataclass class TrashDashboardResponse: @@ -675,6 +1082,193 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest: schedule_id=d.get('schedule_id', None)) +class GenieAPI: + """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that + business users can use to ask questions using natural language. Genie uses data registered to Unity + Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks + Assistant must be enabled.""" + + def __init__(self, api_client): + self._api = api_client + + def wait_get_message_genie_completed( + self, + conversation_id: str, + message_id: str, + space_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage: + deadline = time.time() + timeout.total_seconds() + target_states = (MessageStatus.COMPLETED, ) + failure_states = (MessageStatus.FAILED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id) + status = poll.status + status_message = f'current status: {status}' + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach COMPLETED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]: + """Create conversation message. + + Create new message in [conversation](:method:genie/startconversation). The AI response uses all + previously created messages in the conversation to respond. + + :param space_id: str + The ID associated with the Genie space where the conversation is started. 
+ :param conversation_id: str + The ID associated with the conversation. + :param content: str + User message content. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. + """ + body = {} + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieMessage.from_dict(op_response), + conversation_id=conversation_id, + message_id=op_response['id'], + space_id=space_id) + + def create_message_and_wait(self, + space_id: str, + conversation_id: str, + content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.create_message(content=content, conversation_id=conversation_id, + space_id=space_id).result(timeout=timeout) + + def execute_message_query(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Execute SQL query in a conversation message. + + Execute the SQL query in the message. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'POST', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage: + """Get conversation message. + + Get message from conversation. + + :param space_id: str + The ID associated with the Genie space where the target conversation is located. + :param conversation_id: str + The ID associated with the target conversation. + :param message_id: str + The ID associated with the target message from the identified conversation. + + :returns: :class:`GenieMessage` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}', + headers=headers) + return GenieMessage.from_dict(res) + + def get_message_query_result(self, space_id: str, conversation_id: str, + message_id: str) -> GenieGetMessageQueryResultResponse: + """Get conversation message SQL query result. + + Get the result of SQL query if the message has a query attachment. This is only available if a message + has a query attachment and the message status is `EXECUTING_QUERY`. + + :param space_id: str + Genie space ID + :param conversation_id: str + Conversation ID + :param message_id: str + Message ID + + :returns: :class:`GenieGetMessageQueryResultResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result', + headers=headers) + return GenieGetMessageQueryResultResponse.from_dict(res) + + def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]: + """Start conversation. + + Start a new conversation. + + :param space_id: str + The ID associated with the Genie space where you want to start a conversation. 
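For orientation, a condensed sketch of the waiter flow these methods document, using `start_conversation_and_wait` (defined just after this docstring) and assuming the service is exposed as `w.genie`; the space ID is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Blocks until the message reaches COMPLETED, or raises OperationFailed on
# FAILED, per wait_get_message_genie_completed above.
message = w.genie.start_conversation_and_wait(
    space_id='01ef-example-space',  # placeholder space ID
    content='Which clusters were terminated this week?')

for attachment in message.attachments or []:
    if attachment.text:
        print(attachment.text.content)  # AI-generated prose
    if attachment.query:
        print(attachment.query.query)   # AI-generated SQL
        result = w.genie.get_message_query_result(
            space_id=message.space_id,
            conversation_id=message.conversation_id,
            message_id=message.id)
        print(result.statement_response)
```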
+ :param content: str + The text of the message that starts the conversation. + + :returns: + Long-running operation waiter for :class:`GenieMessage`. + See :method:wait_get_message_genie_completed for more details. + """ + body = {} + if content is not None: body['content'] = content + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', + f'/api/2.0/genie/spaces/{space_id}/start-conversation', + body=body, + headers=headers) + return Wait(self.wait_get_message_genie_completed, + response=GenieStartConversationResponse.from_dict(op_response), + conversation_id=op_response['conversation_id'], + message_id=op_response['message_id'], + space_id=space_id) + + def start_conversation_and_wait(self, space_id: str, content: str, + timeout=timedelta(minutes=20)) -> GenieMessage: + return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout) + + class LakeviewAPI: """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can be done with Workspace API (import, export, get-status, list, delete).""" diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py index 27f448ccb..b5cf91846 100755 --- a/databricks/sdk/service/iam.py +++ b/databricks/sdk/service/iam.py @@ -132,16 +132,16 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: @dataclass -class DeleteWorkspaceAssignments: +class DeleteWorkspacePermissionAssignmentResponse: def as_dict(self) -> dict: - """Serializes the DeleteWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" + """Serializes the DeleteWorkspacePermissionAssignmentResponse into a dictionary suitable for use as a JSON request body.""" body = {} return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspaceAssignments: - """Deserializes the DeleteWorkspaceAssignments from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse: + """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary.""" return cls() @@ -406,6 +406,56 @@ def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse: total_results=d.get('totalResults', None)) +@dataclass +class MigratePermissionsRequest: + workspace_id: int + """WorkspaceId of the associated workspace where the permission migration will occur.""" + + from_workspace_group_name: str + """The name of the workspace group that permissions will be migrated from.""" + + to_account_group_name: str + """The name of the account group that permissions will be migrated to.""" + + size: Optional[int] = None + """The maximum number of permissions that will be migrated.""" + + def as_dict(self) -> dict: + """Serializes the MigratePermissionsRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.from_workspace_group_name is not None: + body['from_workspace_group_name'] = self.from_workspace_group_name + if self.size is not None: body['size'] = self.size + if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest: + """Deserializes the MigratePermissionsRequest from a dictionary.""" + return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), + size=d.get('size', None), + 
to_account_group_name=d.get('to_account_group_name', None), + workspace_id=d.get('workspace_id', None)) + + +@dataclass +class MigratePermissionsResponse: + permissions_migrated: Optional[int] = None + """Number of permissions migrated.""" + + def as_dict(self) -> dict: + """Serializes the MigratePermissionsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse: + """Deserializes the MigratePermissionsResponse from a dictionary.""" + return cls(permissions_migrated=d.get('permissions_migrated', None)) + + @dataclass class Name: family_name: Optional[str] = None @@ -723,6 +773,9 @@ def from_dict(cls, d: Dict[str, any]) -> Permission: @dataclass class PermissionAssignment: + """The output format for existing workspace PermissionAssignment records, which contains some info + for user consumption.""" + error: Optional[str] = None """Error response associated with a workspace permission assignment, if any.""" @@ -787,57 +840,6 @@ class PermissionLevel(Enum): IS_OWNER = 'IS_OWNER' -@dataclass -class PermissionMigrationRequest: - workspace_id: int - """WorkspaceId of the associated workspace where the permission migration will occur. Both - workspace group and account group must be in this workspace.""" - - from_workspace_group_name: str - """The name of the workspace group that permissions will be migrated from.""" - - to_account_group_name: str - """The name of the account group that permissions will be migrated to.""" - - size: Optional[int] = None - """The maximum number of permissions that will be migrated.""" - - def as_dict(self) -> dict: - """Serializes the PermissionMigrationRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.from_workspace_group_name is not None: - body['from_workspace_group_name'] = self.from_workspace_group_name - if self.size is not None: body['size'] = self.size - if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name - if self.workspace_id is not None: body['workspace_id'] = self.workspace_id - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationRequest: - """Deserializes the PermissionMigrationRequest from a dictionary.""" - return cls(from_workspace_group_name=d.get('from_workspace_group_name', None), - size=d.get('size', None), - to_account_group_name=d.get('to_account_group_name', None), - workspace_id=d.get('workspace_id', None)) - - -@dataclass -class PermissionMigrationResponse: - permissions_migrated: Optional[int] = None - """Number of permissions migrated.""" - - def as_dict(self) -> dict: - """Serializes the PermissionMigrationResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> PermissionMigrationResponse: - """Deserializes the PermissionMigrationResponse from a dictionary.""" - return cls(permissions_migrated=d.get('permissions_migrated', None)) - - @dataclass class PermissionOutput: description: Optional[str] = None @@ -911,6 +913,8 @@ def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest: @dataclass class PrincipalOutput: + """Information about the principal assigned to the workspace.""" + 
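These renamed request/response types back `migrate_permissions` on `PermissionMigrationAPI`, updated just below. A usage sketch, assuming the workspace-level `permission_migration` accessor; the identifiers are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Migrate up to 1000 ACLs from a workspace-local group to an account group.
resp = w.permission_migration.migrate_permissions(
    workspace_id=1234567890,                     # placeholder workspace ID
    from_workspace_group_name='data-engineers',  # placeholder group names
    to_account_group_name='data-engineers-acct',
    size=1000)
print(f'migrated {resp.permissions_migrated} permissions')
```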
display_name: Optional[str] = None """The display name of the principal.""" @@ -1134,7 +1138,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest: @dataclass class UpdateWorkspaceAssignments: - permissions: List[WorkspacePermission] + permissions: Optional[List[WorkspacePermission]] = None """Array of permissions assignments to update on the workspace. Note that excluding this field will have the same effect as providing an empty list which will result in the deletion of all permissions for the principal.""" @@ -1143,7 +1147,7 @@ class UpdateWorkspaceAssignments: """The ID of the user, service principal, or group.""" workspace_id: Optional[int] = None - """The workspace ID.""" + """The workspace ID for the account.""" def as_dict(self) -> dict: """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body.""" @@ -2495,7 +2499,7 @@ def update(self, class PermissionMigrationAPI: - """This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.""" + """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx""" def __init__(self, api_client): self._api = api_client @@ -2505,14 +2509,11 @@ def migrate_permissions(self, from_workspace_group_name: str, to_account_group_name: str, *, - size: Optional[int] = None) -> PermissionMigrationResponse: + size: Optional[int] = None) -> MigratePermissionsResponse: """Migrate Permissions. - Migrate a batch of permissions from a workspace local group to an account group. - :param workspace_id: int - WorkspaceId of the associated workspace where the permission migration will occur. Both workspace - group and account group must be in this workspace. + WorkspaceId of the associated workspace where the permission migration will occur. :param from_workspace_group_name: str The name of the workspace group that permissions will be migrated from. :param to_account_group_name: str @@ -2520,7 +2521,7 @@ def migrate_permissions(self, :param size: int (optional) The maximum number of permissions that will be migrated. - :returns: :class:`PermissionMigrationResponse` + :returns: :class:`MigratePermissionsResponse` """ body = {} if from_workspace_group_name is not None: @@ -2531,7 +2532,7 @@ def migrate_permissions(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/permissionmigration', body=body, headers=headers) - return PermissionMigrationResponse.from_dict(res) + return MigratePermissionsResponse.from_dict(res) class PermissionsAPI: @@ -3313,7 +3314,7 @@ def delete(self, workspace_id: int, principal_id: int): principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. @@ -3366,18 +3367,21 @@ def list(self, workspace_id: int) -> Iterator[PermissionAssignment]: parsed = PermissionAssignments.from_dict(json).permission_assignments return parsed if parsed is not None else [] - def update(self, workspace_id: int, principal_id: int, - permissions: List[WorkspacePermission]) -> PermissionAssignment: + def update(self, + workspace_id: int, + principal_id: int, + *, + permissions: Optional[List[WorkspacePermission]] = None) -> PermissionAssignment: """Create or update permissions assignment. Creates or updates the workspace permissions assignment in a given account and workspace for the specified principal. :param workspace_id: int - The workspace ID. 
+ The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - :param permissions: List[:class:`WorkspacePermission`] + :param permissions: List[:class:`WorkspacePermission`] (optional) Array of permissions assignments to update on the workspace. Note that excluding this field will have the same effect as providing an empty list which will result in the deletion of all permissions for the principal. diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index f96d7dd75..cf677fd06 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -480,7 +480,7 @@ class CreateJob: """Deployment information for jobs managed by external sources.""" description: Optional[str] = None - """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.""" + """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" edit_mode: Optional[JobEditMode] = None """Edit mode of the job. @@ -1601,7 +1601,7 @@ class JobSettings: """Deployment information for jobs managed by external sources.""" description: Optional[str] = None - """An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding.""" + """An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.""" edit_mode: Optional[JobEditMode] = None """Edit mode of the job. @@ -2055,7 +2055,6 @@ class PeriodicTriggerConfigurationTimeUnit(Enum): DAYS = 'DAYS' HOURS = 'HOURS' - TIME_UNIT_UNSPECIFIED = 'TIME_UNIT_UNSPECIFIED' WEEKS = 'WEEKS' @@ -5192,7 +5191,7 @@ def create(self, :param deployment: :class:`JobDeployment` (optional) Deployment information for jobs managed by external sources. :param description: str (optional) - An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. 
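The iam.py change above makes `permissions` optional on `WorkspaceAssignment.update`; per the docstring, omitting it behaves the same as passing an empty list and deletes every assignment for the principal. A sketch, assuming the account-level `workspace_assignment` accessor (the IDs are placeholders):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.iam import WorkspacePermission

a = AccountClient()

# Grant USER-level access to a principal in a workspace.
assignment = a.workspace_assignment.update(
    workspace_id=1234567890,  # placeholder workspace ID
    principal_id=987654321,   # placeholder principal ID
    permissions=[WorkspacePermission.USER])

# Omitting `permissions` (now keyword-only and optional) has the same effect
# as an empty list: it removes all permissions for the principal.
a.workspace_assignment.update(workspace_id=1234567890, principal_id=987654321)
```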
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py index 57cd4f38f..1a2dedf31 100755 --- a/databricks/sdk/service/marketplace.py +++ b/databricks/sdk/service/marketplace.py @@ -56,7 +56,6 @@ class AssetType(Enum): ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA' ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL' ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK' - ASSET_TYPE_UNSPECIFIED = 'ASSET_TYPE_UNSPECIFIED' @dataclass @@ -804,11 +803,6 @@ class FileStatus(Enum): FILE_STATUS_STAGING = 'FILE_STATUS_STAGING' -class FilterType(Enum): - - METASTORE = 'METASTORE' - - class FulfillmentType(Enum): INSTALL = 'INSTALL' @@ -1297,16 +1291,11 @@ class Listing: id: Optional[str] = None - provider_summary: Optional[ProviderListingSummaryInfo] = None - """we can not use just ProviderListingSummary since we already have same name on entity side of the - state""" - def as_dict(self) -> dict: """Serializes the Listing into a dictionary suitable for use as a JSON request body.""" body = {} if self.detail: body['detail'] = self.detail.as_dict() if self.id is not None: body['id'] = self.id - if self.provider_summary: body['provider_summary'] = self.provider_summary.as_dict() if self.summary: body['summary'] = self.summary.as_dict() return body @@ -1315,7 +1304,6 @@ def from_dict(cls, d: Dict[str, any]) -> Listing: """Deserializes the Listing from a dictionary.""" return cls(detail=_from_dict(d, 'detail', ListingDetail), id=d.get('id', None), - provider_summary=_from_dict(d, 'provider_summary', ProviderListingSummaryInfo), summary=_from_dict(d, 'summary', ListingSummary)) @@ -1461,23 +1449,18 @@ def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment: @dataclass class ListingSetting: - filters: Optional[List[VisibilityFilter]] = None - """filters are joined with `or` conjunction.""" - visibility: Optional[Visibility] = None def as_dict(self) -> dict: """Serializes the ListingSetting into a dictionary suitable for use as a JSON request body.""" body = {} - if self.filters: body['filters'] = [v.as_dict() for v in self.filters] if self.visibility is not None: body['visibility'] = self.visibility.value return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListingSetting: """Deserializes the ListingSetting from a dictionary.""" - return cls(filters=_repeated_dict(d, 'filters', VisibilityFilter), - visibility=_enum(d, 'visibility', Visibility)) + return cls(visibility=_enum(d, 'visibility', Visibility)) class ListingShareType(Enum): @@ -1517,8 +1500,6 @@ class ListingSummary: """if a git repo is being created, a listing will be initialized with this field as opposed to a share""" - metastore_id: Optional[str] = None - provider_id: Optional[str] = None provider_region: Optional[RegionInfo] = None @@ -1552,7 +1533,6 @@ def as_dict(self) -> dict: if self.exchange_ids: body['exchange_ids'] = [v for v in self.exchange_ids] if self.git_repo: body['git_repo'] = self.git_repo.as_dict() if self.listing_type is not None: body['listingType'] = self.listing_type.value - if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name if self.provider_id is not None: body['provider_id'] = self.provider_id if self.provider_region: body['provider_region'] = self.provider_region.as_dict() @@ -1577,7 +1557,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListingSummary: exchange_ids=d.get('exchange_ids', None), git_repo=_from_dict(d, 'git_repo', RepoInfo), listing_type=_enum(d, 'listingType', ListingType), - metastore_id=d.get('metastore_id', 
None), name=d.get('name', None), provider_id=d.get('provider_id', None), provider_region=_from_dict(d, 'provider_region', RegionInfo), @@ -1617,7 +1596,6 @@ class ListingTagType(Enum): LISTING_TAG_TYPE_LANGUAGE = 'LISTING_TAG_TYPE_LANGUAGE' LISTING_TAG_TYPE_TASK = 'LISTING_TAG_TYPE_TASK' - LISTING_TAG_TYPE_UNSPECIFIED = 'LISTING_TAG_TYPE_UNSPECIFIED' class ListingType(Enum): @@ -1733,37 +1711,6 @@ def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard: return cls(id=d.get('id', None)) -@dataclass -class ProviderIconFile: - icon_file_id: Optional[str] = None - - icon_file_path: Optional[str] = None - - icon_type: Optional[ProviderIconType] = None - - def as_dict(self) -> dict: - """Serializes the ProviderIconFile into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id - if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path - if self.icon_type is not None: body['icon_type'] = self.icon_type.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ProviderIconFile: - """Deserializes the ProviderIconFile from a dictionary.""" - return cls(icon_file_id=d.get('icon_file_id', None), - icon_file_path=d.get('icon_file_path', None), - icon_type=_enum(d, 'icon_type', ProviderIconType)) - - -class ProviderIconType(Enum): - - DARK = 'DARK' - PRIMARY = 'PRIMARY' - PROVIDER_ICON_TYPE_UNSPECIFIED = 'PROVIDER_ICON_TYPE_UNSPECIFIED' - - @dataclass class ProviderInfo: name: str @@ -1837,33 +1784,6 @@ def from_dict(cls, d: Dict[str, any]) -> ProviderInfo: term_of_service_link=d.get('term_of_service_link', None)) -@dataclass -class ProviderListingSummaryInfo: - """we can not use just ProviderListingSummary since we already have same name on entity side of the - state""" - - description: Optional[str] = None - - icon_files: Optional[List[ProviderIconFile]] = None - - name: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the ProviderListingSummaryInfo into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.icon_files: body['icon_files'] = [v.as_dict() for v in self.icon_files] - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ProviderListingSummaryInfo: - """Deserializes the ProviderListingSummaryInfo from a dictionary.""" - return cls(description=d.get('description', None), - icon_files=_repeated_dict(d, 'icon_files', ProviderIconFile), - name=d.get('name', None)) - - @dataclass class RegionInfo: cloud: Optional[str] = None @@ -1996,14 +1916,6 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject: return cls(data_object_type=d.get('data_object_type', None), name=d.get('name', None)) -class SortBy(Enum): - - SORT_BY_DATE = 'SORT_BY_DATE' - SORT_BY_RELEVANCE = 'SORT_BY_RELEVANCE' - SORT_BY_TITLE = 'SORT_BY_TITLE' - SORT_BY_UNSPECIFIED = 'SORT_BY_UNSPECIFIED' - - @dataclass class TokenDetail: bearer_token: Optional[str] = None @@ -2369,25 +2281,6 @@ class Visibility(Enum): PUBLIC = 'PUBLIC' -@dataclass -class VisibilityFilter: - filter_type: Optional[FilterType] = None - - filter_value: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the VisibilityFilter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.filter_type is not None: body['filterType'] = self.filter_type.value - if self.filter_value is not 
None: body['filterValue'] = self.filter_value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> VisibilityFilter: - """Deserializes the VisibilityFilter from a dictionary.""" - return cls(filter_type=_enum(d, 'filterType', FilterType), filter_value=d.get('filterValue', None)) - - class ConsumerFulfillmentsAPI: """Fulfillments are entities that allow consumers to preview installations.""" @@ -2667,14 +2560,12 @@ def list(self, *, assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, - is_ascending: Optional[bool] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, is_staff_pick: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, provider_ids: Optional[List[str]] = None, - sort_by: Optional[SortBy] = None, tags: Optional[List[ListingTag]] = None) -> Iterator[Listing]: """List listings. @@ -2684,7 +2575,6 @@ def list(self, Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) Filters each listing based on if it is free. :param is_private_exchange: bool (optional) @@ -2695,8 +2585,6 @@ def list(self, :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) - Criteria for sorting the resulting set of listings. :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags @@ -2706,14 +2594,12 @@ def list(self, query = {} if assets is not None: query['assets'] = [v.value for v in assets] if categories is not None: query['categories'] = [v.value for v in categories] - if is_ascending is not None: query['is_ascending'] = is_ascending if is_free is not None: query['is_free'] = is_free if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange if is_staff_pick is not None: query['is_staff_pick'] = is_staff_pick if page_size is not None: query['page_size'] = page_size if page_token is not None: query['page_token'] = page_token if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] - if sort_by is not None: query['sort_by'] = sort_by.value if tags is not None: query['tags'] = [v.as_dict() for v in tags] headers = {'Accept': 'application/json', } @@ -2731,13 +2617,11 @@ def search(self, *, assets: Optional[List[AssetType]] = None, categories: Optional[List[Category]] = None, - is_ascending: Optional[bool] = None, is_free: Optional[bool] = None, is_private_exchange: Optional[bool] = None, page_size: Optional[int] = None, page_token: Optional[str] = None, - provider_ids: Optional[List[str]] = None, - sort_by: Optional[SortBy] = None) -> Iterator[Listing]: + provider_ids: Optional[List[str]] = None) -> Iterator[Listing]: """Search listings. Search published listings in the Databricks Marketplace that the consumer has access to. 
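With `is_ascending` and `sort_by` removed from both `list` and `search`, only the filtering parameters remain and result ordering is server-defined. A sketch, assuming the `w.consumer_listings` accessor; the `Category` member used here is assumed from the same `marketplace` module:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.marketplace import AssetType, Category

w = WorkspaceClient()

# Only filtering parameters survive this change; there is no client-side
# control over ordering anymore.
for listing in w.consumer_listings.list(
        assets=[AssetType.ASSET_TYPE_NOTEBOOK],
        categories=[Category.ADVERTISING_AND_MARKETING],
        is_free=True,
        page_size=25):
    print(listing.id, listing.summary.name if listing.summary else None)
```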
This query @@ -2749,14 +2633,12 @@ def search(self, Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) :param is_private_exchange: bool (optional) :param page_size: int (optional) :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) :returns: Iterator over :class:`Listing` """ @@ -2764,14 +2646,12 @@ def search(self, query = {} if assets is not None: query['assets'] = [v.value for v in assets] if categories is not None: query['categories'] = [v.value for v in categories] - if is_ascending is not None: query['is_ascending'] = is_ascending if is_free is not None: query['is_free'] = is_free if is_private_exchange is not None: query['is_private_exchange'] = is_private_exchange if page_size is not None: query['page_size'] = page_size if page_token is not None: query['page_token'] = page_token if provider_ids is not None: query['provider_ids'] = [v for v in provider_ids] if query is not None: query['query'] = query - if sort_by is not None: query['sort_by'] = sort_by.value headers = {'Accept': 'application/json', } while True: diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py index 44132ee88..0c439ae7e 100755 --- a/databricks/sdk/service/oauth2.py +++ b/databricks/sdk/service/oauth2.py @@ -15,14 +15,14 @@ @dataclass class CreateCustomAppIntegration: - name: str - """name of the custom oauth app""" + confidential: Optional[bool] = None + """This field indicates whether an OAuth client secret is required to authenticate this client.""" - redirect_urls: List[str] - """List of oauth redirect urls""" + name: Optional[str] = None + """Name of the custom OAuth app""" - confidential: Optional[bool] = None - """indicates if an oauth client-secret should be generated""" + redirect_urls: Optional[List[str]] = None + """List of OAuth redirect urls""" scopes: Optional[List[str]] = None """OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, @@ -54,14 +54,14 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration: @dataclass class CreateCustomAppIntegrationOutput: client_id: Optional[str] = None - """oauth client-id generated by the Databricks""" + """OAuth client-id generated by the Databricks""" client_secret: Optional[str] = None - """oauth client-secret generated by the Databricks if this is a confidential oauth app + """OAuth client-secret generated by the Databricks. If this is a confidential OAuth app client-secret will be generated.""" integration_id: Optional[str] = None - """unique integration id for the custom oauth app""" + """Unique integration id for the custom OAuth app""" def as_dict(self) -> dict: """Serializes the CreateCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" @@ -82,7 +82,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput: @dataclass class CreatePublishedAppIntegration: app_id: Optional[str] = None - """app_id of the oauth published app integration. For example power-bi, tableau-deskop""" + """App id of the OAuth published app integration. 
For example power-bi, tableau-deskop""" token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -104,7 +104,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration: @dataclass class CreatePublishedAppIntegrationOutput: integration_id: Optional[str] = None - """unique integration id for the published oauth app""" + """Unique integration id for the published OAuth app""" def as_dict(self) -> dict: """Serializes the CreatePublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" @@ -227,19 +227,27 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: @dataclass class GetCustomAppIntegrationOutput: client_id: Optional[str] = None - """oauth client id of the custom oauth app""" + """The client id of the custom OAuth app""" confidential: Optional[bool] = None - """indicates if an oauth client-secret should be generated""" + """This field indicates whether an OAuth client secret is required to authenticate this client.""" + + create_time: Optional[str] = None + + created_by: Optional[int] = None + + creator_username: Optional[str] = None integration_id: Optional[str] = None """ID of this custom app""" name: Optional[str] = None - """name of the custom oauth app""" + """The display name of the custom OAuth app""" redirect_urls: Optional[List[str]] = None - """List of oauth redirect urls""" + """List of OAuth redirect urls""" + + scopes: Optional[List[str]] = None token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -249,9 +257,13 @@ def as_dict(self) -> dict: body = {} if self.client_id is not None: body['client_id'] = self.client_id if self.confidential is not None: body['confidential'] = self.confidential + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by + if self.creator_username is not None: body['creator_username'] = self.creator_username if self.integration_id is not None: body['integration_id'] = self.integration_id if self.name is not None: body['name'] = self.name if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls] + if self.scopes: body['scopes'] = [v for v in self.scopes] if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() return body @@ -260,39 +272,51 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput: """Deserializes the GetCustomAppIntegrationOutput from a dictionary.""" return cls(client_id=d.get('client_id', None), confidential=d.get('confidential', None), + create_time=d.get('create_time', None), + created_by=d.get('created_by', None), + creator_username=d.get('creator_username', None), integration_id=d.get('integration_id', None), name=d.get('name', None), redirect_urls=d.get('redirect_urls', None), + scopes=d.get('scopes', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) @dataclass class GetCustomAppIntegrationsOutput: apps: Optional[List[GetCustomAppIntegrationOutput]] = None - """Array of Custom OAuth App Integrations defined for the account.""" + """List of Custom OAuth App Integrations defined for the account.""" + + next_page_token: Optional[str] = None def as_dict(self) -> dict: """Serializes the GetCustomAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = 
self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput: """Deserializes the GetCustomAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput)) + return cls(apps=_repeated_dict(d, 'apps', GetCustomAppIntegrationOutput), + next_page_token=d.get('next_page_token', None)) @dataclass class GetPublishedAppIntegrationOutput: app_id: Optional[str] = None - """app-id of the published app integration""" + """App-id of the published app integration""" + + create_time: Optional[str] = None + + created_by: Optional[int] = None integration_id: Optional[str] = None - """unique integration id for the published oauth app""" + """Unique integration id for the published OAuth app""" name: Optional[str] = None - """name of the published oauth app""" + """Display name of the published OAuth app""" token_access_policy: Optional[TokenAccessPolicy] = None """Token access policy""" @@ -301,6 +325,8 @@ def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.app_id is not None: body['app_id'] = self.app_id + if self.create_time is not None: body['create_time'] = self.create_time + if self.created_by is not None: body['created_by'] = self.created_by if self.integration_id is not None: body['integration_id'] = self.integration_id if self.name is not None: body['name'] = self.name if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict() @@ -310,6 +336,8 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput: """Deserializes the GetPublishedAppIntegrationOutput from a dictionary.""" return cls(app_id=d.get('app_id', None), + create_time=d.get('create_time', None), + created_by=d.get('created_by', None), integration_id=d.get('integration_id', None), name=d.get('name', None), token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy)) @@ -318,24 +346,28 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput: @dataclass class GetPublishedAppIntegrationsOutput: apps: Optional[List[GetPublishedAppIntegrationOutput]] = None - """Array of Published OAuth App Integrations defined for the account.""" + """List of Published OAuth App Integrations defined for the account.""" + + next_page_token: Optional[str] = None def as_dict(self) -> dict: """Serializes the GetPublishedAppIntegrationsOutput into a dictionary suitable for use as a JSON request body.""" body = {} if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput: """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput)) + return cls(apps=_repeated_dict(d, 'apps', GetPublishedAppIntegrationOutput), + next_page_token=d.get('next_page_token', None)) @dataclass class GetPublishedAppsOutput: apps: Optional[List[PublishedAppOutput]] = None - """Array of Published OAuth Apps.""" + """List of Published OAuth Apps.""" next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
If not present, there are no more @@ -388,7 +420,7 @@ class PublishedAppOutput: apps.""" name: Optional[str] = None - """Name of the published OAuth app.""" + """The display name of the published OAuth app.""" redirect_urls: Optional[List[str]] = None """Redirect URLs of the published OAuth app.""" @@ -485,13 +517,12 @@ def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy: @dataclass class UpdateCustomAppIntegration: integration_id: Optional[str] = None - """The oauth app integration ID.""" redirect_urls: Optional[List[str]] = None - """List of oauth redirect urls to be updated in the custom oauth app integration""" + """List of OAuth redirect urls to be updated in the custom OAuth app integration""" token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the custom oauth app integration""" + """Token access policy to be updated in the custom OAuth app integration""" def as_dict(self) -> dict: """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body.""" @@ -526,10 +557,9 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegrationOutput: @dataclass class UpdatePublishedAppIntegration: integration_id: Optional[str] = None - """The oauth app integration ID.""" token_access_policy: Optional[TokenAccessPolicy] = None - """Token access policy to be updated in the published oauth app integration""" + """Token access policy to be updated in the published OAuth app integration""" def as_dict(self) -> dict: """Serializes the UpdatePublishedAppIntegration into a dictionary suitable for use as a JSON request body.""" @@ -560,31 +590,31 @@ def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput: class CustomAppIntegrationAPI: - """These APIs enable administrators to manage custom oauth app integrations, which is required for + """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.""" def __init__(self, api_client): self._api = api_client def create(self, - name: str, - redirect_urls: List[str], *, confidential: Optional[bool] = None, + name: Optional[str] = None, + redirect_urls: Optional[List[str]] = None, scopes: Optional[List[str]] = None, token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput: """Create Custom OAuth App Integration. Create Custom OAuth App Integration. - You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get. + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - :param name: str - name of the custom oauth app - :param redirect_urls: List[str] - List of oauth redirect urls :param confidential: bool (optional) - indicates if an oauth client-secret should be generated + This field indicates whether an OAuth client secret is required to authenticate this client. + :param name: str (optional) + Name of the custom OAuth app + :param redirect_urls: List[str] (optional) + List of OAuth redirect urls :param scopes: List[str] (optional) OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email. @@ -610,11 +640,10 @@ def create(self, def delete(self, integration_id: str): """Delete Custom OAuth App Integration. - Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via + Delete an existing Custom OAuth App Integration. 
You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. """ @@ -632,7 +661,6 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` """ @@ -645,21 +673,39 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput: headers=headers) return GetCustomAppIntegrationOutput.from_dict(res) - def list(self) -> Iterator[GetCustomAppIntegrationOutput]: + def list(self, + *, + include_creator_username: Optional[bool] = None, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[GetCustomAppIntegrationOutput]: """Get custom oauth app integrations. - Get the list of custom oauth app integrations for the specified Databricks account + Get the list of custom OAuth app integrations for the specified Databricks account + + :param include_creator_username: bool (optional) + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetCustomAppIntegrationOutput` """ + query = {} + if include_creator_username is not None: query['include_creator_username'] = include_creator_username + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', - headers=headers) - parsed = GetCustomAppIntegrationsOutput.from_dict(json).apps - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/custom-app-integrations', + query=query, + headers=headers) + if 'apps' in json: + for v in json['apps']: + yield GetCustomAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, integration_id: str, @@ -668,15 +714,14 @@ def update(self, token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Custom OAuth App Integration. - Updates an existing custom OAuth App Integration. You can retrieve the custom oauth app integration + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param redirect_urls: List[str] (optional) - List of oauth redirect urls to be updated in the custom oauth app integration + List of OAuth redirect urls to be updated in the custom OAuth app integration :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the custom oauth app integration + Token access policy to be updated in the custom OAuth app integration """ @@ -709,7 +754,7 @@ def list(self, Get all the available published OAuth apps in Databricks. :param page_size: int (optional) - The max number of OAuth published apps to return. + The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. 
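
Taken together, the changes above make the account-level custom OAuth app API optional-argument and token-paginated. A minimal usage sketch, assuming an `AccountClient` that picks up account-level credentials (host, account ID, auth) from the environment; the app name and redirect URL are placeholders:

```python
# Sketch only: assumes DATABRICKS_HOST, DATABRICKS_ACCOUNT_ID and an auth
# method are configured in the environment.
from databricks.sdk import AccountClient

a = AccountClient()

# create() now takes optional fields; confidential apps also get a client secret back.
created = a.custom_app_integration.create(
    name='my-custom-app',                            # placeholder display name
    redirect_urls=['https://example.com/callback'],  # placeholder redirect URL
    confidential=True,                               # request a client secret
    scopes=['all-apis'])
print(created.integration_id, created.client_id)

# list() is now a generator that follows next_page_token transparently.
for app in a.custom_app_integration.list(page_size=50, include_creator_username=True):
    print(app.integration_id, app.name, app.creator_username)
```
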
@@ -723,7 +768,7 @@ def list(self, while True: json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps/', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-apps', query=query, headers=headers) if 'apps' in json: @@ -735,7 +780,7 @@ def list(self, class PublishedAppIntegrationAPI: - """These APIs enable administrators to manage published oauth app integrations, which is required for + """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.""" def __init__(self, api_client): @@ -750,10 +795,10 @@ def create( Create Published OAuth App Integration. - You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get. + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param app_id: str (optional) - app_id of the oauth published app integration. For example power-bi, tableau-deskop + App id of the OAuth published app integration. For example power-bi, tableau-deskop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy @@ -773,11 +818,10 @@ def create( def delete(self, integration_id: str): """Delete Published OAuth App Integration. - Delete an existing Published OAuth App Integration. You can retrieve the published oauth app + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. """ @@ -795,7 +839,6 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: Gets the Published OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetPublishedAppIntegrationOutput` """ @@ -808,32 +851,46 @@ def get(self, integration_id: str) -> GetPublishedAppIntegrationOutput: headers=headers) return GetPublishedAppIntegrationOutput.from_dict(res) - def list(self) -> Iterator[GetPublishedAppIntegrationOutput]: + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[GetPublishedAppIntegrationOutput]: """Get published oauth app integrations. 
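
The published-app integration API gains the same token-based pagination below. A minimal sketch under the same account-level auth assumption; the `app_id` and token TTL values are illustrative only:

```python
# Sketch only: account-level auth taken from the environment, values are placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import TokenAccessPolicy

a = AccountClient()

# Illustrative token lifetimes, in minutes.
policy = TokenAccessPolicy(access_token_ttl_in_minutes=60,
                           refresh_token_ttl_in_minutes=10080)

created = a.published_app_integration.create(app_id='power-bi',
                                             token_access_policy=policy)
print(created.integration_id)

# list() pages through results the same way as the custom-app variant.
for app in a.published_app_integration.list(page_size=50):
    print(app.integration_id, app.app_id, app.name)
```
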
- Get the list of published oauth app integrations for the specified Databricks account + Get the list of published OAuth app integrations for the specified Databricks account + + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` """ + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', - f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', - headers=headers) - parsed = GetPublishedAppIntegrationsOutput.from_dict(json).apps - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/oauth2/published-app-integrations', + query=query, + headers=headers) + if 'apps' in json: + for v in json['apps']: + yield GetPublishedAppIntegrationOutput.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, integration_id: str, *, token_access_policy: Optional[TokenAccessPolicy] = None): """Updates Published OAuth App Integration. - Updates an existing published OAuth App Integration. You can retrieve the published oauth app + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the published oauth app integration + Token access policy to be updated in the published OAuth app integration """ diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index 0f3d00de9..b1c43a926 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -25,19 +25,29 @@ @dataclass class Ai21LabsConfig: - ai21labs_api_key: str - """The Databricks secret key reference for an AI21Labs API key.""" + ai21labs_api_key: Optional[str] = None + """The Databricks secret key reference for an AI21 Labs API key. If you prefer to paste your API + key directly, see `ai21labs_api_key_plaintext`. You must provide an API key using one of the + following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" + + ai21labs_api_key_plaintext: Optional[str] = None + """An AI21 Labs API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `ai21labs_api_key`. You must provide an API key using one of the + following fields: `ai21labs_api_key` or `ai21labs_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the Ai21LabsConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key + if self.ai21labs_api_key_plaintext is not None: + body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig: """Deserializes the Ai21LabsConfig from a dictionary.""" - return cls(ai21labs_api_key=d.get('ai21labs_api_key', None)) + return cls(ai21labs_api_key=d.get('ai21labs_api_key', None), + ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None)) @dataclass @@ -45,24 +55,44 @@ class AmazonBedrockConfig: aws_region: str """The AWS region to use. 
Bedrock has to be enabled there.""" - aws_access_key_id: str - """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with - Bedrock services.""" - - aws_secret_access_key: str - """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID, - with permissions to interact with Bedrock services.""" - bedrock_provider: AmazonBedrockConfigBedrockProvider """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.""" + aws_access_key_id: Optional[str] = None + """The Databricks secret key reference for an AWS access key ID with permissions to interact with + Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You + must provide an API key using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`.""" + + aws_access_key_id_plaintext: Optional[str] = None + """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext + string. If you prefer to reference your key using Databricks Secrets, see `aws_access_key_id`. + You must provide an API key using one of the following fields: `aws_access_key_id` or + `aws_access_key_id_plaintext`.""" + + aws_secret_access_key: Optional[str] = None + """The Databricks secret key reference for an AWS secret access key paired with the access key ID, + with permissions to interact with Bedrock services. If you prefer to paste your API key + directly, see `aws_secret_access_key_plaintext`. You must provide an API key using one of the + following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" + + aws_secret_access_key_plaintext: Optional[str] = None + """An AWS secret access key paired with the access key ID, with permissions to interact with + Bedrock services provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `aws_secret_access_key`. 
You must provide an API key using one of the + following fields: `aws_secret_access_key` or `aws_secret_access_key_plaintext`.""" + def as_dict(self) -> dict: """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id + if self.aws_access_key_id_plaintext is not None: + body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext if self.aws_region is not None: body['aws_region'] = self.aws_region if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key + if self.aws_secret_access_key_plaintext is not None: + body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value return body @@ -70,8 +100,10 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig: """Deserializes the AmazonBedrockConfig from a dictionary.""" return cls(aws_access_key_id=d.get('aws_access_key_id', None), + aws_access_key_id_plaintext=d.get('aws_access_key_id_plaintext', None), aws_region=d.get('aws_region', None), aws_secret_access_key=d.get('aws_secret_access_key', None), + aws_secret_access_key_plaintext=d.get('aws_secret_access_key_plaintext', None), bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider)) @@ -87,19 +119,29 @@ class AmazonBedrockConfigBedrockProvider(Enum): @dataclass class AnthropicConfig: - anthropic_api_key: str - """The Databricks secret key reference for an Anthropic API key.""" + anthropic_api_key: Optional[str] = None + """The Databricks secret key reference for an Anthropic API key. If you prefer to paste your API + key directly, see `anthropic_api_key_plaintext`. You must provide an API key using one of the + following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" + + anthropic_api_key_plaintext: Optional[str] = None + """The Anthropic API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `anthropic_api_key`. 
You must provide an API key using one of the + following fields: `anthropic_api_key` or `anthropic_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the AnthropicConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key + if self.anthropic_api_key_plaintext is not None: + body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: """Deserializes the AnthropicConfig from a dictionary.""" - return cls(anthropic_api_key=d.get('anthropic_api_key', None)) + return cls(anthropic_api_key=d.get('anthropic_api_key', None), + anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) @dataclass @@ -249,7 +291,6 @@ def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts: class AppDeploymentMode(Enum): AUTO_SYNC = 'AUTO_SYNC' - MODE_UNSPECIFIED = 'MODE_UNSPECIFIED' SNAPSHOT = 'SNAPSHOT' @@ -257,7 +298,6 @@ class AppDeploymentState(Enum): FAILED = 'FAILED' IN_PROGRESS = 'IN_PROGRESS' - STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' STOPPED = 'STOPPED' SUCCEEDED = 'SUCCEEDED' @@ -308,7 +348,6 @@ class AppState(Enum): IDLE = 'IDLE' RUNNING = 'RUNNING' STARTING = 'STARTING' - STATE_UNSPECIFIED = 'STATE_UNSPECIFIED' @dataclass @@ -467,19 +506,35 @@ class ChatMessageRole(Enum): @dataclass class CohereConfig: - cohere_api_key: str - """The Databricks secret key reference for a Cohere API key.""" + cohere_api_base: Optional[str] = None + """This is an optional field to provide a customized base URL for the Cohere API. If left + unspecified, the standard Cohere base URL is used.""" + + cohere_api_key: Optional[str] = None + """The Databricks secret key reference for a Cohere API key. If you prefer to paste your API key + directly, see `cohere_api_key_plaintext`. You must provide an API key using one of the following + fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" + + cohere_api_key_plaintext: Optional[str] = None + """The Cohere API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `cohere_api_key`. 
You must provide an API key using one of the following + fields: `cohere_api_key` or `cohere_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the CohereConfig into a dictionary suitable for use as a JSON request body.""" body = {} + if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key + if self.cohere_api_key_plaintext is not None: + body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> CohereConfig: """Deserializes the CohereConfig from a dictionary.""" - return cls(cohere_api_key=d.get('cohere_api_key', None)) + return cls(cohere_api_base=d.get('cohere_api_base', None), + cohere_api_key=d.get('cohere_api_key', None), + cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) @dataclass @@ -576,19 +631,30 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint: @dataclass class DatabricksModelServingConfig: - databricks_api_token: str - """The Databricks secret key reference for a Databricks API token that corresponds to a user or - service principal with Can Query access to the model serving endpoint pointed to by this - external model.""" - databricks_workspace_url: str """The URL of the Databricks workspace containing the model serving endpoint pointed to by this external model.""" + databricks_api_token: Optional[str] = None + """The Databricks secret key reference for a Databricks API token that corresponds to a user or + service principal with Can Query access to the model serving endpoint pointed to by this + external model. If you prefer to paste your API key directly, see + `databricks_api_token_plaintext`. You must provide an API key using one of the following fields: + `databricks_api_token` or `databricks_api_token_plaintext`.""" + + databricks_api_token_plaintext: Optional[str] = None + """The Databricks API token that corresponds to a user or service principal with Can Query access + to the model serving endpoint pointed to by this external model provided as a plaintext string. + If you prefer to reference your key using Databricks Secrets, see `databricks_api_token`. 
You + must provide an API key using one of the following fields: `databricks_api_token` or + `databricks_api_token_plaintext`.""" + def as_dict(self) -> dict: """Serializes the DatabricksModelServingConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token + if self.databricks_api_token_plaintext is not None: + body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext if self.databricks_workspace_url is not None: body['databricks_workspace_url'] = self.databricks_workspace_url return body @@ -597,6 +663,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig: """Deserializes the DatabricksModelServingConfig from a dictionary.""" return cls(databricks_api_token=d.get('databricks_api_token', None), + databricks_api_token_plaintext=d.get('databricks_api_token_plaintext', None), databricks_workspace_url=d.get('databricks_workspace_url', None)) @@ -849,6 +916,7 @@ class EndpointStateConfigUpdate(Enum): IN_PROGRESS = 'IN_PROGRESS' NOT_UPDATING = 'NOT_UPDATING' + UPDATE_CANCELED = 'UPDATE_CANCELED' UPDATE_FAILED = 'UPDATE_FAILED' @@ -924,8 +992,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse: class ExternalModel: provider: ExternalModelProvider """The name of the provider for the external model. Currently, the supported providers are - 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and - 'palm'.",""" + 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', + 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" name: str """The name of the external model.""" @@ -948,6 +1016,9 @@ class ExternalModel: databricks_model_serving_config: Optional[DatabricksModelServingConfig] = None """Databricks Model Serving Config. Only required if the provider is 'databricks-model-serving'.""" + google_cloud_vertex_ai_config: Optional[GoogleCloudVertexAiConfig] = None + """Google Cloud Vertex AI Config. Only required if the provider is 'google-cloud-vertex-ai'.""" + openai_config: Optional[OpenAiConfig] = None """OpenAI Config. Only required if the provider is 'openai'.""" @@ -963,6 +1034,8 @@ def as_dict(self) -> dict: if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict() if self.databricks_model_serving_config: body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict() + if self.google_cloud_vertex_ai_config: + body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config.as_dict() if self.name is not None: body['name'] = self.name if self.openai_config: body['openai_config'] = self.openai_config.as_dict() if self.palm_config: body['palm_config'] = self.palm_config.as_dict() @@ -979,6 +1052,8 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel: cohere_config=_from_dict(d, 'cohere_config', CohereConfig), databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config', DatabricksModelServingConfig), + google_cloud_vertex_ai_config=_from_dict(d, 'google_cloud_vertex_ai_config', + GoogleCloudVertexAiConfig), name=d.get('name', None), openai_config=_from_dict(d, 'openai_config', OpenAiConfig), palm_config=_from_dict(d, 'palm_config', PaLmConfig), @@ -988,14 +1063,15 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel: class ExternalModelProvider(Enum): """The name of the provider for the external model. 
Currently, the supported providers are - 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and - 'palm'.",""" + 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', + 'google-cloud-vertex-ai', 'openai', and 'palm'.",""" AI21LABS = 'ai21labs' AMAZON_BEDROCK = 'amazon-bedrock' ANTHROPIC = 'anthropic' COHERE = 'cohere' DATABRICKS_MODEL_SERVING = 'databricks-model-serving' + GOOGLE_CLOUD_VERTEX_AI = 'google-cloud-vertex-ai' OPENAI = 'openai' PALM = 'palm' @@ -1093,6 +1169,51 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo permission_levels=_repeated_dict(d, 'permission_levels', ServingEndpointPermissionsDescription)) +@dataclass +class GoogleCloudVertexAiConfig: + private_key: Optional[str] = None + """The Databricks secret key reference for a private key for the service account which has access + to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys]. + If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an + API key using one of the following fields: `private_key` or `private_key_plaintext` + + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" + + private_key_plaintext: Optional[str] = None + """The private key for the service account which has access to the Google Cloud Vertex AI Service + provided as a plaintext secret. See [Best practices for managing service account keys]. If you + prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an + API key using one of the following fields: `private_key` or `private_key_plaintext`. + + [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys""" + + project_id: Optional[str] = None + """This is the Google Cloud project id that the service account is associated with.""" + + region: Optional[str] = None + """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more + details. Some models are only available in specific regions. 
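
The new `GoogleCloudVertexAiConfig` slots into `ExternalModel` alongside the other provider configs. A minimal sketch of serving a Vertex AI model through an external-model endpoint; the endpoint name, model name, project, region, and secret reference are all placeholders:

```python
# Sketch only: all names and the secret reference below are placeholders.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (EndpointCoreConfigInput, ExternalModel,
                                            ExternalModelProvider,
                                            GoogleCloudVertexAiConfig, ServedEntityInput)

w = WorkspaceClient()

endpoint = w.serving_endpoints.create(
    name='gemini-chat',
    config=EndpointCoreConfigInput(served_entities=[
        ServedEntityInput(
            name='gemini-pro',
            external_model=ExternalModel(
                provider=ExternalModelProvider.GOOGLE_CLOUD_VERTEX_AI,
                name='gemini-pro',
                task='llm/v1/chat',
                google_cloud_vertex_ai_config=GoogleCloudVertexAiConfig(
                    # Databricks secret reference, not a raw key; use
                    # private_key_plaintext to inline a key instead.
                    private_key='{{secrets/my_scope/gcp_sa_key}}',
                    project_id='my-gcp-project',
                    region='us-central1'))),
    ])).result()  # block until the endpoint config update settles
print(endpoint.name, endpoint.state)
```
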
+ + [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations""" + + def as_dict(self) -> dict: + """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.private_key is not None: body['private_key'] = self.private_key + if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext + if self.project_id is not None: body['project_id'] = self.project_id + if self.region is not None: body['region'] = self.region + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: + """Deserializes the GoogleCloudVertexAiConfig from a dictionary.""" + return cls(private_key=d.get('private_key', None), + private_key_plaintext=d.get('private_key_plaintext', None), + project_id=d.get('project_id', None), + region=d.get('region', None)) + + @dataclass class ListAppDeploymentsResponse: app_deployments: Optional[List[AppDeployment]] = None @@ -1175,19 +1296,35 @@ class OpenAiConfig: """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID.""" microsoft_entra_client_secret: Optional[str] = None - """The Databricks secret key reference for the Microsoft Entra Client Secret that is only required - for Azure AD OpenAI.""" + """The Databricks secret key reference for a client secret used for Microsoft Entra ID + authentication. If you prefer to paste your client secret directly, see + `microsoft_entra_client_secret_plaintext`. You must provide an API key using one of the + following fields: `microsoft_entra_client_secret` or `microsoft_entra_client_secret_plaintext`.""" + + microsoft_entra_client_secret_plaintext: Optional[str] = None + """The client secret used for Microsoft Entra ID authentication provided as a plaintext string. If + you prefer to reference your key using Databricks Secrets, see `microsoft_entra_client_secret`. + You must provide an API key using one of the following fields: `microsoft_entra_client_secret` + or `microsoft_entra_client_secret_plaintext`.""" microsoft_entra_tenant_id: Optional[str] = None """This field is only required for Azure AD OpenAI and is the Microsoft Entra Tenant ID.""" openai_api_base: Optional[str] = None - """This is the base URL for the OpenAI API (default: "https://api.openai.com/v1"). For Azure - OpenAI, this field is required, and is the base URL for the Azure OpenAI API service provided by - Azure.""" + """This is a field to provide a customized base URl for the OpenAI API. For Azure OpenAI, this + field is required, and is the base URL for the Azure OpenAI API service provided by Azure. For + other OpenAI API types, this field is optional, and if left unspecified, the standard OpenAI + base URL is used.""" openai_api_key: Optional[str] = None - """The Databricks secret key reference for an OpenAI or Azure OpenAI API key.""" + """The Databricks secret key reference for an OpenAI API key using the OpenAI or Azure service. If + you prefer to paste your API key directly, see `openai_api_key_plaintext`. You must provide an + API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" + + openai_api_key_plaintext: Optional[str] = None + """The OpenAI API key using the OpenAI or Azure service provided as a plaintext string. If you + prefer to reference your key using Databricks Secrets, see `openai_api_key`. 
You must provide an + API key using one of the following fields: `openai_api_key` or `openai_api_key_plaintext`.""" openai_api_type: Optional[str] = None """This is an optional field to specify the type of OpenAI API to use. For Azure OpenAI, this field @@ -1213,10 +1350,14 @@ def as_dict(self) -> dict: body['microsoft_entra_client_id'] = self.microsoft_entra_client_id if self.microsoft_entra_client_secret is not None: body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret + if self.microsoft_entra_client_secret_plaintext is not None: + body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext if self.microsoft_entra_tenant_id is not None: body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key + if self.openai_api_key_plaintext is not None: + body['openai_api_key_plaintext'] = self.openai_api_key_plaintext if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version if self.openai_deployment_name is not None: @@ -1229,9 +1370,12 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig: """Deserializes the OpenAiConfig from a dictionary.""" return cls(microsoft_entra_client_id=d.get('microsoft_entra_client_id', None), microsoft_entra_client_secret=d.get('microsoft_entra_client_secret', None), + microsoft_entra_client_secret_plaintext=d.get('microsoft_entra_client_secret_plaintext', + None), microsoft_entra_tenant_id=d.get('microsoft_entra_tenant_id', None), openai_api_base=d.get('openai_api_base', None), openai_api_key=d.get('openai_api_key', None), + openai_api_key_plaintext=d.get('openai_api_key_plaintext', None), openai_api_type=d.get('openai_api_type', None), openai_api_version=d.get('openai_api_version', None), openai_deployment_name=d.get('openai_deployment_name', None), @@ -1240,19 +1384,29 @@ def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig: @dataclass class PaLmConfig: - palm_api_key: str - """The Databricks secret key reference for a PaLM API key.""" + palm_api_key: Optional[str] = None + """The Databricks secret key reference for a PaLM API key. If you prefer to paste your API key + directly, see `palm_api_key_plaintext`. You must provide an API key using one of the following + fields: `palm_api_key` or `palm_api_key_plaintext`.""" + + palm_api_key_plaintext: Optional[str] = None + """The PaLM API key provided as a plaintext string. If you prefer to reference your key using + Databricks Secrets, see `palm_api_key`. 
You must provide an API key using one of the following + fields: `palm_api_key` or `palm_api_key_plaintext`.""" def as_dict(self) -> dict: """Serializes the PaLmConfig into a dictionary suitable for use as a JSON request body.""" body = {} if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key + if self.palm_api_key_plaintext is not None: + body['palm_api_key_plaintext'] = self.palm_api_key_plaintext return body @classmethod def from_dict(cls, d: Dict[str, any]) -> PaLmConfig: """Deserializes the PaLmConfig from a dictionary.""" - return cls(palm_api_key=d.get('palm_api_key', None)) + return cls(palm_api_key=d.get('palm_api_key', None), + palm_api_key_plaintext=d.get('palm_api_key_plaintext', None)) @dataclass @@ -1584,11 +1738,10 @@ class ServedEntityInput: external_model: Optional[ExternalModel] = None """The external model to be served. NOTE: Only one of external_model and (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with - the latter set being used for custom model serving for a Databricks registered model. When an - external_model is present, the served entities list can only have one served_entity object. For - an existing endpoint with external_model, it can not be updated to an endpoint without + the latter set being used for custom model serving for a Databricks registered model. For an + existing endpoint with external_model, it cannot be updated to an endpoint without external_model. If the endpoint is created without external_model, users cannot update it to add - external_model later.""" + external_model later. The task type of all external models within an endpoint must be the same.""" instance_profile_arn: Optional[str] = None """ARN of the instance profile that the served entity uses to access AWS resources.""" @@ -2858,7 +3011,8 @@ def wait_get_serving_endpoint_not_updating( callback: Optional[Callable[[ServingEndpointDetailed], None]] = None) -> ServingEndpointDetailed: deadline = time.time() + timeout.total_seconds() target_states = (EndpointStateConfigUpdate.NOT_UPDATING, ) - failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, ) + failure_states = (EndpointStateConfigUpdate.UPDATE_FAILED, EndpointStateConfigUpdate.UPDATE_CANCELED, + ) status_message = 'polling...' 
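         # Note: UPDATE_CANCELED now counts as a terminal failure alongside
         # UPDATE_FAILED, so a canceled config update raises immediately
         # instead of polling until the deadline expires.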
attempt = 1 while time.time() < deadline: diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py index b02323848..d5593a1e1 100755 --- a/databricks/sdk/service/settings.py +++ b/databricks/sdk/service/settings.py @@ -147,7 +147,6 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWin class ClusterAutoRestartMessageMaintenanceWindowDayOfWeek(Enum): - DAY_OF_WEEK_UNSPECIFIED = 'DAY_OF_WEEK_UNSPECIFIED' FRIDAY = 'FRIDAY' MONDAY = 'MONDAY' SATURDAY = 'SATURDAY' @@ -192,7 +191,6 @@ class ClusterAutoRestartMessageMaintenanceWindowWeekDayFrequency(Enum): SECOND_AND_FOURTH_OF_MONTH = 'SECOND_AND_FOURTH_OF_MONTH' SECOND_OF_MONTH = 'SECOND_OF_MONTH' THIRD_OF_MONTH = 'THIRD_OF_MONTH' - WEEK_DAY_FREQUENCY_UNSPECIFIED = 'WEEK_DAY_FREQUENCY_UNSPECIFIED' @dataclass @@ -281,7 +279,7 @@ def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting: class ComplianceStandard(Enum): """Compliance stardard for SHIELD customers""" - COMPLIANCE_STANDARD_UNSPECIFIED = 'COMPLIANCE_STANDARD_UNSPECIFIED' + CANADA_PROTECTED_B = 'CANADA_PROTECTED_B' CYBER_ESSENTIAL_PLUS = 'CYBER_ESSENTIAL_PLUS' FEDRAMP_HIGH = 'FEDRAMP_HIGH' FEDRAMP_IL5 = 'FEDRAMP_IL5' @@ -293,6 +291,38 @@ class ComplianceStandard(Enum): PCI_DSS = 'PCI_DSS' +@dataclass +class Config: + email: Optional[EmailConfig] = None + + generic_webhook: Optional[GenericWebhookConfig] = None + + microsoft_teams: Optional[MicrosoftTeamsConfig] = None + + pagerduty: Optional[PagerdutyConfig] = None + + slack: Optional[SlackConfig] = None + + def as_dict(self) -> dict: + """Serializes the Config into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.email: body['email'] = self.email.as_dict() + if self.generic_webhook: body['generic_webhook'] = self.generic_webhook.as_dict() + if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams.as_dict() + if self.pagerduty: body['pagerduty'] = self.pagerduty.as_dict() + if self.slack: body['slack'] = self.slack.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Config: + """Deserializes the Config from a dictionary.""" + return cls(email=_from_dict(d, 'email', EmailConfig), + generic_webhook=_from_dict(d, 'generic_webhook', GenericWebhookConfig), + microsoft_teams=_from_dict(d, 'microsoft_teams', MicrosoftTeamsConfig), + pagerduty=_from_dict(d, 'pagerduty', PagerdutyConfig), + slack=_from_dict(d, 'slack', SlackConfig)) + + @dataclass class CreateIpAccessList: """Details required to configure a block list or allow list.""" @@ -367,6 +397,27 @@ def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest: return cls(name=d.get('name', None), region=d.get('region', None)) +@dataclass +class CreateNotificationDestinationRequest: + config: Optional[Config] = None + """The configuration for the notification destination. 
Must wrap EXACTLY one of the nested configs.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + def as_dict(self) -> dict: + """Serializes the CreateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest: + """Deserializes the CreateNotificationDestinationRequest from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), display_name=d.get('display_name', None)) + + @dataclass class CreateOboTokenRequest: """Configuration details for creating on-behalf tokens.""" @@ -705,6 +756,46 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingRes return cls(etag=d.get('etag', None)) +class DestinationType(Enum): + + EMAIL = 'EMAIL' + MICROSOFT_TEAMS = 'MICROSOFT_TEAMS' + PAGERDUTY = 'PAGERDUTY' + SLACK = 'SLACK' + WEBHOOK = 'WEBHOOK' + + +@dataclass +class EmailConfig: + addresses: Optional[List[str]] = None + """Email addresses to notify.""" + + def as_dict(self) -> dict: + """Serializes the EmailConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.addresses: body['addresses'] = [v for v in self.addresses] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EmailConfig: + """Deserializes the EmailConfig from a dictionary.""" + return cls(addresses=d.get('addresses', None)) + + +@dataclass +class Empty: + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + @dataclass class EnhancedSecurityMonitoring: """SHIELD feature: ESM""" @@ -920,6 +1011,48 @@ def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse: return cls(ip_access_list=_from_dict(d, 'ip_access_list', IpAccessListInfo)) +@dataclass +class GenericWebhookConfig: + password: Optional[str] = None + """[Input-Only][Optional] Password for webhook.""" + + password_set: Optional[bool] = None + """[Output-Only] Whether password is set.""" + + url: Optional[str] = None + """[Input-Only] URL for webhook.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + username: Optional[str] = None + """[Input-Only][Optional] Username for webhook.""" + + username_set: Optional[bool] = None + """[Output-Only] Whether username is set.""" + + def as_dict(self) -> dict: + """Serializes the GenericWebhookConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.password is not None: body['password'] = self.password + if self.password_set is not None: body['password_set'] = self.password_set + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + if self.username is not None: body['username'] = self.username + if self.username_set is not None: body['username_set'] = self.username_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig: + """Deserializes the GenericWebhookConfig from a dictionary.""" + return cls(password=d.get('password', None), + password_set=d.get('password_set', None), + url=d.get('url', None), + 
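+                   # The *_set booleans are output-only: reads report whether a
+                   # secret (URL, username, password) was configured without
+                   # echoing the value itself.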
url_set=d.get('url_set', None), + username=d.get('username', None), + username_set=d.get('username_set', None)) + + @dataclass class GetIpAccessListResponse: ip_access_list: Optional[IpAccessListInfo] = None @@ -1118,6 +1251,54 @@ def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsRe next_page_token=d.get('next_page_token', None)) +@dataclass +class ListNotificationDestinationsResponse: + next_page_token: Optional[str] = None + """Page token for next of results.""" + + results: Optional[List[ListNotificationDestinationsResult]] = None + + def as_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse: + """Deserializes the ListNotificationDestinationsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListNotificationDestinationsResult)) + + +@dataclass +class ListNotificationDestinationsResult: + destination_type: Optional[DestinationType] = None + """[Output-only] The type of the notification destination. The type can not be changed once set.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + """UUID identifying notification destination.""" + + def as_dict(self) -> dict: + """Serializes the ListNotificationDestinationsResult into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult: + """Deserializes the ListNotificationDestinationsResult from a dictionary.""" + return cls(destination_type=_enum(d, 'destination_type', DestinationType), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + @dataclass class ListPublicTokensResponse: token_infos: Optional[List[PublicTokenInfo]] = None @@ -1164,6 +1345,27 @@ class ListType(Enum): BLOCK = 'BLOCK' +@dataclass +class MicrosoftTeamsConfig: + url: Optional[str] = None + """[Input-Only] URL for Microsoft Teams.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + def as_dict(self) -> dict: + """Serializes the MicrosoftTeamsConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig: + """Deserializes the MicrosoftTeamsConfig from a dictionary.""" + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + @dataclass class NccAwsStableIpRule: """The stable AWS IP CIDR blocks. 
You can use these to configure the firewall of your resources to @@ -1450,6 +1652,61 @@ def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration: updated_time=d.get('updated_time', None)) +@dataclass +class NotificationDestination: + config: Optional[Config] = None + """The configuration for the notification destination. Will be exactly one of the nested configs. + Only returns for users with workspace admin permissions.""" + + destination_type: Optional[DestinationType] = None + """[Output-only] The type of the notification destination. The type can not be changed once set.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + """UUID identifying notification destination.""" + + def as_dict(self) -> dict: + """Serializes the NotificationDestination into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.destination_type is not None: body['destination_type'] = self.destination_type.value + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> NotificationDestination: + """Deserializes the NotificationDestination from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), + destination_type=_enum(d, 'destination_type', DestinationType), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + +@dataclass +class PagerdutyConfig: + integration_key: Optional[str] = None + """[Input-Only] Integration key for PagerDuty.""" + + integration_key_set: Optional[bool] = None + """[Output-Only] Whether integration key is set.""" + + def as_dict(self) -> dict: + """Serializes the PagerdutyConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.integration_key is not None: body['integration_key'] = self.integration_key + if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig: + """Deserializes the PagerdutyConfig from a dictionary.""" + return cls(integration_key=d.get('integration_key', None), + integration_key_set=d.get('integration_key_set', None)) + + @dataclass class PartitionId: """Partition by workspace or account""" @@ -1642,7 +1899,6 @@ class RestrictWorkspaceAdminsMessageStatus(Enum): ALLOW_ALL = 'ALLOW_ALL' RESTRICT_TOKENS_AND_JOB_RUN_AS = 'RESTRICT_TOKENS_AND_JOB_RUN_AS' - STATUS_UNSPECIFIED = 'STATUS_UNSPECIFIED' @dataclass @@ -1726,6 +1982,27 @@ def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse: return cls() +@dataclass +class SlackConfig: + url: Optional[str] = None + """[Input-Only] URL for Slack destination.""" + + url_set: Optional[bool] = None + """[Output-Only] Whether URL is set.""" + + def as_dict(self) -> dict: + """Serializes the SlackConfig into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.url is not None: body['url'] = self.url + if self.url_set is not None: body['url_set'] = self.url_set + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> SlackConfig: + """Deserializes the SlackConfig from a dictionary.""" + return cls(url=d.get('url', None), url_set=d.get('url_set', None)) + + @dataclass class StringMessage: value: Optional[str] = None @@ -2189,6 +2466,32 @@ def from_dict(cls, d: Dict[str, any]) -> 
UpdateIpAccessList: list_type=_enum(d, 'list_type', ListType)) +@dataclass +class UpdateNotificationDestinationRequest: + config: Optional[Config] = None + """The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.""" + + display_name: Optional[str] = None + """The display name for the notification destination.""" + + id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.config: body['config'] = self.config.as_dict() + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest: + """Deserializes the UpdateNotificationDestinationRequest from a dictionary.""" + return cls(config=_from_dict(d, 'config', Config), + display_name=d.get('display_name', None), + id=d.get('id', None)) + + @dataclass class UpdatePersonalComputeSettingRequest: """Details required to update a setting.""" @@ -3402,6 +3705,122 @@ def list_private_endpoint_rules( query['page_token'] = json['next_page_token'] +class NotificationDestinationsAPI: + """The notification destinations API lets you programmatically manage a workspace's notification + destinations. Notification destinations are used to send notifications for query alerts and jobs to + destinations outside of Databricks. Only workspace admins can create, update, and delete notification + destinations.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + config: Optional[Config] = None, + display_name: Optional[str] = None) -> NotificationDestination: + """Create a notification destination. + + Creates a notification destination. Requires workspace admin permissions. + + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. + + :returns: :class:`NotificationDestination` + """ + body = {} + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/notification-destinations', body=body, headers=headers) + return NotificationDestination.from_dict(res) + + def delete(self, id: str): + """Delete a notification destination. + + Deletes a notification destination. Requires workspace admin permissions. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/notification-destinations/{id}', headers=headers) + + def get(self, id: str) -> NotificationDestination: + """Get a notification destination. + + Gets a notification destination. + + :param id: str + + :returns: :class:`NotificationDestination` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/notification-destinations/{id}', headers=headers) + return NotificationDestination.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListNotificationDestinationsResult]: + """List notification destinations. + + Lists notification destinations. 
+ + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListNotificationDestinationsResult` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/notification-destinations', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListNotificationDestinationsResult.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, + id: str, + *, + config: Optional[Config] = None, + display_name: Optional[str] = None) -> NotificationDestination: + """Update a notification destination. + + Updates a notification destination. Requires workspace admin permissions. At least one field is + required in the request body. + + :param id: str + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. + + :returns: :class:`NotificationDestination` + """ + body = {} + if config is not None: body['config'] = config.as_dict() + if display_name is not None: body['display_name'] = display_name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/notification-destinations/{id}', body=body, headers=headers) + return NotificationDestination.from_dict(res) + + class PersonalComputeAPI: """The Personal Compute enablement setting lets you control which users can use the Personal Compute default policy to create compute resources. By default all users in all workspaces have access (ON), but you can diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index d716fad93..fc411ff83 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -788,6 +788,7 @@ class Privilege(Enum): CREATE_VIEW = 'CREATE_VIEW' CREATE_VOLUME = 'CREATE_VOLUME' EXECUTE = 'EXECUTE' + MANAGE = 'MANAGE' MANAGE_ALLOWLIST = 'MANAGE_ALLOWLIST' MODIFY = 'MODIFY' READ_FILES = 'READ_FILES' diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index b363ab7d2..bcb46bb50 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -46,69 +46,206 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControl: @dataclass class Alert: - created_at: Optional[str] = None - """Timestamp when the alert was created.""" + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" - id: Optional[str] = None - """Alert ID.""" + create_time: Optional[str] = None + """The timestamp indicating when the alert was created.""" - last_triggered_at: Optional[str] = None - """Timestamp when the alert was last triggered.""" + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - name: Optional[str] = None - """Name of the alert.""" + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. 
+ + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - options: Optional[AlertOptions] = None - """Alert configuration options.""" + display_name: Optional[str] = None + """The display name of the alert.""" - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" + id: Optional[str] = None + """UUID identifying the alert.""" - query: Optional[AlertQuery] = None + lifecycle_state: Optional[LifecycleState] = None + """The workspace state of the alert. Used for tracking trashed status.""" - rearm: Optional[int] = None - """Number of seconds after being triggered before the alert rearms itself and can be triggered - again. If `null`, alert will never be triggered again.""" + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + parent_path: Optional[str] = None + """The workspace path of the folder containing the alert.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" state: Optional[AlertState] = None - """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not + yet been evaluated or ran into an error during the last evaluation.""" - updated_at: Optional[str] = None - """Timestamp when the alert was last updated.""" + trigger_time: Optional[str] = None + """Timestamp when the alert was last triggered, if the alert has been triggered before.""" - user: Optional[User] = None + update_time: Optional[str] = None + """The timestamp indicating when the alert was updated.""" def as_dict(self) -> dict: """Serializes the Alert into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.query: body['query'] = self.query.as_dict() - if self.rearm is not None: body['rearm'] = self.rearm + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger if self.state is not None: body['state'] = self.state.value - if self.updated_at is not None: 
body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user.as_dict() + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Alert: """Deserializes the Alert from a dictionary.""" - return cls(created_at=d.get('created_at', None), + return cls(condition=_from_dict(d, 'condition', AlertCondition), + create_time=d.get('create_time', None), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), id=d.get('id', None), - last_triggered_at=d.get('last_triggered_at', None), - name=d.get('name', None), - options=_from_dict(d, 'options', AlertOptions), - parent=d.get('parent', None), - query=_from_dict(d, 'query', AlertQuery), - rearm=d.get('rearm', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parent_path=d.get('parent_path', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None), state=_enum(d, 'state', AlertState), - updated_at=d.get('updated_at', None), - user=_from_dict(d, 'user', User)) + trigger_time=d.get('trigger_time', None), + update_time=d.get('update_time', None)) + + +@dataclass +class AlertCondition: + empty_result_state: Optional[AlertState] = None + """Alert state if result is empty.""" + + op: Optional[AlertOperator] = None + """Operator used for comparison in alert evaluation.""" + + operand: Optional[AlertConditionOperand] = None + """Name of the column from the query result to use for comparison in alert evaluation.""" + + threshold: Optional[AlertConditionThreshold] = None + """Threshold value used for comparison in alert evaluation.""" + + def as_dict(self) -> dict: + """Serializes the AlertCondition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state.value + if self.op is not None: body['op'] = self.op.value + if self.operand: body['operand'] = self.operand.as_dict() + if self.threshold: body['threshold'] = self.threshold.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertCondition: + """Deserializes the AlertCondition from a dictionary.""" + return cls(empty_result_state=_enum(d, 'empty_result_state', AlertState), + op=_enum(d, 'op', AlertOperator), + operand=_from_dict(d, 'operand', AlertConditionOperand), + threshold=_from_dict(d, 'threshold', AlertConditionThreshold)) + + +@dataclass +class AlertConditionOperand: + column: Optional[AlertOperandColumn] = None + + def as_dict(self) -> dict: + """Serializes the AlertConditionOperand into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.column: body['column'] = self.column.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand: + """Deserializes the AlertConditionOperand from a dictionary.""" + return cls(column=_from_dict(d, 'column', AlertOperandColumn)) + + +@dataclass +class AlertConditionThreshold: + value: Optional[AlertOperandValue] = None + + def as_dict(self) -> dict: + """Serializes the AlertConditionThreshold into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value: body['value'] = self.value.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> 
AlertConditionThreshold: + """Deserializes the AlertConditionThreshold from a dictionary.""" + return cls(value=_from_dict(d, 'value', AlertOperandValue)) + + +@dataclass +class AlertOperandColumn: + name: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the AlertOperandColumn into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn: + """Deserializes the AlertOperandColumn from a dictionary.""" + return cls(name=d.get('name', None)) + + +@dataclass +class AlertOperandValue: + bool_value: Optional[bool] = None + + double_value: Optional[float] = None + + string_value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the AlertOperandValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bool_value is not None: body['bool_value'] = self.bool_value + if self.double_value is not None: body['double_value'] = self.double_value + if self.string_value is not None: body['string_value'] = self.string_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue: + """Deserializes the AlertOperandValue from a dictionary.""" + return cls(bool_value=d.get('bool_value', None), + double_value=d.get('double_value', None), + string_value=d.get('string_value', None)) + + +class AlertOperator(Enum): + + EQUAL = 'EQUAL' + GREATER_THAN = 'GREATER_THAN' + GREATER_THAN_OR_EQUAL = 'GREATER_THAN_OR_EQUAL' + IS_NULL = 'IS_NULL' + LESS_THAN = 'LESS_THAN' + LESS_THAN_OR_EQUAL = 'LESS_THAN_OR_EQUAL' + NOT_EQUAL = 'NOT_EQUAL' @dataclass @@ -259,12 +396,10 @@ def from_dict(cls, d: Dict[str, any]) -> AlertQuery: class AlertState(Enum): - """State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated - and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" - OK = 'ok' - TRIGGERED = 'triggered' - UNKNOWN = 'unknown' + OK = 'OK' + TRIGGERED = 'TRIGGERED' + UNKNOWN = 'UNKNOWN' @dataclass @@ -338,10 +473,10 @@ def from_dict(cls, d: Dict[str, any]) -> Channel: @dataclass class ChannelInfo: - """Channel information for the SQL warehouse at the time of query execution""" + """Details about a Channel.""" dbsql_version: Optional[str] = None - """DBSQL Version the channel is mapped to""" + """DB SQL Version the Channel is mapped to.""" name: Optional[ChannelName] = None """Name of the channel""" @@ -360,7 +495,6 @@ def from_dict(cls, d: Dict[str, any]) -> ChannelInfo: class ChannelName(Enum): - """Name of the channel""" CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT' CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM' @@ -369,6 +503,29 @@ class ChannelName(Enum): CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED' +@dataclass +class ClientCallContext: + """Client code that triggered the request""" + + file_name: Optional[EncodedText] = None + """File name that contains the last line that triggered the request.""" + + line_number: Optional[int] = None + """Last line number within a file or notebook cell that triggered the request.""" + + def as_dict(self) -> dict: + """Serializes the ClientCallContext into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.file_name: body['file_name'] = self.file_name.as_dict() + if self.line_number is not None: body['line_number'] = self.line_number + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClientCallContext: + """Deserializes the ClientCallContext from a dictionary.""" + return cls(file_name=_from_dict(d, 'file_name', EncodedText), line_number=d.get('line_number', None)) + + @dataclass class ColumnInfo: name: Optional[str] = None @@ -443,6 +600,68 @@ class ColumnInfoTypeName(Enum): USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' +@dataclass +class ContextFilter: + dbsql_alert_id: Optional[str] = None + """Databricks SQL Alert id""" + + dbsql_dashboard_id: Optional[str] = None + """Databricks SQL Dashboard id""" + + dbsql_query_id: Optional[str] = None + """Databricks SQL Query id""" + + dbsql_session_id: Optional[str] = None + """Databricks SQL Query session id""" + + job_id: Optional[str] = None + """Databricks Workflows id""" + + job_run_id: Optional[str] = None + """Databricks Workflows task run id""" + + lakeview_dashboard_id: Optional[str] = None + """Databricks Lakeview Dashboard id""" + + notebook_cell_run_id: Optional[str] = None + """Databricks Notebook runnableCommandId""" + + notebook_id: Optional[str] = None + """Databricks Notebook id""" + + statement_ids: Optional[List[str]] = None + """Databricks Query History statement ids.""" + + def as_dict(self) -> dict: + """Serializes the ContextFilter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id + if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id + if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id + if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id + if self.lakeview_dashboard_id is not None: 
body['lakeview_dashboard_id'] = self.lakeview_dashboard_id + if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ContextFilter: + """Deserializes the ContextFilter from a dictionary.""" + return cls(dbsql_alert_id=d.get('dbsql_alert_id', None), + dbsql_dashboard_id=d.get('dbsql_dashboard_id', None), + dbsql_query_id=d.get('dbsql_query_id', None), + dbsql_session_id=d.get('dbsql_session_id', None), + job_id=d.get('job_id', None), + job_run_id=d.get('job_run_id', None), + lakeview_dashboard_id=d.get('lakeview_dashboard_id', None), + notebook_cell_run_id=d.get('notebook_cell_run_id', None), + notebook_id=d.get('notebook_id', None), + statement_ids=d.get('statement_ids', None)) + + @dataclass class CreateAlert: name: str @@ -482,98 +701,304 @@ def from_dict(cls, d: Dict[str, any]) -> CreateAlert: @dataclass -class CreateWarehouseRequest: - auto_stop_mins: Optional[int] = None - """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) - before it is automatically stopped. - - Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. - - Defaults to 120 mins""" +class CreateAlertRequest: + alert: Optional[CreateAlertRequestAlert] = None - channel: Optional[Channel] = None - """Channel Details""" + def as_dict(self) -> dict: + """Serializes the CreateAlertRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert: body['alert'] = self.alert.as_dict() + return body - cluster_size: Optional[str] = None - """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows - you to run larger queries on it. If you want to increase the number of concurrent queries, - please tune max_num_clusters. - - Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large - - 4X-Large""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest: + """Deserializes the CreateAlertRequest from a dictionary.""" + return cls(alert=_from_dict(d, 'alert', CreateAlertRequestAlert)) - creator_name: Optional[str] = None - """warehouse creator name""" - enable_photon: Optional[bool] = None - """Configures whether the warehouse should use Photon optimized clusters. +@dataclass +class CreateAlertRequestAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. - Defaults to false.""" + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - enable_serverless_compute: Optional[bool] = None - """Configures whether the warehouse should use serverless compute""" + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" - instance_profile_arn: Optional[str] = None - """Deprecated. 
Instance profile used to pass IAM role to the cluster""" + display_name: Optional[str] = None + """The display name of the alert.""" - max_num_clusters: Optional[int] = None - """Maximum number of clusters that the autoscaler will create to handle concurrent queries. - - Supported values: - Must be >= min_num_clusters - Must be <= 30. - - Defaults to min_clusters if unset.""" + parent_path: Optional[str] = None + """The workspace path of the folder containing the alert.""" - min_num_clusters: Optional[int] = None - """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing - this will ensure that a larger number of clusters are always running and therefore may reduce - the cold start time for new queries. This is similar to reserved vs. revocable cores in a - resource manager. - - Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) - - Defaults to 1""" + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" - name: Optional[str] = None - """Logical name for the cluster. - - Supported values: - Must be unique within an org. - Must be less than 100 characters.""" + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" - spot_instance_policy: Optional[SpotInstancePolicy] = None - """Configurations whether the warehouse should use spot instances.""" + def as_dict(self) -> dict: + """Serializes the CreateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body - tags: Optional[EndpointTags] = None - """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS - volumes) associated with this SQL warehouse. - - Supported values: - Number of tags < 45.""" + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert: + """Deserializes the CreateAlertRequestAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), + parent_path=d.get('parent_path', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None)) - warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None - """Warehouse type: `PRO` or `CLASSIC`. 
If you want to use serverless compute, you must set to `PRO` - and also set the field `enable_serverless_compute` to `true`.""" + +@dataclass +class CreateQueryRequest: + query: Optional[CreateQueryRequestQuery] = None def as_dict(self) -> dict: - """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" + """Serializes the CreateQueryRequest into a dictionary suitable for use as a JSON request body.""" body = {} - if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins - if self.channel: body['channel'] = self.channel.as_dict() - if self.cluster_size is not None: body['cluster_size'] = self.cluster_size - if self.creator_name is not None: body['creator_name'] = self.creator_name - if self.enable_photon is not None: body['enable_photon'] = self.enable_photon - if self.enable_serverless_compute is not None: - body['enable_serverless_compute'] = self.enable_serverless_compute - if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn - if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters - if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters - if self.name is not None: body['name'] = self.name - if self.spot_instance_policy is not None: - body['spot_instance_policy'] = self.spot_instance_policy.value - if self.tags: body['tags'] = self.tags.as_dict() - if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + if self.query: body['query'] = self.query.as_dict() return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest: - """Deserializes the CreateWarehouseRequest from a dictionary.""" + def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest: + """Deserializes the CreateQueryRequest from a dictionary.""" + return cls(query=_from_dict(d, 'query', CreateQueryRequestQuery)) + + +@dataclass +class CreateQueryRequestQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + parent_path: Optional[str] = None + """Workspace path of the workspace folder containing the object.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the CreateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.parameters: 
body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery: + """Deserializes the CreateQueryRequestQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + parent_path=d.get('parent_path', None), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + warehouse_id=d.get('warehouse_id', None)) + + +@dataclass +class CreateVisualizationRequest: + visualization: Optional[CreateVisualizationRequestVisualization] = None + + def as_dict(self) -> dict: + """Serializes the CreateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.visualization: body['visualization'] = self.visualization.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest: + """Deserializes the CreateVisualizationRequest from a dictionary.""" + return cls(visualization=_from_dict(d, 'visualization', CreateVisualizationRequestVisualization)) + + +@dataclass +class CreateVisualizationRequestVisualization: + display_name: Optional[str] = None + """The display name of the visualization.""" + + query_id: Optional[str] = None + """UUID of the query that the visualization is attached to.""" + + serialized_options: Optional[str] = None + """The visualization options vary widely from one visualization type to the next and are + unsupported. Databricks does not recommend modifying visualization options directly.""" + + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. 
Databricks does not recommend modifying the visualization query plan directly.""" + + type: Optional[str] = None + """The type of visualization: counter, table, funnel, and so on.""" + + def as_dict(self) -> dict: + """Serializes the CreateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization: + """Deserializes the CreateVisualizationRequestVisualization from a dictionary.""" + return cls(display_name=d.get('display_name', None), + query_id=d.get('query_id', None), + serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), + type=d.get('type', None)) + + +@dataclass +class CreateWarehouseRequest: + auto_stop_mins: Optional[int] = None + """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) + before it is automatically stopped. + + Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop. + + Defaults to 120 mins""" + + channel: Optional[Channel] = None + """Channel Details""" + + cluster_size: Optional[str] = None + """Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows + you to run larger queries on it. If you want to increase the number of concurrent queries, + please tune max_num_clusters. + + Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large + - 4X-Large""" + + creator_name: Optional[str] = None + """warehouse creator name""" + + enable_photon: Optional[bool] = None + """Configures whether the warehouse should use Photon optimized clusters. + + Defaults to false.""" + + enable_serverless_compute: Optional[bool] = None + """Configures whether the warehouse should use serverless compute""" + + instance_profile_arn: Optional[str] = None + """Deprecated. Instance profile used to pass IAM role to the cluster""" + + max_num_clusters: Optional[int] = None + """Maximum number of clusters that the autoscaler will create to handle concurrent queries. + + Supported values: - Must be >= min_num_clusters - Must be <= 30. + + Defaults to min_clusters if unset.""" + + min_num_clusters: Optional[int] = None + """Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing + this will ensure that a larger number of clusters are always running and therefore may reduce + the cold start time for new queries. This is similar to reserved vs. revocable cores in a + resource manager. + + Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30) + + Defaults to 1""" + + name: Optional[str] = None + """Logical name for the cluster. + + Supported values: - Must be unique within an org. 
- Must be less than 100 characters.""" + + spot_instance_policy: Optional[SpotInstancePolicy] = None + """Configurations whether the warehouse should use spot instances.""" + + tags: Optional[EndpointTags] = None + """A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS + volumes) associated with this SQL warehouse. + + Supported values: - Number of tags < 45.""" + + warehouse_type: Optional[CreateWarehouseRequestWarehouseType] = None + """Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` + and also set the field `enable_serverless_compute` to `true`.""" + + def as_dict(self) -> dict: + """Serializes the CreateWarehouseRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins + if self.channel: body['channel'] = self.channel.as_dict() + if self.cluster_size is not None: body['cluster_size'] = self.cluster_size + if self.creator_name is not None: body['creator_name'] = self.creator_name + if self.enable_photon is not None: body['enable_photon'] = self.enable_photon + if self.enable_serverless_compute is not None: + body['enable_serverless_compute'] = self.enable_serverless_compute + if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn + if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters + if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters + if self.name is not None: body['name'] = self.name + if self.spot_instance_policy is not None: + body['spot_instance_policy'] = self.spot_instance_policy.value + if self.tags: body['tags'] = self.tags.as_dict() + if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest: + """Deserializes the CreateWarehouseRequest from a dictionary.""" return cls(auto_stop_mins=d.get('auto_stop_mins', None), channel=_from_dict(d, 'channel', Channel), cluster_size=d.get('cluster_size', None), @@ -914,6 +1339,121 @@ def from_dict(cls, d: Dict[str, any]) -> DataSource: warehouse_id=d.get('warehouse_id', None)) +class DatePrecision(Enum): + + DAY_PRECISION = 'DAY_PRECISION' + MINUTE_PRECISION = 'MINUTE_PRECISION' + SECOND_PRECISION = 'SECOND_PRECISION' + + +@dataclass +class DateRange: + start: str + + end: str + + def as_dict(self) -> dict: + """Serializes the DateRange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.end is not None: body['end'] = self.end + if self.start is not None: body['start'] = self.start + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateRange: + """Deserializes the DateRange from a dictionary.""" + return cls(end=d.get('end', None), start=d.get('start', None)) + + +@dataclass +class DateRangeValue: + date_range_value: Optional[DateRange] = None + """Manually specified date-time range value.""" + + dynamic_date_range_value: Optional[DateRangeValueDynamicDateRange] = None + """Dynamic date-time range value based on current date-time.""" + + precision: Optional[DatePrecision] = None + """Date-time precision to format the value into when the query is run. 
Defaults to DAY_PRECISION + (YYYY-MM-DD).""" + + start_day_of_week: Optional[int] = None + + def as_dict(self) -> dict: + """Serializes the DateRangeValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.dynamic_date_range_value is not None: + body['dynamic_date_range_value'] = self.dynamic_date_range_value.value + if self.precision is not None: body['precision'] = self.precision.value + if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateRangeValue: + """Deserializes the DateRangeValue from a dictionary.""" + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRange), + dynamic_date_range_value=_enum(d, 'dynamic_date_range_value', + DateRangeValueDynamicDateRange), + precision=_enum(d, 'precision', DatePrecision), + start_day_of_week=d.get('start_day_of_week', None)) + + +class DateRangeValueDynamicDateRange(Enum): + + LAST_12_MONTHS = 'LAST_12_MONTHS' + LAST_14_DAYS = 'LAST_14_DAYS' + LAST_24_HOURS = 'LAST_24_HOURS' + LAST_30_DAYS = 'LAST_30_DAYS' + LAST_60_DAYS = 'LAST_60_DAYS' + LAST_7_DAYS = 'LAST_7_DAYS' + LAST_8_HOURS = 'LAST_8_HOURS' + LAST_90_DAYS = 'LAST_90_DAYS' + LAST_HOUR = 'LAST_HOUR' + LAST_MONTH = 'LAST_MONTH' + LAST_WEEK = 'LAST_WEEK' + LAST_YEAR = 'LAST_YEAR' + THIS_MONTH = 'THIS_MONTH' + THIS_WEEK = 'THIS_WEEK' + THIS_YEAR = 'THIS_YEAR' + TODAY = 'TODAY' + YESTERDAY = 'YESTERDAY' + + +@dataclass +class DateValue: + date_value: Optional[str] = None + """Manually specified date-time value.""" + + dynamic_date_value: Optional[DateValueDynamicDate] = None + """Dynamic date-time value based on current date-time.""" + + precision: Optional[DatePrecision] = None + """Date-time precision to format the value into when the query is run. 
Defaults to DAY_PRECISION + (YYYY-MM-DD).""" + + def as_dict(self) -> dict: + """Serializes the DateValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_value is not None: body['date_value'] = self.date_value + if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value.value + if self.precision is not None: body['precision'] = self.precision.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DateValue: + """Deserializes the DateValue from a dictionary.""" + return cls(date_value=d.get('date_value', None), + dynamic_date_value=_enum(d, 'dynamic_date_value', DateValueDynamicDate), + precision=_enum(d, 'precision', DatePrecision)) + + +class DateValueDynamicDate(Enum): + + NOW = 'NOW' + YESTERDAY = 'YESTERDAY' + + @dataclass class DeleteResponse: @@ -1141,6 +1681,50 @@ def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse: return cls() +@dataclass +class Empty: + """Represents an empty message, similar to google.protobuf.Empty, which is not available in the + firm right now.""" + + def as_dict(self) -> dict: + """Serializes the Empty into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> Empty: + """Deserializes the Empty from a dictionary.""" + return cls() + + +@dataclass +class EncodedText: + encoding: Optional[EncodedTextEncoding] = None + """Carry text data in different form.""" + + text: Optional[str] = None + """text data""" + + def as_dict(self) -> dict: + """Serializes the EncodedText into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.encoding is not None: body['encoding'] = self.encoding.value + if self.text is not None: body['text'] = self.text + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EncodedText: + """Deserializes the EncodedText from a dictionary.""" + return cls(encoding=_enum(d, 'encoding', EncodedTextEncoding), text=d.get('text', None)) + + +class EncodedTextEncoding(Enum): + """Carry text data in different form.""" + + BASE64 = 'BASE64' + PLAIN = 'PLAIN' + + @dataclass class EndpointConfPair: key: Optional[str] = None @@ -1385,6 +1969,33 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTags: return cls(custom_tags=_repeated_dict(d, 'custom_tags', EndpointTagPair)) +@dataclass +class EnumValue: + enum_options: Optional[str] = None + """List of valid query parameter values, newline delimited.""" + + multi_values_options: Optional[MultiValuesOptions] = None + """If specified, allows multiple values to be selected for this parameter.""" + + values: Optional[List[str]] = None + """List of selected query parameter values.""" + + def as_dict(self) -> dict: + """Serializes the EnumValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.enum_options is not None: body['enum_options'] = self.enum_options + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.values: body['values'] = [v for v in self.values] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnumValue: + """Deserializes the EnumValue from a dictionary.""" + return cls(enum_options=d.get('enum_options', None), + multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), + values=d.get('values', None)) + + @dataclass class ExecuteStatementRequest: statement: str @@ -1567,47 +2178,10 @@ class 
ExecuteStatementRequestOnWaitTimeout(Enum): @dataclass -class ExecuteStatementResponse: - manifest: Optional[ResultManifest] = None - """The result manifest provides schema and metadata for the result set.""" - - result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. Currently - only a single link is returned.)""" - - statement_id: Optional[str] = None - """The statement ID is returned upon successfully submitting a SQL statement, and is a required - reference for all subsequent calls.""" - - status: Optional[StatementStatus] = None - """The status response includes execution state and if relevant, error information.""" - - def as_dict(self) -> dict: - """Serializes the ExecuteStatementResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.result: body['result'] = self.result.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementResponse: - """Deserializes the ExecuteStatementResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), - result=_from_dict(d, 'result', ResultData), - statement_id=d.get('statement_id', None), - status=_from_dict(d, 'status', StatementStatus)) - - -@dataclass -class ExternalLink: - byte_count: Optional[int] = None - """The number of bytes in the result chunk. This field is not available when using `INLINE` - disposition.""" +class ExternalLink: + byte_count: Optional[int] = None + """The number of bytes in the result chunk. This field is not available when using `INLINE` + disposition.""" chunk_index: Optional[int] = None """The position within the sequence of result set chunks.""" @@ -1706,43 +2280,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetResponse: object_type=_enum(d, 'object_type', ObjectType)) -@dataclass -class GetStatementResponse: - manifest: Optional[ResultManifest] = None - """The result manifest provides schema and metadata for the result set.""" - - result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. 
Currently - only a single link is returned.)""" - - statement_id: Optional[str] = None - """The statement ID is returned upon successfully submitting a SQL statement, and is a required - reference for all subsequent calls.""" - - status: Optional[StatementStatus] = None - """The status response includes execution state and if relevant, error information.""" - - def as_dict(self) -> dict: - """Serializes the GetStatementResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.manifest: body['manifest'] = self.manifest.as_dict() - if self.result: body['result'] = self.result.as_dict() - if self.statement_id is not None: body['statement_id'] = self.statement_id - if self.status: body['status'] = self.status.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> GetStatementResponse: - """Deserializes the GetStatementResponse from a dictionary.""" - return cls(manifest=_from_dict(d, 'manifest', ResultManifest), - result=_from_dict(d, 'result', ResultData), - statement_id=d.get('statement_id', None), - status=_from_dict(d, 'status', StatementStatus)) - - @dataclass class GetWarehousePermissionLevelsResponse: permission_levels: Optional[List[WarehousePermissionsDescription]] = None @@ -1988,6 +2525,386 @@ class GetWorkspaceWarehouseConfigResponseSecurityPolicy(Enum): PASSTHROUGH = 'PASSTHROUGH' +@dataclass +class LegacyAlert: + created_at: Optional[str] = None + """Timestamp when the alert was created.""" + + id: Optional[str] = None + """Alert ID.""" + + last_triggered_at: Optional[str] = None + """Timestamp when the alert was last triggered.""" + + name: Optional[str] = None + """Name of the alert.""" + + options: Optional[AlertOptions] = None + """Alert configuration options.""" + + parent: Optional[str] = None + """The identifier of the workspace folder containing the object.""" + + query: Optional[AlertQuery] = None + + rearm: Optional[int] = None + """Number of seconds after being triggered before the alert rearms itself and can be triggered + again. If `null`, alert will never be triggered again.""" + + state: Optional[LegacyAlertState] = None + """State of the alert. 
Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated + and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + + updated_at: Optional[str] = None + """Timestamp when the alert was last updated.""" + + user: Optional[User] = None + + def as_dict(self) -> dict: + """Serializes the LegacyAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.id is not None: body['id'] = self.id + if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.query: body['query'] = self.query.as_dict() + if self.rearm is not None: body['rearm'] = self.rearm + if self.state is not None: body['state'] = self.state.value + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyAlert: + """Deserializes the LegacyAlert from a dictionary.""" + return cls(created_at=d.get('created_at', None), + id=d.get('id', None), + last_triggered_at=d.get('last_triggered_at', None), + name=d.get('name', None), + options=_from_dict(d, 'options', AlertOptions), + parent=d.get('parent', None), + query=_from_dict(d, 'query', AlertQuery), + rearm=d.get('rearm', None), + state=_enum(d, 'state', LegacyAlertState), + updated_at=d.get('updated_at', None), + user=_from_dict(d, 'user', User)) + + +class LegacyAlertState(Enum): + """State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated + and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions).""" + + OK = 'ok' + TRIGGERED = 'triggered' + UNKNOWN = 'unknown' + + +@dataclass +class LegacyQuery: + can_edit: Optional[bool] = None + """Describes whether the authenticated user is allowed to edit the definition of this query.""" + + created_at: Optional[str] = None + """The timestamp when this query was created.""" + + data_source_id: Optional[str] = None + """Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + id: Optional[str] = None + """Query ID.""" + + is_archived: Optional[bool] = None + """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear + in search results. If this boolean is `true`, the `options` property for this query includes a + `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days.""" + + is_draft: Optional[bool] = None + """Whether the query is a draft. Draft queries only appear in list views for their owners. + Visualizations from draft queries cannot appear on dashboards.""" + + is_favorite: Optional[bool] = None + """Whether this query object appears in the current user's favorites list. This flag determines + whether the star icon for favorites is selected.""" + + is_safe: Optional[bool] = None + """Text parameter types are not safe from SQL injection for all types of data source. 
Set this + Boolean parameter to `true` if a query either does not use any text type parameters or uses a + data source type where text type parameters are handled safely.""" + + last_modified_by: Optional[User] = None + + last_modified_by_id: Optional[int] = None + """The ID of the user who last saved changes to this query.""" + + latest_query_data_id: Optional[str] = None + """If there is a cached result for this query and user, this field includes the query result ID. If + this query uses parameters, this field is always null.""" + + name: Optional[str] = None + """The title of this query that appears in list views, widget headings, and on the query page.""" + + options: Optional[QueryOptions] = None + + parent: Optional[str] = None + """The identifier of the workspace folder containing the object.""" + + permission_tier: Optional[PermissionLevel] = None + """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query + * `CAN_MANAGE`: Can manage the query""" + + query: Optional[str] = None + """The text of the query to be run.""" + + query_hash: Optional[str] = None + """A SHA-256 hash of the query text along with the authenticated user ID.""" + + run_as_role: Optional[RunAsRole] = None + """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + + tags: Optional[List[str]] = None + + updated_at: Optional[str] = None + """The timestamp at which this query was last updated.""" + + user: Optional[User] = None + + user_id: Optional[int] = None + """The ID of the user who owns the query.""" + + visualizations: Optional[List[LegacyVisualization]] = None + + def as_dict(self) -> dict: + """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.can_edit is not None: body['can_edit'] = self.can_edit + if self.created_at is not None: body['created_at'] = self.created_at + if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.is_archived is not None: body['is_archived'] = self.is_archived + if self.is_draft is not None: body['is_draft'] = self.is_draft + if self.is_favorite is not None: body['is_favorite'] = self.is_favorite + if self.is_safe is not None: body['is_safe'] = self.is_safe + if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() + if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id + if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options.as_dict() + if self.parent is not None: body['parent'] = self.parent + if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value + if self.query is not None: body['query'] = self.query + if self.query_hash is not None: body['query_hash'] = self.query_hash + if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.tags: body['tags'] = [v for v in self.tags] + if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.user: body['user'] = self.user.as_dict() + if self.user_id is not None: body['user_id'] = self.user_id + if self.visualizations: body['visualizations'] = [v.as_dict() for v in 
self.visualizations] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyQuery: + """Deserializes the LegacyQuery from a dictionary.""" + return cls(can_edit=d.get('can_edit', None), + created_at=d.get('created_at', None), + data_source_id=d.get('data_source_id', None), + description=d.get('description', None), + id=d.get('id', None), + is_archived=d.get('is_archived', None), + is_draft=d.get('is_draft', None), + is_favorite=d.get('is_favorite', None), + is_safe=d.get('is_safe', None), + last_modified_by=_from_dict(d, 'last_modified_by', User), + last_modified_by_id=d.get('last_modified_by_id', None), + latest_query_data_id=d.get('latest_query_data_id', None), + name=d.get('name', None), + options=_from_dict(d, 'options', QueryOptions), + parent=d.get('parent', None), + permission_tier=_enum(d, 'permission_tier', PermissionLevel), + query=d.get('query', None), + query_hash=d.get('query_hash', None), + run_as_role=_enum(d, 'run_as_role', RunAsRole), + tags=d.get('tags', None), + updated_at=d.get('updated_at', None), + user=_from_dict(d, 'user', User), + user_id=d.get('user_id', None), + visualizations=_repeated_dict(d, 'visualizations', LegacyVisualization)) + + +@dataclass +class LegacyVisualization: + """The visualization description API changes frequently and is unsupported. You can duplicate a + visualization by copying description objects received _from the API_ and then using them to + create a new one with a POST request to the same endpoint. Databricks does not recommend + constructing ad-hoc visualizations entirely in JSON.""" + + created_at: Optional[str] = None + + description: Optional[str] = None + """A short description of this visualization. This is not displayed in the UI.""" + + id: Optional[str] = None + """The UUID for this visualization.""" + + name: Optional[str] = None + """The name of the visualization that appears on dashboards and the query screen.""" + + options: Optional[Any] = None + """The options object varies widely from one visualization type to the next and is unsupported. 
+ Databricks does not recommend modifying visualization settings in JSON.""" + + query: Optional[LegacyQuery] = None + + type: Optional[str] = None + """The type of visualization: chart, table, pivot table, and so on.""" + + updated_at: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the LegacyVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.created_at is not None: body['created_at'] = self.created_at + if self.description is not None: body['description'] = self.description + if self.id is not None: body['id'] = self.id + if self.name is not None: body['name'] = self.name + if self.options: body['options'] = self.options + if self.query: body['query'] = self.query.as_dict() + if self.type is not None: body['type'] = self.type + if self.updated_at is not None: body['updated_at'] = self.updated_at + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization: + """Deserializes the LegacyVisualization from a dictionary.""" + return cls(created_at=d.get('created_at', None), + description=d.get('description', None), + id=d.get('id', None), + name=d.get('name', None), + options=d.get('options', None), + query=_from_dict(d, 'query', LegacyQuery), + type=d.get('type', None), + updated_at=d.get('updated_at', None)) + + +class LifecycleState(Enum): + + ACTIVE = 'ACTIVE' + TRASHED = 'TRASHED' + + +@dataclass +class ListAlertsResponse: + next_page_token: Optional[str] = None + + results: Optional[List[ListAlertsResponseAlert]] = None + + def as_dict(self) -> dict: + """Serializes the ListAlertsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse: + """Deserializes the ListAlertsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListAlertsResponseAlert)) + + +@dataclass +class ListAlertsResponseAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + create_time: Optional[str] = None + """The timestamp indicating when the alert was created.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + display_name: Optional[str] = None + """The display name of the alert.""" + + id: Optional[str] = None + """UUID identifying the alert.""" + + lifecycle_state: Optional[LifecycleState] = None + """The workspace state of the alert. Used for tracking trashed status.""" + + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. 
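# ---- editor's usage sketch (not part of the patch) ----
# The paginated list responses above all follow the same next_page_token
# contract. `fetch_alert_page` is a hypothetical callable standing in for
# the raw GET request; it takes a page token (or None) and returns the
# decoded JSON dict.
def iter_all_alerts(fetch_alert_page):
    page_token = None
    while True:
        resp = ListAlertsResponse.from_dict(fetch_alert_page(page_token))
        for alert in resp.results or []:
            yield alert
        page_token = resp.next_page_token
        if not page_token:  # absent or empty token marks the last page
            return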
If 0 or not specified, the alert will not be triggered again.""" + + state: Optional[AlertState] = None + """Current state of the alert's trigger status. This field is set to UNKNOWN if the alert has not + yet been evaluated or ran into an error during the last evaluation.""" + + trigger_time: Optional[str] = None + """Timestamp when the alert was last triggered, if the alert has been triggered before.""" + + update_time: Optional[str] = None + """The timestamp indicating when the alert was updated.""" + + def as_dict(self) -> dict: + """Serializes the ListAlertsResponseAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.create_time is not None: body['create_time'] = self.create_time + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + if self.state is not None: body['state'] = self.state.value + if self.trigger_time is not None: body['trigger_time'] = self.trigger_time + if self.update_time is not None: body['update_time'] = self.update_time + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert: + """Deserializes the ListAlertsResponseAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + create_time=d.get('create_time', None), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), + id=d.get('id', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None), + state=_enum(d, 'state', AlertState), + trigger_time=d.get('trigger_time', None), + update_time=d.get('update_time', None)) + + class ListOrder(Enum): CREATED_AT = 'created_at' @@ -2020,6 +2937,118 @@ def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse: res=_repeated_dict(d, 'res', QueryInfo)) +@dataclass +class ListQueryObjectsResponse: + next_page_token: Optional[str] = None + + results: Optional[List[ListQueryObjectsResponseQuery]] = None + + def as_dict(self) -> dict: + """Serializes the ListQueryObjectsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse: + """Deserializes the ListQueryObjectsResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', ListQueryObjectsResponseQuery)) + + +@dataclass +class ListQueryObjectsResponseQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the 
catalog where this query will be executed.""" + + create_time: Optional[str] = None + """Timestamp when this query was created.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + id: Optional[str] = None + """UUID identifying the query.""" + + last_modifier_user_name: Optional[str] = None + """Username of the user who last saved changes to this query.""" + + lifecycle_state: Optional[LifecycleState] = None + """Indicates whether the query is trashed.""" + + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + update_time: Optional[str] = None + """Timestamp when this query was last updated.""" + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the ListQueryObjectsResponseQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.id is not None: body['id'] = self.id + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.update_time is not None: body['update_time'] = self.update_time + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery: + """Deserializes the ListQueryObjectsResponseQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + create_time=d.get('create_time', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + id=d.get('id', None), + last_modifier_user_name=d.get('last_modifier_user_name', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + 
update_time=d.get('update_time', None), + warehouse_id=d.get('warehouse_id', None)) + + @dataclass class ListResponse: count: Optional[int] = None @@ -2052,6 +3081,26 @@ def from_dict(cls, d: Dict[str, any]) -> ListResponse: results=_repeated_dict(d, 'results', Dashboard)) +@dataclass +class ListVisualizationsForQueryResponse: + next_page_token: Optional[str] = None + + results: Optional[List[Visualization]] = None + + def as_dict(self) -> dict: + """Serializes the ListVisualizationsForQueryResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.results: body['results'] = [v.as_dict() for v in self.results] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse: + """Deserializes the ListVisualizationsForQueryResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + results=_repeated_dict(d, 'results', Visualization)) + + @dataclass class ListWarehousesResponse: warehouses: Optional[List[EndpointInfo]] = None @@ -2071,9 +3120,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse: @dataclass class MultiValuesOptions: - """If specified, allows multiple values to be selected for this parameter. Only applies to dropdown - list and query-based dropdown list parameters.""" - prefix: Optional[str] = None """Character that prefixes each selected parameter value.""" @@ -2099,6 +3145,22 @@ def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions: suffix=d.get('suffix', None)) +@dataclass +class NumericValue: + value: Optional[float] = None + + def as_dict(self) -> dict: + """Serializes the NumericValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> NumericValue: + """Deserializes the NumericValue from a dictionary.""" + return cls(value=d.get('value', None)) + + class ObjectType(Enum): """A singular noun object type.""" @@ -2223,7 +3285,7 @@ class PermissionLevel(Enum): class PlansState(Enum): - """Whether plans exist for the execution, or the reason why they are missing""" + """Possible Reasons for which we have not saved plans in the database""" EMPTY = 'EMPTY' EXISTS = 'EXISTS' @@ -2233,143 +3295,128 @@ class PlansState(Enum): UNKNOWN = 'UNKNOWN' -@dataclass -class Query: - can_edit: Optional[bool] = None - """Describes whether the authenticated user is allowed to edit the definition of this query.""" - - created_at: Optional[str] = None - """The timestamp when this query was created.""" - - data_source_id: Optional[str] = None - """Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list""" - - description: Optional[str] = None - """General description that conveys additional information about this query such as usage notes.""" - - id: Optional[str] = None - """Query ID.""" - - is_archived: Optional[bool] = None - """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear - in search results. If this boolean is `true`, the `options` property for this query includes a - `moved_to_trash_at` timestamp. 
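# ---- editor's usage sketch (not part of the patch) ----
# MultiValuesOptions decorates each selected value of a multi-select
# parameter: with the settings below, selections ['a', 'b'] render as
# 'a','b'. (The separator field sits alongside prefix/suffix in this
# dataclass; it is elided from the hunk above.)
opts = MultiValuesOptions(prefix="'", separator=",", suffix="'")
assert MultiValuesOptions.from_dict(opts.as_dict()) == opts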
Trashed queries are permanently deleted after 30 days.""" +@dataclass +class Query: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" - is_draft: Optional[bool] = None - """Whether the query is a draft. Draft queries only appear in list views for their owners. - Visualizations from draft queries cannot appear on dashboards.""" + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" - is_favorite: Optional[bool] = None - """Whether this query object appears in the current user's favorites list. This flag determines - whether the star icon for favorites is selected.""" + create_time: Optional[str] = None + """Timestamp when this query was created.""" - is_safe: Optional[bool] = None - """Text parameter types are not safe from SQL injection for all types of data source. Set this - Boolean parameter to `true` if a query either does not use any text type parameters or uses a - data source type where text type parameters are handled safely.""" + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" - last_modified_by: Optional[User] = None + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" - last_modified_by_id: Optional[int] = None - """The ID of the user who last saved changes to this query.""" + id: Optional[str] = None + """UUID identifying the query.""" - latest_query_data_id: Optional[str] = None - """If there is a cached result for this query and user, this field includes the query result ID. If - this query uses parameters, this field is always null.""" + last_modifier_user_name: Optional[str] = None + """Username of the user who last saved changes to this query.""" - name: Optional[str] = None - """The title of this query that appears in list views, widget headings, and on the query page.""" + lifecycle_state: Optional[LifecycleState] = None + """Indicates whether the query is trashed.""" - options: Optional[QueryOptions] = None + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" - parent: Optional[str] = None - """The identifier of the workspace folder containing the object.""" + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" - permission_tier: Optional[PermissionLevel] = None - """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query - * `CAN_MANAGE`: Can manage the query""" + parent_path: Optional[str] = None + """Workspace path of the workspace folder containing the object.""" - query: Optional[str] = None - """The text of the query to be run.""" + query_text: Optional[str] = None + """Text of the query to be run.""" - query_hash: Optional[str] = None - """A SHA-256 hash of the query text along with the authenticated user ID.""" + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" - run_as_role: Optional[RunAsRole] = None - """Sets the **Run as** role for the object. 
Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" tags: Optional[List[str]] = None - updated_at: Optional[str] = None - """The timestamp at which this query was last updated.""" - - user: Optional[User] = None - - user_id: Optional[int] = None - """The ID of the user who owns the query.""" + update_time: Optional[str] = None + """Timestamp when this query was last updated.""" - visualizations: Optional[List[Visualization]] = None + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" def as_dict(self) -> dict: """Serializes the Query into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_edit is not None: body['can_edit'] = self.can_edit - if self.created_at is not None: body['created_at'] = self.created_at - if self.data_source_id is not None: body['data_source_id'] = self.data_source_id + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.create_time is not None: body['create_time'] = self.create_time if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.is_archived is not None: body['is_archived'] = self.is_archived - if self.is_draft is not None: body['is_draft'] = self.is_draft - if self.is_favorite is not None: body['is_favorite'] = self.is_favorite - if self.is_safe is not None: body['is_safe'] = self.is_safe - if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict() - if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id - if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options.as_dict() - if self.parent is not None: body['parent'] = self.parent - if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value - if self.query is not None: body['query'] = self.query - if self.query_hash is not None: body['query_hash'] = self.query_hash - if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value + if self.last_modifier_user_name is not None: + body['last_modifier_user_name'] = self.last_modifier_user_name + if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.parent_path is not None: body['parent_path'] = self.parent_path + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema if self.tags: body['tags'] = [v for v in self.tags] - if self.updated_at is not None: body['updated_at'] = self.updated_at - if self.user: body['user'] = self.user.as_dict() - if self.user_id is not None: body['user_id'] = self.user_id - if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations] + if self.update_time is not None: body['update_time'] = self.update_time + if 
self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Query: """Deserializes the Query from a dictionary.""" - return cls(can_edit=d.get('can_edit', None), - created_at=d.get('created_at', None), - data_source_id=d.get('data_source_id', None), + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + create_time=d.get('create_time', None), description=d.get('description', None), + display_name=d.get('display_name', None), id=d.get('id', None), - is_archived=d.get('is_archived', None), - is_draft=d.get('is_draft', None), - is_favorite=d.get('is_favorite', None), - is_safe=d.get('is_safe', None), - last_modified_by=_from_dict(d, 'last_modified_by', User), - last_modified_by_id=d.get('last_modified_by_id', None), - latest_query_data_id=d.get('latest_query_data_id', None), - name=d.get('name', None), - options=_from_dict(d, 'options', QueryOptions), - parent=d.get('parent', None), - permission_tier=_enum(d, 'permission_tier', PermissionLevel), - query=d.get('query', None), - query_hash=d.get('query_hash', None), - run_as_role=_enum(d, 'run_as_role', RunAsRole), + last_modifier_user_name=d.get('last_modifier_user_name', None), + lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + parent_path=d.get('parent_path', None), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), tags=d.get('tags', None), - updated_at=d.get('updated_at', None), - user=_from_dict(d, 'user', User), - user_id=d.get('user_id', None), - visualizations=_repeated_dict(d, 'visualizations', Visualization)) + update_time=d.get('update_time', None), + warehouse_id=d.get('warehouse_id', None)) + + +@dataclass +class QueryBackedValue: + multi_values_options: Optional[MultiValuesOptions] = None + """If specified, allows multiple values to be selected for this parameter.""" + + query_id: Optional[str] = None + """UUID of the query that provides the parameter values.""" + + values: Optional[List[str]] = None + """List of selected query parameter values.""" + + def as_dict(self) -> dict: + """Serializes the QueryBackedValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.multi_values_options: body['multi_values_options'] = self.multi_values_options.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.values: body['values'] = [v for v in self.values] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue: + """Deserializes the QueryBackedValue from a dictionary.""" + return cls(multi_values_options=_from_dict(d, 'multi_values_options', MultiValuesOptions), + query_id=d.get('query_id', None), + values=d.get('values', None)) @dataclass @@ -2430,12 +3477,11 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEditContent: @dataclass class QueryFilter: - """A filter to limit query history results. This field is optional.""" + context_filter: Optional[ContextFilter] = None + """Filter by one or more property describing where the query was generated""" query_start_time_range: Optional[TimeRange] = None - - statement_ids: Optional[List[str]] = None - """A list of statement IDs.""" + """A range filter for query submitted time. 
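# ---- editor's usage sketch (not part of the patch) ----
# Round-trip for the new Query shape. _enum() maps the wire string to the
# RunAsMode member, and as_dict() omits fields that are still None.
q = Query.from_dict({'display_name': 'daily revenue',
                     'query_text': 'SELECT 1',
                     'run_as_mode': 'OWNER',
                     'warehouse_id': 'abc123'})
assert q.run_as_mode is RunAsMode.OWNER
assert Query.from_dict(q.as_dict()) == q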
The time range must be <= 30 days.""" statuses: Optional[List[QueryStatus]] = None @@ -2448,8 +3494,8 @@ class QueryFilter: def as_dict(self) -> dict: """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body.""" body = {} + if self.context_filter: body['context_filter'] = self.context_filter.as_dict() if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict() - if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] if self.statuses: body['statuses'] = [v.value for v in self.statuses] if self.user_ids: body['user_ids'] = [v for v in self.user_ids] if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] @@ -2458,8 +3504,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" - return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), - statement_ids=d.get('statement_ids', None), + return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter), + query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None)) @@ -2467,11 +3513,8 @@ def from_dict(cls, d: Dict[str, any]) -> QueryFilter: @dataclass class QueryInfo: - can_subscribe_to_live_query: Optional[bool] = None - """Reserved for internal use.""" - channel_used: Optional[ChannelInfo] = None - """Channel information for the SQL warehouse at the time of query execution""" + """SQL Warehouse channel information at the time of query execution""" duration: Optional[int] = None """Total execution time of the statement ( excluding result fetch time ).""" @@ -2509,6 +3552,8 @@ class QueryInfo: query_id: Optional[str] = None """The query ID.""" + query_source: Optional[QuerySource] = None + query_start_time_ms: Optional[int] = None """The time the query started.""" @@ -2519,15 +3564,17 @@ class QueryInfo: """The number of results returned by the query.""" spark_ui_url: Optional[str] = None - """URL to the query plan.""" + """URL to the Spark UI query plan.""" statement_type: Optional[QueryStatementType] = None """Type of statement for this query""" status: Optional[QueryStatus] = None - """Query status with one the following values: * `QUEUED`: Query has been received and queued. * - `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: - Query has failed. * `FINISHED`: Query has completed.""" + """Query status with one the following values: + + - `QUEUED`: Query has been received and queued. - `RUNNING`: Query has started. - `CANCELED`: + Query has been cancelled by the user. - `FAILED`: Query has failed. 
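# ---- editor's usage sketch (not part of the patch) ----
# Building the reworked history filter. Both bounds are epoch milliseconds,
# and per the docstring above the service only accepts ranges of up to 30
# days. `now_ms` is computed locally for illustration.
import time

now_ms = int(time.time() * 1000)
day_ms = 24 * 60 * 60 * 1000
query_filter = QueryFilter(
    query_start_time_range=TimeRange(start_time_ms=now_ms - day_ms,
                                     end_time_ms=now_ms),
    statuses=[QueryStatus.FINISHED, QueryStatus.FAILED],
    warehouse_ids=['abc123'],
)
request_body = query_filter.as_dict()  # ready to send as the JSON payload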
- `FINISHED`: Query has + completed.""" user_id: Optional[int] = None """The ID of the user who ran the query.""" @@ -2541,8 +3588,6 @@ class QueryInfo: def as_dict(self) -> dict: """Serializes the QueryInfo into a dictionary suitable for use as a JSON request body.""" body = {} - if self.can_subscribe_to_live_query is not None: - body['canSubscribeToLiveQuery'] = self.can_subscribe_to_live_query if self.channel_used: body['channel_used'] = self.channel_used.as_dict() if self.duration is not None: body['duration'] = self.duration if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id @@ -2556,6 +3601,7 @@ def as_dict(self) -> dict: if self.plans_state is not None: body['plans_state'] = self.plans_state.value if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms if self.query_id is not None: body['query_id'] = self.query_id + if self.query_source: body['query_source'] = self.query_source.as_dict() if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms if self.query_text is not None: body['query_text'] = self.query_text if self.rows_produced is not None: body['rows_produced'] = self.rows_produced @@ -2570,8 +3616,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryInfo: """Deserializes the QueryInfo from a dictionary.""" - return cls(can_subscribe_to_live_query=d.get('canSubscribeToLiveQuery', None), - channel_used=_from_dict(d, 'channel_used', ChannelInfo), + return cls(channel_used=_from_dict(d, 'channel_used', ChannelInfo), duration=d.get('duration', None), endpoint_id=d.get('endpoint_id', None), error_message=d.get('error_message', None), @@ -2584,6 +3629,7 @@ def from_dict(cls, d: Dict[str, any]) -> QueryInfo: plans_state=_enum(d, 'plans_state', PlansState), query_end_time_ms=d.get('query_end_time_ms', None), query_id=d.get('query_id', None), + query_source=_from_dict(d, 'query_source', QuerySource), query_start_time_ms=d.get('query_start_time_ms', None), query_text=d.get('query_text', None), rows_produced=d.get('rows_produced', None), @@ -2606,7 +3652,7 @@ class QueryList: page_size: Optional[int] = None """The number of queries per page.""" - results: Optional[List[Query]] = None + results: Optional[List[LegacyQuery]] = None """List of queries returned.""" def as_dict(self) -> dict: @@ -2624,12 +3670,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryList: return cls(count=d.get('count', None), page=d.get('page', None), page_size=d.get('page_size', None), - results=_repeated_dict(d, 'results', Query)) + results=_repeated_dict(d, 'results', LegacyQuery)) @dataclass class QueryMetrics: - """Metrics about query execution.""" + """A query metric that encapsulates a set of measurements for a single query. 
Metrics come from the + driver and are stored in the history service database.""" compilation_time_ms: Optional[int] = None """Time spent loading metadata and optimizing the query, in milliseconds.""" @@ -2637,9 +3684,6 @@ class QueryMetrics: execution_time_ms: Optional[int] = None """Time spent executing the query, in milliseconds.""" - metadata_time_ms: Optional[int] = None - """Reserved for internal use.""" - network_sent_bytes: Optional[int] = None """Total amount of data sent over the network between executor nodes during shuffle, in bytes.""" @@ -2650,9 +3694,6 @@ class QueryMetrics: photon_total_time_ms: Optional[int] = None """Total execution time for all individual Photon query engine tasks in the query, in milliseconds.""" - planning_time_ms: Optional[int] = None - """Reserved for internal use.""" - provisioning_queue_start_timestamp: Optional[int] = None """Timestamp of when the query was enqueued waiting for a cluster to be provisioned for the warehouse. This field is optional and will not appear if the query skipped the provisioning @@ -2667,9 +3708,6 @@ class QueryMetrics: query_compilation_start_timestamp: Optional[int] = None """Timestamp of when the underlying compute started compilation of the query.""" - query_execution_time_ms: Optional[int] = None - """Reserved for internal use.""" - read_bytes: Optional[int] = None """Total size of data read by the query, in bytes.""" @@ -2677,7 +3715,7 @@ class QueryMetrics: """Size of persistent data read from the cache, in bytes.""" read_files_count: Optional[int] = None - """Number of files read after pruning.""" + """Number of files read after pruning""" read_partitions_count: Optional[int] = None """Number of partitions read after pruning.""" @@ -2689,7 +3727,7 @@ class QueryMetrics: """Time spent fetching the query results after the execution finished, in milliseconds.""" result_from_cache: Optional[bool] = None - """true if the query result was fetched from cache, false otherwise.""" + """`true` if the query result was fetched from cache, `false` otherwise.""" rows_produced_count: Optional[int] = None """Total number of rows returned by the query.""" @@ -2714,20 +3752,16 @@ def as_dict(self) -> dict: body = {} if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms - if self.metadata_time_ms is not None: body['metadata_time_ms'] = self.metadata_time_ms if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes if self.overloading_queue_start_timestamp is not None: body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms - if self.planning_time_ms is not None: body['planning_time_ms'] = self.planning_time_ms if self.provisioning_queue_start_timestamp is not None: body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count if self.query_compilation_start_timestamp is not None: body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp - if self.query_execution_time_ms is not None: - body['query_execution_time_ms'] = self.query_execution_time_ms if self.read_bytes is not None: body['read_bytes'] = self.read_bytes if 
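# ---- editor's usage sketch (not part of the patch) ----
# All *_time_ms counters are durations in milliseconds and *_bytes counters
# are raw byte counts; every field is Optional, so missing measurements are
# treated as zero here.
def summarize_metrics(metrics: QueryMetrics) -> str:
    compile_ms = metrics.compilation_time_ms or 0
    exec_ms = metrics.execution_time_ms or 0
    read_gb = (metrics.read_bytes or 0) / 1e9
    return f'{compile_ms} ms compiling, {exec_ms} ms executing, {read_gb:.2f} GB read'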
self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes if self.read_files_count is not None: body['read_files_count'] = self.read_files_count @@ -2748,16 +3782,13 @@ def from_dict(cls, d: Dict[str, any]) -> QueryMetrics: """Deserializes the QueryMetrics from a dictionary.""" return cls(compilation_time_ms=d.get('compilation_time_ms', None), execution_time_ms=d.get('execution_time_ms', None), - metadata_time_ms=d.get('metadata_time_ms', None), network_sent_bytes=d.get('network_sent_bytes', None), overloading_queue_start_timestamp=d.get('overloading_queue_start_timestamp', None), photon_total_time_ms=d.get('photon_total_time_ms', None), - planning_time_ms=d.get('planning_time_ms', None), provisioning_queue_start_timestamp=d.get('provisioning_queue_start_timestamp', None), pruned_bytes=d.get('pruned_bytes', None), pruned_files_count=d.get('pruned_files_count', None), query_compilation_start_timestamp=d.get('query_compilation_start_timestamp', None), - query_execution_time_ms=d.get('query_execution_time_ms', None), read_bytes=d.get('read_bytes', None), read_cache_bytes=d.get('read_cache_bytes', None), read_files_count=d.get('read_files_count', None), @@ -2805,6 +3836,59 @@ def from_dict(cls, d: Dict[str, any]) -> QueryOptions: schema=d.get('schema', None)) +@dataclass +class QueryParameter: + date_range_value: Optional[DateRangeValue] = None + """Date-range query parameter value. Can only specify one of `dynamic_date_range_value` or + `date_range_value`.""" + + date_value: Optional[DateValue] = None + """Date query parameter value. Can only specify one of `dynamic_date_value` or `date_value`.""" + + enum_value: Optional[EnumValue] = None + """Dropdown query parameter value.""" + + name: Optional[str] = None + """Literal parameter marker that appears between double curly braces in the query text.""" + + numeric_value: Optional[NumericValue] = None + """Numeric query parameter value.""" + + query_backed_value: Optional[QueryBackedValue] = None + """Query-based dropdown query parameter value.""" + + text_value: Optional[TextValue] = None + """Text query parameter value.""" + + title: Optional[str] = None + """Text displayed in the user-facing parameter widget in the UI.""" + + def as_dict(self) -> dict: + """Serializes the QueryParameter into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.date_range_value: body['date_range_value'] = self.date_range_value.as_dict() + if self.date_value: body['date_value'] = self.date_value.as_dict() + if self.enum_value: body['enum_value'] = self.enum_value.as_dict() + if self.name is not None: body['name'] = self.name + if self.numeric_value: body['numeric_value'] = self.numeric_value.as_dict() + if self.query_backed_value: body['query_backed_value'] = self.query_backed_value.as_dict() + if self.text_value: body['text_value'] = self.text_value.as_dict() + if self.title is not None: body['title'] = self.title + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QueryParameter: + """Deserializes the QueryParameter from a dictionary.""" + return cls(date_range_value=_from_dict(d, 'date_range_value', DateRangeValue), + date_value=_from_dict(d, 'date_value', DateValue), + enum_value=_from_dict(d, 'enum_value', EnumValue), + name=d.get('name', None), + numeric_value=_from_dict(d, 'numeric_value', NumericValue), + query_backed_value=_from_dict(d, 'query_backed_value', QueryBackedValue), + text_value=_from_dict(d, 'text_value', TextValue), + title=d.get('title', None)) + + @dataclass class 
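# ---- editor's usage sketch (not part of the patch) ----
# One QueryParameter sets exactly one of the *_value members; `name` must
# match the {{ marker }} used in the query text, while `title` is only the
# widget label shown in the UI.
price_param = QueryParameter(name='min_price',
                             title='Minimum price',
                             numeric_value=NumericValue(value=10.0))
region_param = QueryParameter(name='region',
                              text_value=TextValue(value='EMEA'))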
QueryPostContent: data_source_id: Optional[str] = None @@ -2862,8 +3946,187 @@ def from_dict(cls, d: Dict[str, any]) -> QueryPostContent: tags=d.get('tags', None)) +@dataclass +class QuerySource: + alert_id: Optional[str] = None + """UUID""" + + client_call_context: Optional[ClientCallContext] = None + """Client code that triggered the request""" + + command_id: Optional[str] = None + """Id associated with a notebook cell""" + + command_run_id: Optional[str] = None + """Id associated with a notebook run or execution""" + + dashboard_id: Optional[str] = None + """UUID""" + + dashboard_v3_id: Optional[str] = None + """UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id)""" + + driver_info: Optional[QuerySourceDriverInfo] = None + + entry_point: Optional[QuerySourceEntryPoint] = None + """Spark service that received and processed the query""" + + genie_space_id: Optional[str] = None + """UUID for Genie space""" + + is_cloud_fetch: Optional[bool] = None + + is_databricks_sql_exec_api: Optional[bool] = None + + job_id: Optional[str] = None + + job_managed_by: Optional[QuerySourceJobManager] = None + """With background compute, jobs can be managed by different internal teams. When not specified, + not a background compute job When specified and the value is not JOBS, it is a background + compute job""" + + notebook_id: Optional[str] = None + + pipeline_id: Optional[str] = None + """Id associated with a DLT pipeline""" + + pipeline_update_id: Optional[str] = None + """Id associated with a DLT update""" + + query_tags: Optional[str] = None + """String provided by a customer that'll help them identify the query""" + + run_id: Optional[str] = None + """Id associated with a job run or execution""" + + runnable_command_id: Optional[str] = None + """Id associated with a notebook cell run or execution""" + + scheduled_by: Optional[QuerySourceTrigger] = None + + serverless_channel_info: Optional[ServerlessChannelInfo] = None + + source_query_id: Optional[str] = None + """UUID""" + + def as_dict(self) -> dict: + """Serializes the QuerySource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert_id is not None: body['alert_id'] = self.alert_id + if self.client_call_context: body['client_call_context'] = self.client_call_context.as_dict() + if self.command_id is not None: body['command_id'] = self.command_id + if self.command_run_id is not None: body['command_run_id'] = self.command_run_id + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_v3_id is not None: body['dashboard_v3_id'] = self.dashboard_v3_id + if self.driver_info: body['driver_info'] = self.driver_info.as_dict() + if self.entry_point is not None: body['entry_point'] = self.entry_point.value + if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id + if self.is_cloud_fetch is not None: body['is_cloud_fetch'] = self.is_cloud_fetch + if self.is_databricks_sql_exec_api is not None: + body['is_databricks_sql_exec_api'] = self.is_databricks_sql_exec_api + if self.job_id is not None: body['job_id'] = self.job_id + if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value + if self.notebook_id is not None: body['notebook_id'] = self.notebook_id + if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id + if self.pipeline_update_id is not None: body['pipeline_update_id'] = self.pipeline_update_id + if self.query_tags is not None: body['query_tags'] = self.query_tags + if self.run_id 
is not None: body['run_id'] = self.run_id + if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id + if self.scheduled_by is not None: body['scheduled_by'] = self.scheduled_by.value + if self.serverless_channel_info: + body['serverless_channel_info'] = self.serverless_channel_info.as_dict() + if self.source_query_id is not None: body['source_query_id'] = self.source_query_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySource: + """Deserializes the QuerySource from a dictionary.""" + return cls(alert_id=d.get('alert_id', None), + client_call_context=_from_dict(d, 'client_call_context', ClientCallContext), + command_id=d.get('command_id', None), + command_run_id=d.get('command_run_id', None), + dashboard_id=d.get('dashboard_id', None), + dashboard_v3_id=d.get('dashboard_v3_id', None), + driver_info=_from_dict(d, 'driver_info', QuerySourceDriverInfo), + entry_point=_enum(d, 'entry_point', QuerySourceEntryPoint), + genie_space_id=d.get('genie_space_id', None), + is_cloud_fetch=d.get('is_cloud_fetch', None), + is_databricks_sql_exec_api=d.get('is_databricks_sql_exec_api', None), + job_id=d.get('job_id', None), + job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager), + notebook_id=d.get('notebook_id', None), + pipeline_id=d.get('pipeline_id', None), + pipeline_update_id=d.get('pipeline_update_id', None), + query_tags=d.get('query_tags', None), + run_id=d.get('run_id', None), + runnable_command_id=d.get('runnable_command_id', None), + scheduled_by=_enum(d, 'scheduled_by', QuerySourceTrigger), + serverless_channel_info=_from_dict(d, 'serverless_channel_info', ServerlessChannelInfo), + source_query_id=d.get('source_query_id', None)) + + +@dataclass +class QuerySourceDriverInfo: + bi_tool_entry: Optional[str] = None + + driver_name: Optional[str] = None + + simba_branding_vendor: Optional[str] = None + + version_number: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the QuerySourceDriverInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.bi_tool_entry is not None: body['bi_tool_entry'] = self.bi_tool_entry + if self.driver_name is not None: body['driver_name'] = self.driver_name + if self.simba_branding_vendor is not None: body['simba_branding_vendor'] = self.simba_branding_vendor + if self.version_number is not None: body['version_number'] = self.version_number + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuerySourceDriverInfo: + """Deserializes the QuerySourceDriverInfo from a dictionary.""" + return cls(bi_tool_entry=d.get('bi_tool_entry', None), + driver_name=d.get('driver_name', None), + simba_branding_vendor=d.get('simba_branding_vendor', None), + version_number=d.get('version_number', None)) + + +class QuerySourceEntryPoint(Enum): + """Spark service that received and processed the query""" + + DLT = 'DLT' + SPARK_CONNECT = 'SPARK_CONNECT' + THRIFT_SERVER = 'THRIFT_SERVER' + + +class QuerySourceJobManager(Enum): + """Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to + accommodate JOB_MANAGER_UNSPECIFIED""" + + APP_SYSTEM_TABLE = 'APP_SYSTEM_TABLE' + AUTOML = 'AUTOML' + AUTO_MAINTENANCE = 'AUTO_MAINTENANCE' + CLEAN_ROOMS = 'CLEAN_ROOMS' + DATA_MONITORING = 'DATA_MONITORING' + DATA_SHARING = 'DATA_SHARING' + ENCRYPTION = 'ENCRYPTION' + FABRIC_CRAWLER = 'FABRIC_CRAWLER' + JOBS = 'JOBS' + LAKEVIEW = 'LAKEVIEW' + MANAGED_RAG = 'MANAGED_RAG' + SCHEDULED_MV_REFRESH = 
'SCHEDULED_MV_REFRESH' + TESTING = 'TESTING' + + +class QuerySourceTrigger(Enum): + + MANUAL = 'MANUAL' + SCHEDULED = 'SCHEDULED' + + class QueryStatementType(Enum): - """Type of statement for this query""" ALTER = 'ALTER' ANALYZE = 'ANALYZE' @@ -2890,15 +4153,16 @@ class QueryStatementType(Enum): class QueryStatus(Enum): - """Query status with one the following values: * `QUEUED`: Query has been received and queued. * - `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: - Query has failed. * `FINISHED`: Query has completed.""" + """Statuses which are also used by OperationStatus in runtime""" CANCELED = 'CANCELED' + COMPILED = 'COMPILED' + COMPILING = 'COMPILING' FAILED = 'FAILED' FINISHED = 'FINISHED' QUEUED = 'QUEUED' RUNNING = 'RUNNING' + STARTED = 'STARTED' @dataclass @@ -3071,6 +4335,12 @@ def from_dict(cls, d: Dict[str, any]) -> ResultSchema: return cls(column_count=d.get('column_count', None), columns=_repeated_dict(d, 'columns', ColumnInfo)) +class RunAsMode(Enum): + + OWNER = 'OWNER' + VIEWER = 'VIEWER' + + class RunAsRole(Enum): """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior)""" @@ -3079,6 +4349,23 @@ class RunAsRole(Enum): VIEWER = 'viewer' +@dataclass +class ServerlessChannelInfo: + name: Optional[ChannelName] = None + """Name of the Channel""" + + def as_dict(self) -> dict: + """Serializes the ServerlessChannelInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.name is not None: body['name'] = self.name.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ServerlessChannelInfo: + """Deserializes the ServerlessChannelInfo from a dictionary.""" + return cls(name=_enum(d, 'name', ChannelName)) + + @dataclass class ServiceError: error_code: Optional[ServiceErrorCode] = None @@ -3292,9 +4579,46 @@ def as_dict(self) -> dict: return body @classmethod - def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem: - """Deserializes the StatementParameterListItem from a dictionary.""" - return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) + def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem: + """Deserializes the StatementParameterListItem from a dictionary.""" + return cls(name=d.get('name', None), type=d.get('type', None), value=d.get('value', None)) + + +@dataclass +class StatementResponse: + manifest: Optional[ResultManifest] = None + """The result manifest provides schema and metadata for the result set.""" + + result: Optional[ResultData] = None + """Contains the result data of a single chunk when using `INLINE` disposition. When using + `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned + URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the + `external_links` array prepares the API to return multiple links in a single response. 
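# ---- editor's usage sketch (not part of the patch) ----
# QuerySource round-trips through plain dicts like every other model here:
# enum members serialize to their .value strings and are restored by _enum().
src = QuerySource.from_dict({'entry_point': 'THRIFT_SERVER', 'job_id': '42'})
assert src.entry_point is QuerySourceEntryPoint.THRIFT_SERVER
assert QuerySource.from_dict(src.as_dict()) == src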
Currently + only a single link is returned.)""" + + statement_id: Optional[str] = None + """The statement ID is returned upon successfully submitting a SQL statement, and is a required + reference for all subsequent calls.""" + + status: Optional[StatementStatus] = None + """The status response includes execution state and if relevant, error information.""" + + def as_dict(self) -> dict: + """Serializes the StatementResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.manifest: body['manifest'] = self.manifest.as_dict() + if self.result: body['result'] = self.result.as_dict() + if self.statement_id is not None: body['statement_id'] = self.statement_id + if self.status: body['status'] = self.status.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> StatementResponse: + """Deserializes the StatementResponse from a dictionary.""" + return cls(manifest=_from_dict(d, 'manifest', ResultManifest), + result=_from_dict(d, 'result', ResultData), + statement_id=d.get('statement_id', None), + status=_from_dict(d, 'status', StatementStatus)) class StatementState(Enum): @@ -3502,13 +4826,29 @@ class TerminationReasonType(Enum): SUCCESS = 'SUCCESS' +@dataclass +class TextValue: + value: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the TextValue into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.value is not None: body['value'] = self.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> TextValue: + """Deserializes the TextValue from a dictionary.""" + return cls(value=d.get('value', None)) + + @dataclass class TimeRange: end_time_ms: Optional[int] = None - """Limit results to queries that started before this time.""" + """The end time in milliseconds.""" start_time_ms: Optional[int] = None - """Limit results to queries that started after this time.""" + """The start time in milliseconds.""" def as_dict(self) -> dict: """Serializes the TimeRange into a dictionary suitable for use as a JSON request body.""" @@ -3540,6 +4880,179 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId: return cls(new_owner=d.get('new_owner', None)) +@dataclass +class UpdateAlertRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of + the setting payload will be updated. The field mask needs to be supplied as single string. To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + alert: Optional[UpdateAlertRequestAlert] = None + + id: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the UpdateAlertRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.alert: body['alert'] = self.alert.as_dict() + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest: + """Deserializes the UpdateAlertRequest from a dictionary.""" + return cls(alert=_from_dict(d, 'alert', UpdateAlertRequestAlert), + id=d.get('id', None), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateAlertRequestAlert: + condition: Optional[AlertCondition] = None + """Trigger conditions of the alert.""" + + custom_body: Optional[str] = None + """Custom body of alert notification, if it exists. See [here] for custom templating instructions. 
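# ---- editor's usage sketch (not part of the patch) ----
# A PATCH only touches the fields named in update_mask; everything omitted
# from the mask keeps its stored value. Multiple fields are comma-separated
# with no spaces, per the docstring above.
req = UpdateAlertRequest(
    update_mask='display_name,seconds_to_retrigger',
    id='<alert-uuid>',
    alert=UpdateAlertRequestAlert(display_name='Hourly error spike',
                                  seconds_to_retrigger=3600),
)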
+ + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + custom_subject: Optional[str] = None + """Custom subject of alert notification, if it exists. This can include email subject entries and + Slack notification headers, for example. See [here] for custom templating instructions. + + [here]: https://docs.databricks.com/sql/user/alerts/index.html""" + + display_name: Optional[str] = None + """The display name of the alert.""" + + owner_user_name: Optional[str] = None + """The owner's username. This field is set to "Unavailable" if the user has been deleted.""" + + query_id: Optional[str] = None + """UUID of the query attached to the alert.""" + + seconds_to_retrigger: Optional[int] = None + """Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it + can be triggered again. If 0 or not specified, the alert will not be triggered again.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAlertRequestAlert into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.condition: body['condition'] = self.condition.as_dict() + if self.custom_body is not None: body['custom_body'] = self.custom_body + if self.custom_subject is not None: body['custom_subject'] = self.custom_subject + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.query_id is not None: body['query_id'] = self.query_id + if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert: + """Deserializes the UpdateAlertRequestAlert from a dictionary.""" + return cls(condition=_from_dict(d, 'condition', AlertCondition), + custom_body=d.get('custom_body', None), + custom_subject=d.get('custom_subject', None), + display_name=d.get('display_name', None), + owner_user_name=d.get('owner_user_name', None), + query_id=d.get('query_id', None), + seconds_to_retrigger=d.get('seconds_to_retrigger', None)) + + +@dataclass +class UpdateQueryRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of + the setting payload will be updated. The field mask needs to be supplied as single string. 
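# ---- editor's usage sketch (not part of the patch) ----
# Same field-mask pattern for queries: update the SQL text and move the
# query to another warehouse in a single request.
req = UpdateQueryRequest(
    update_mask='query_text,warehouse_id',
    id='<query-uuid>',
    query=UpdateQueryRequestQuery(query_text='SELECT 2',
                                  warehouse_id='abc123'),
)
request_body = req.as_dict()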
To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + id: Optional[str] = None + + query: Optional[UpdateQueryRequestQuery] = None + + def as_dict(self) -> dict: + """Serializes the UpdateQueryRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.query: body['query'] = self.query.as_dict() + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest: + """Deserializes the UpdateQueryRequest from a dictionary.""" + return cls(id=d.get('id', None), + query=_from_dict(d, 'query', UpdateQueryRequestQuery), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateQueryRequestQuery: + apply_auto_limit: Optional[bool] = None + """Whether to apply a 1000 row limit to the query result.""" + + catalog: Optional[str] = None + """Name of the catalog where this query will be executed.""" + + description: Optional[str] = None + """General description that conveys additional information about this query such as usage notes.""" + + display_name: Optional[str] = None + """Display name of the query that appears in list views, widget headings, and on the query page.""" + + owner_user_name: Optional[str] = None + """Username of the user that owns the query.""" + + parameters: Optional[List[QueryParameter]] = None + """List of query parameter definitions.""" + + query_text: Optional[str] = None + """Text of the query to be run.""" + + run_as_mode: Optional[RunAsMode] = None + """Sets the "Run as" role for the object.""" + + schema: Optional[str] = None + """Name of the schema where this query will be executed.""" + + tags: Optional[List[str]] = None + + warehouse_id: Optional[str] = None + """ID of the SQL warehouse attached to the query.""" + + def as_dict(self) -> dict: + """Serializes the UpdateQueryRequestQuery into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit + if self.catalog is not None: body['catalog'] = self.catalog + if self.description is not None: body['description'] = self.description + if self.display_name is not None: body['display_name'] = self.display_name + if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name + if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters] + if self.query_text is not None: body['query_text'] = self.query_text + if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value + if self.schema is not None: body['schema'] = self.schema + if self.tags: body['tags'] = [v for v in self.tags] + if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery: + """Deserializes the UpdateQueryRequestQuery from a dictionary.""" + return cls(apply_auto_limit=d.get('apply_auto_limit', None), + catalog=d.get('catalog', None), + description=d.get('description', None), + display_name=d.get('display_name', None), + owner_user_name=d.get('owner_user_name', None), + parameters=_repeated_dict(d, 'parameters', QueryParameter), + query_text=d.get('query_text', None), + run_as_mode=_enum(d, 'run_as_mode', RunAsMode), + schema=d.get('schema', None), + tags=d.get('tags', None), + warehouse_id=d.get('warehouse_id', None)) + + @dataclass class UpdateResponse: @@ 
-3554,6 +5067,67 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse: return cls() +@dataclass +class UpdateVisualizationRequest: + update_mask: str + """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of + the setting payload will be updated. The field mask needs to be supplied as single string. To + specify multiple fields in the field mask, use comma as the separator (no space).""" + + id: Optional[str] = None + + visualization: Optional[UpdateVisualizationRequestVisualization] = None + + def as_dict(self) -> dict: + """Serializes the UpdateVisualizationRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.id is not None: body['id'] = self.id + if self.update_mask is not None: body['update_mask'] = self.update_mask + if self.visualization: body['visualization'] = self.visualization.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest: + """Deserializes the UpdateVisualizationRequest from a dictionary.""" + return cls(id=d.get('id', None), + update_mask=d.get('update_mask', None), + visualization=_from_dict(d, 'visualization', UpdateVisualizationRequestVisualization)) + + +@dataclass +class UpdateVisualizationRequestVisualization: + display_name: Optional[str] = None + """The display name of the visualization.""" + + serialized_options: Optional[str] = None + """The visualization options varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying visualization options directly.""" + + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying the visualization query plan directly.""" + + type: Optional[str] = None + """The type of visualization: counter, table, funnel, and so on.""" + + def as_dict(self) -> dict: + """Serializes the UpdateVisualizationRequestVisualization into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.display_name is not None: body['display_name'] = self.display_name + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan + if self.type is not None: body['type'] = self.type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization: + """Deserializes the UpdateVisualizationRequestVisualization from a dictionary.""" + return cls(display_name=d.get('display_name', None), + serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), + type=d.get('type', None)) + + @dataclass class User: email: Optional[str] = None @@ -3578,57 +5152,56 @@ def from_dict(cls, d: Dict[str, any]) -> User: @dataclass class Visualization: - """The visualization description API changes frequently and is unsupported. You can duplicate a - visualization by copying description objects received _from the API_ and then using them to - create a new one with a POST request to the same endpoint. 
Databricks does not recommend - constructing ad-hoc visualizations entirely in JSON.""" - - created_at: Optional[str] = None + create_time: Optional[str] = None + """The timestamp indicating when the visualization was created.""" - description: Optional[str] = None - """A short description of this visualization. This is not displayed in the UI.""" + display_name: Optional[str] = None + """The display name of the visualization.""" id: Optional[str] = None - """The UUID for this visualization.""" + """UUID identifying the visualization.""" - name: Optional[str] = None - """The name of the visualization that appears on dashboards and the query screen.""" + query_id: Optional[str] = None + """UUID of the query that the visualization is attached to.""" - options: Optional[Any] = None - """The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON.""" + serialized_options: Optional[str] = None + """The visualization options varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying visualization options directly.""" - query: Optional[Query] = None + serialized_query_plan: Optional[str] = None + """The visualization query plan varies widely from one visualization type to the next and is + unsupported. Databricks does not recommend modifying the visualization query plan directly.""" type: Optional[str] = None - """The type of visualization: chart, table, pivot table, and so on.""" + """The type of visualization: counter, table, funnel, and so on.""" - updated_at: Optional[str] = None + update_time: Optional[str] = None + """The timestamp indicating when the visualization was updated.""" def as_dict(self) -> dict: """Serializes the Visualization into a dictionary suitable for use as a JSON request body.""" body = {} - if self.created_at is not None: body['created_at'] = self.created_at - if self.description is not None: body['description'] = self.description + if self.create_time is not None: body['create_time'] = self.create_time + if self.display_name is not None: body['display_name'] = self.display_name if self.id is not None: body['id'] = self.id - if self.name is not None: body['name'] = self.name - if self.options: body['options'] = self.options - if self.query: body['query'] = self.query.as_dict() + if self.query_id is not None: body['query_id'] = self.query_id + if self.serialized_options is not None: body['serialized_options'] = self.serialized_options + if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan if self.type is not None: body['type'] = self.type - if self.updated_at is not None: body['updated_at'] = self.updated_at + if self.update_time is not None: body['update_time'] = self.update_time return body @classmethod def from_dict(cls, d: Dict[str, any]) -> Visualization: """Deserializes the Visualization from a dictionary.""" - return cls(created_at=d.get('created_at', None), - description=d.get('description', None), + return cls(create_time=d.get('create_time', None), + display_name=d.get('display_name', None), id=d.get('id', None), - name=d.get('name', None), - options=d.get('options', None), - query=_from_dict(d, 'query', Query), + query_id=d.get('query_id', None), + serialized_options=d.get('serialized_options', None), + serialized_query_plan=d.get('serialized_query_plan', None), type=d.get('type', None), - updated_at=d.get('updated_at', None)) + 
update_time=d.get('update_time', None)) @dataclass @@ -3843,7 +5416,7 @@ class Widget: options: Optional[WidgetOptions] = None - visualization: Optional[Visualization] = None + visualization: Optional[LegacyVisualization] = None """The visualization description API changes frequently and is unsupported. You can duplicate a visualization by copying description objects received _from the API_ and then using them to create a new one with a POST request to the same endpoint. Databricks does not recommend @@ -3866,7 +5439,7 @@ def from_dict(cls, d: Dict[str, any]) -> Widget: """Deserializes the Widget from a dictionary.""" return cls(id=d.get('id', None), options=_from_dict(d, 'options', WidgetOptions), - visualization=_from_dict(d, 'visualization', Visualization), + visualization=_from_dict(d, 'visualization', LegacyVisualization), width=d.get('width', None)) @@ -3919,55 +5492,161 @@ def from_dict(cls, d: Dict[str, any]) -> WidgetOptions: updated_at=d.get('updated_at', None)) -@dataclass -class WidgetPosition: - """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is - unsupported.""" +@dataclass +class WidgetPosition: + """Coordinates of this widget on a dashboard. This portion of the API changes frequently and is + unsupported.""" + + auto_height: Optional[bool] = None + """reserved for internal use""" + + col: Optional[int] = None + """column in the dashboard grid. Values start with 0""" + + row: Optional[int] = None + """row in the dashboard grid. Values start with 0""" + + size_x: Optional[int] = None + """width of the widget measured in dashboard grid cells""" + + size_y: Optional[int] = None + """height of the widget measured in dashboard grid cells""" + + def as_dict(self) -> dict: + """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.auto_height is not None: body['autoHeight'] = self.auto_height + if self.col is not None: body['col'] = self.col + if self.row is not None: body['row'] = self.row + if self.size_x is not None: body['sizeX'] = self.size_x + if self.size_y is not None: body['sizeY'] = self.size_y + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> WidgetPosition: + """Deserializes the WidgetPosition from a dictionary.""" + return cls(auto_height=d.get('autoHeight', None), + col=d.get('col', None), + row=d.get('row', None), + size_x=d.get('sizeX', None), + size_y=d.get('sizeY', None)) + + +class AlertsAPI: + """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that + periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or + notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of + the Jobs API, e.g. :method:jobs/create.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, alert: Optional[CreateAlertRequestAlert] = None) -> Alert: + """Create an alert. + + Creates an alert. + + :param alert: :class:`CreateAlertRequestAlert` (optional) + + :returns: :class:`Alert` + """ + body = {} + if alert is not None: body['alert'] = alert.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/alerts', body=body, headers=headers) + return Alert.from_dict(res) + + def delete(self, id: str): + """Delete an alert. + + Moves an alert to the trash. 
Trashed alerts immediately disappear from searches and list views, and + can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently + deleted after 30 days. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/alerts/{id}', headers=headers) - auto_height: Optional[bool] = None - """reserved for internal use""" + def get(self, id: str) -> Alert: + """Get an alert. + + Gets an alert. + + :param id: str + + :returns: :class:`Alert` + """ - col: Optional[int] = None - """column in the dashboard grid. Values start with 0""" + headers = {'Accept': 'application/json', } - row: Optional[int] = None - """row in the dashboard grid. Values start with 0""" + res = self._api.do('GET', f'/api/2.0/sql/alerts/{id}', headers=headers) + return Alert.from_dict(res) - size_x: Optional[int] = None - """width of the widget measured in dashboard grid cells""" + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListAlertsResponseAlert]: + """List alerts. + + Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListAlertsResponseAlert` + """ - size_y: Optional[int] = None - """height of the widget measured in dashboard grid cells""" + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } - def as_dict(self) -> dict: - """Serializes the WidgetPosition into a dictionary suitable for use as a JSON request body.""" + while True: + json = self._api.do('GET', '/api/2.0/sql/alerts', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListAlertsResponseAlert.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertRequestAlert] = None) -> Alert: + """Update an alert. + + Updates an alert. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). 
+ :param alert: :class:`UpdateAlertRequestAlert` (optional) + + :returns: :class:`Alert` + """ body = {} - if self.auto_height is not None: body['autoHeight'] = self.auto_height - if self.col is not None: body['col'] = self.col - if self.row is not None: body['row'] = self.row - if self.size_x is not None: body['sizeX'] = self.size_x - if self.size_y is not None: body['sizeY'] = self.size_y - return body + if alert is not None: body['alert'] = alert.as_dict() + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - @classmethod - def from_dict(cls, d: Dict[str, any]) -> WidgetPosition: - """Deserializes the WidgetPosition from a dictionary.""" - return cls(auto_height=d.get('autoHeight', None), - col=d.get('col', None), - row=d.get('row', None), - size_x=d.get('sizeX', None), - size_y=d.get('sizeY', None)) + res = self._api.do('PATCH', f'/api/2.0/sql/alerts/{id}', body=body, headers=headers) + return Alert.from_dict(res) -class AlertsAPI: +class AlertsLegacyAPI: """The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -3978,15 +5657,14 @@ def create(self, query_id: str, *, parent: Optional[str] = None, - rearm: Optional[int] = None) -> Alert: + rearm: Optional[int] = None) -> LegacyAlert: """Create an alert. Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a condition of its result, and notifies users or notification destinations if the condition was met. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create + instead. :param name: str Name of the alert. @@ -4000,7 +5678,7 @@ def create(self, Number of seconds after being triggered before the alert rearms itself and can be triggered again. If `null`, alert will never be triggered again. - :returns: :class:`Alert` + :returns: :class:`LegacyAlert` """ body = {} if name is not None: body['name'] = name @@ -4011,7 +5689,7 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/alerts', body=body, headers=headers) - return Alert.from_dict(res) + return LegacyAlert.from_dict(res) def delete(self, alert_id: str): """Delete an alert. @@ -4019,9 +5697,8 @@ def delete(self, alert_id: str): Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike queries and dashboards, alerts cannot be moved to the trash. - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete + instead. :param alert_id: str @@ -4032,41 +5709,39 @@ def delete(self, alert_id: str): self._api.do('DELETE', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers) - def get(self, alert_id: str) -> Alert: + def get(self, alert_id: str) -> LegacyAlert: """Get an alert. Gets an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get + instead. :param alert_id: str - :returns: :class:`Alert` + :returns: :class:`LegacyAlert` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/preview/sql/alerts/{alert_id}', headers=headers) - return Alert.from_dict(res) + return LegacyAlert.from_dict(res) - def list(self) -> Iterator[Alert]: + def list(self) -> Iterator[LegacyAlert]: """Get alerts. Gets a list of alerts. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list + instead. - :returns: Iterator over :class:`Alert` + :returns: Iterator over :class:`LegacyAlert` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', '/api/2.0/preview/sql/alerts', headers=headers) - return [Alert.from_dict(v) for v in res] + return [LegacyAlert.from_dict(v) for v in res] def update(self, alert_id: str, @@ -4079,9 +5754,8 @@ def update(self, Updates an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update + instead. :param alert_id: str :param name: str @@ -4381,9 +6055,7 @@ class DataSourcesAPI: advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4395,9 +6067,8 @@ def list(self) -> Iterator[DataSource]: API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. 
Please use :method:warehouses/list + instead. :returns: Iterator over :class:`DataSource` """ @@ -4421,9 +6092,7 @@ class DbsqlPermissionsAPI: - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4433,10 +6102,6 @@ def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: Gets a JSON representation of the access control list (ACL) for a specified object. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str @@ -4462,10 +6127,6 @@ def set(self, Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str @@ -4494,9 +6155,8 @@ def transfer_ownership(self, Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use + :method:queries/update and :method:alerts/update respectively instead. :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. @@ -4519,13 +6179,151 @@ def transfer_ownership(self, class QueriesAPI: + """The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that + includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be + scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, *, query: Optional[CreateQueryRequestQuery] = None) -> Query: + """Create a query. + + Creates a query. + + :param query: :class:`CreateQueryRequestQuery` (optional) + + :returns: :class:`Query` + """ + body = {} + if query is not None: body['query'] = query.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/queries', body=body, headers=headers) + return Query.from_dict(res) + + def delete(self, id: str): + """Delete a query. + + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is + permanently deleted after 30 days. 
+ + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/queries/{id}', headers=headers) + + def get(self, id: str) -> Query: + """Get a query. + + Gets a query. + + :param id: str + + :returns: :class:`Query` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/sql/queries/{id}', headers=headers) + return Query.from_dict(res) + + def list(self, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ListQueryObjectsResponseQuery]: + """List queries. + + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/sql/queries', query=query, headers=headers) + if 'results' in json: + for v in json['results']: + yield ListQueryObjectsResponseQuery.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_visualizations(self, + id: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[Visualization]: + """List visualizations on a query. + + Gets a list of visualizations on a query. + + :param id: str + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`Visualization` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/sql/queries/{id}/visualizations', + query=query, + headers=headers) + if 'results' in json: + for v in json['results']: + yield Visualization.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryRequestQuery] = None) -> Query: + """Update a query. + + Updates a query. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param query: :class:`UpdateQueryRequestQuery` (optional) + + :returns: :class:`Query` + """ + body = {} + if query is not None: body['query'] = query.as_dict() + if update_mask is not None: body['update_mask'] = update_mask + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/sql/queries/{id}', body=body, headers=headers) + return Query.from_dict(res) + + +class QueriesLegacyAPI: """These endpoints are used for CRUD operations on query definitions. Query definitions include the target SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. 
:method:jobs/create. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4539,7 +6337,7 @@ def create(self, parent: Optional[str] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, - tags: Optional[List[str]] = None) -> Query: + tags: Optional[List[str]] = None) -> LegacyQuery: """Create a new query definition. Creates a new query definition. Queries created with this endpoint belong to the authenticated user @@ -4551,9 +6349,8 @@ def create(self, **Note**: You cannot add a visualization until you create the query. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create + instead. :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -4577,7 +6374,7 @@ def create(self, viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ body = {} if data_source_id is not None: body['data_source_id'] = data_source_id @@ -4591,7 +6388,7 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/queries', body=body, headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) def delete(self, query_id: str): """Delete a query. @@ -4599,9 +6396,8 @@ def delete(self, query_id: str): Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and they cannot be used for alerts. The trash is deleted after 30 days. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete + instead. :param query_id: str @@ -4612,32 +6408,31 @@ def delete(self, query_id: str): self._api.do('DELETE', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers) - def get(self, query_id: str) -> Query: + def get(self, query_id: str) -> LegacyQuery: """Get a query definition. Retrieve a query object definition along with contextual permissions information about the currently authenticated user. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get + instead. 
:param query_id: str - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/preview/sql/queries/{query_id}', headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) def list(self, *, order: Optional[str] = None, page: Optional[int] = None, page_size: Optional[int] = None, - q: Optional[str] = None) -> Iterator[Query]: + q: Optional[str] = None) -> Iterator[LegacyQuery]: """Get a list of queries. Gets a list of queries. Optionally, this list can be filtered by a search term. @@ -4645,9 +6440,8 @@ def list(self, **Warning**: Calling this API concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list + instead. :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order @@ -4670,7 +6464,7 @@ def list(self, :param q: str (optional) Full text search term - :returns: Iterator over :class:`Query` + :returns: Iterator over :class:`LegacyQuery` """ query = {} @@ -4691,7 +6485,7 @@ def list(self, if i in seen: continue seen.add(i) - yield Query.from_dict(v) + yield LegacyQuery.from_dict(v) if 'results' not in json or not json['results']: return query['page'] += 1 @@ -4702,9 +6496,7 @@ def restore(self, query_id: str): Restore a query that has been moved to the trash. A restored query appears in list views and searches. You can use restored queries for alerts. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. :param query_id: str @@ -4724,16 +6516,15 @@ def update(self, options: Optional[Any] = None, query: Optional[str] = None, run_as_role: Optional[RunAsRole] = None, - tags: Optional[List[str]] = None) -> Query: + tags: Optional[List[str]] = None) -> LegacyQuery: """Change a query definition. Modify this query definition. **Note**: You cannot undo this operation. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update + instead. 
:param query_id: str :param data_source_id: str (optional) @@ -4756,7 +6547,7 @@ def update(self, viewer" behavior) or `"owner"` (signifying "run as owner" behavior) :param tags: List[str] (optional) - :returns: :class:`Query` + :returns: :class:`LegacyQuery` """ body = {} if data_source_id is not None: body['data_source_id'] = data_source_id @@ -4769,11 +6560,12 @@ def update(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/api/2.0/preview/sql/queries/{query_id}', body=body, headers=headers) - return Query.from_dict(res) + return LegacyQuery.from_dict(res) class QueryHistoryAPI: - """Access the history of queries through SQL warehouses.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless + compute, and DLT.""" def __init__(self, api_client): self._api = api_client @@ -4781,49 +6573,109 @@ def __init__(self, api_client): def list(self, *, filter_by: Optional[QueryFilter] = None, - include_metrics: Optional[bool] = None, max_results: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator[QueryInfo]: + page_token: Optional[str] = None) -> ListQueriesResponse: """List Queries. - List the history of queries through SQL warehouses. + List the history of queries through SQL warehouses, serverless compute, and DLT. - You can filter by user ID, warehouse ID, status, and time range. + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are + returned first (up to max_results in request). The pagination token returned in response can be used + to list subsequent query statuses. :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. - :param include_metrics: bool (optional) - Whether to include metrics about query. :param max_results: int (optional) - Limit the number of results returned in one page. The default is 100. + Limit the number of results returned in one page. Must be less than 1000 and the default is 100. :param page_token: str (optional) A token that can be used to get the next page of results. The token can contains characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by - %2B. + %2B. This field is optional. - :returns: Iterator over :class:`QueryInfo` + :returns: :class:`ListQueriesResponse` """ query = {} if filter_by is not None: query['filter_by'] = filter_by.as_dict() - if include_metrics is not None: query['include_metrics'] = include_metrics if max_results is not None: query['max_results'] = max_results if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - while True: - json = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers) - if 'res' in json: - for v in json['res']: - yield QueryInfo.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] + res = self._api.do('GET', '/api/2.0/sql/history/queries', query=query, headers=headers) + return ListQueriesResponse.from_dict(res) class QueryVisualizationsAPI: + """This is an evolving API that facilitates the addition and removal of visualizations from existing queries + in the Databricks Workspace. 
Data structures can change over time.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + visualization: Optional[CreateVisualizationRequestVisualization] = None) -> Visualization: + """Add a visualization to a query. + + Adds a visualization to a query. + + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) + + :returns: :class:`Visualization` + """ + body = {} + if visualization is not None: body['visualization'] = visualization.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/sql/visualizations', body=body, headers=headers) + return Visualization.from_dict(res) + + def delete(self, id: str): + """Remove a visualization. + + Removes a visualization. + + :param id: str + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/sql/visualizations/{id}', headers=headers) + + def update(self, + id: str, + update_mask: str, + *, + visualization: Optional[UpdateVisualizationRequestVisualization] = None) -> Visualization: + """Update a visualization. + + Updates a visualization. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) + + :returns: :class:`Visualization` + """ + body = {} + if update_mask is not None: body['update_mask'] = update_mask + if visualization is not None: body['visualization'] = visualization.as_dict() + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/sql/visualizations/{id}', body=body, headers=headers) + return Visualization.from_dict(res) + + +class QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries - within the Databricks Workspace. Data structures may change over time.""" + within the Databricks Workspace. Data structures may change over time. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" def __init__(self, api_client): self._api = api_client @@ -4834,9 +6686,14 @@ def create(self, options: Any, *, description: Optional[str] = None, - name: Optional[str] = None) -> Visualization: + name: Optional[str] = None) -> LegacyVisualization: """Add visualization to a query. + Creates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/create instead. + :param query_id: str The identifier returned by :method:queries/create :param type: str @@ -4849,7 +6706,7 @@ def create(self, :param name: str (optional) The name of the visualization that appears on dashboards and the query screen. 
- :returns: :class:`Visualization` + :returns: :class:`LegacyVisualization` """ body = {} if description is not None: body['description'] = description @@ -4860,11 +6717,16 @@ def create(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/preview/sql/visualizations', body=body, headers=headers) - return Visualization.from_dict(res) + return LegacyVisualization.from_dict(res) def delete(self, id: str): """Remove visualization. + Removes a visualization from the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/delete instead. + :param id: str Widget ID returned by :method:queryvizualisations/create @@ -4882,11 +6744,16 @@ def update(self, description: Optional[str] = None, name: Optional[str] = None, options: Optional[Any] = None, - query: Optional[Query] = None, + query: Optional[LegacyQuery] = None, type: Optional[str] = None, - updated_at: Optional[str] = None) -> Visualization: + updated_at: Optional[str] = None) -> LegacyVisualization: """Edit existing visualization. + Updates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/update instead. + :param id: str The UUID for this visualization. :param created_at: str (optional) @@ -4897,12 +6764,12 @@ def update(self, :param options: Any (optional) The options object varies widely from one visualization type to the next and is unsupported. Databricks does not recommend modifying visualization settings in JSON. - :param query: :class:`Query` (optional) + :param query: :class:`LegacyQuery` (optional) :param type: str (optional) The type of visualization: chart, table, pivot table, and so on. :param updated_at: str (optional) - :returns: :class:`Visualization` + :returns: :class:`LegacyVisualization` """ body = {} if created_at is not None: body['created_at'] = created_at @@ -4915,7 +6782,7 @@ def update(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', f'/api/2.0/preview/sql/visualizations/{id}', body=body, headers=headers) - return Visualization.from_dict(res) + return LegacyVisualization.from_dict(res) class StatementExecutionAPI: @@ -5034,7 +6901,7 @@ def execute_statement(self, parameters: Optional[List[StatementParameterListItem]] = None, row_limit: Optional[int] = None, schema: Optional[str] = None, - wait_timeout: Optional[str] = None) -> ExecuteStatementResponse: + wait_timeout: Optional[str] = None) -> StatementResponse: """Execute a SQL statement. :param statement: str @@ -5161,7 +7028,7 @@ def execute_statement(self, the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. - :returns: :class:`ExecuteStatementResponse` + :returns: :class:`StatementResponse` """ body = {} if byte_limit is not None: body['byte_limit'] = byte_limit @@ -5178,9 +7045,9 @@ def execute_statement(self, headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } res = self._api.do('POST', '/api/2.0/sql/statements/', body=body, headers=headers) - return ExecuteStatementResponse.from_dict(res) + return StatementResponse.from_dict(res) - def get_statement(self, statement_id: str) -> GetStatementResponse: + def get_statement(self, statement_id: str) -> StatementResponse: """Get status, manifest, and result first chunk. This request can be used to poll for the statement's status. 
When the `status.state` field is @@ -5195,13 +7062,13 @@ def get_statement(self, statement_id: str) -> GetStatementResponse: The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - :returns: :class:`GetStatementResponse` + :returns: :class:`StatementResponse` """ headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.0/sql/statements/{statement_id}', headers=headers) - return GetStatementResponse.from_dict(res) + return StatementResponse.from_dict(res) def get_statement_result_chunk_n(self, statement_id: str, chunk_index: int) -> ResultData: """Get result chunk by index. diff --git a/tests/integration/test_sql.py b/tests/integration/test_sql.py index 3e178dc3a..af368609b 100644 --- a/tests/integration/test_sql.py +++ b/tests/integration/test_sql.py @@ -11,5 +11,5 @@ def date_to_ms(date): filter = QueryFilter(query_start_time_range=TimeRange(start_time_ms=date_to_ms('2023-01-01'), end_time_ms=date_to_ms('2023-01-02'))) queries = w.query_history.list(filter_by=filter) - for q in queries: + for q in queries.res: print(q) diff --git a/tests/test_core.py b/tests/test_core.py index 057147159..cc7926a72 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -345,13 +345,13 @@ def test_shares(config, requests_mock): def test_deletes(config, requests_mock): - requests_mock.delete("http://localhost/api/2.0/preview/sql/alerts/alertid", + requests_mock.delete("http://localhost/api/2.0/sql/alerts/alertId", request_headers={"User-Agent": config.user_agent}, text="null", ) w = WorkspaceClient(config=config) - res = w.alerts.delete(alert_id="alertId") + res = w.alerts.delete(id="alertId") assert requests_mock.call_count == 1 assert requests_mock.called From a5d87065616352a85b93823f3309d3368b77a682 Mon Sep 17 00:00:00 2001 From: Vitya Samoilov <108666580+vsamoilov@users.noreply.github.com> Date: Mon, 5 Aug 2024 13:41:41 +0200 Subject: [PATCH 018/136] [Internal] Test that Jobs API endpoints are pinned to 2.1 (#714) ## Changes Added tests to make sure regeneration is not going to break API version pinning: https://github.com/databricks/databricks-sdk-go/pull/993 ## Tests - [x] `make test` run locally - [x] `make fmt` applied - [ ] relevant integration tests applied --- tests/test_jobs.py | 100 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100644 tests/test_jobs.py diff --git a/tests/test_jobs.py b/tests/test_jobs.py new file mode 100644 index 000000000..50143f193 --- /dev/null +++ b/tests/test_jobs.py @@ -0,0 +1,100 @@ +from databricks.sdk import WorkspaceClient + + +# Test cases below are checking that we pinned API 2.1 for certain endpoints, DO NOT REMOVE OR CHANGE THEM. 
https://databricks.atlassian.net/browse/JOBS-19298 +def test_jobs_create(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/create", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.create() + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_update(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/update", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.update(job_id="job_id") + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_list(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/list", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + for _ in w.jobs.list(): + pass + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_get(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/get", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.get(job_id="job_id") + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_reset(config, requests_mock): + requests_mock.post("http://localhost/api/2.1/jobs/reset", + request_headers={ + 'Accept': 'application/json', + 'Content-Type': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + w.jobs.reset(job_id="job_id", new_settings=None) + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +def test_jobs_runs_list(config, requests_mock): + requests_mock.get("http://localhost/api/2.1/jobs/runs/list", + request_headers={ + 'Accept': 'application/json', + }, + text="null", + ) + + w = WorkspaceClient(config=config) + for _ in w.jobs.list_runs(job_id="job_id"): + pass + + assert requests_mock.call_count == 1 + assert requests_mock.called + + +# End of test cases for API 2.1 pinning From dfa4d6057d0125ff4e798c662a5b81caffc11874 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Tue, 6 Aug 2024 07:16:45 -0400 Subject: [PATCH 019/136] [Fix] Fix test_get_workspace_client and test_runtime_auth_from_jobs (#719) ## Changes This PR fixes the current failing integration tests for the Python SDK, unblocking their release. There are two issues: 1. get_workspace_client fails in our integration tests because we call it with a workspace that is not UC-enabled. Because tests are authenticated as service principals, and it isn't possible to add account-level service principals to non-UC workspaces, this call fails. I address this by running this test against a UC-enabled workspace. 2. test_runtime_auth_from_jobs fails because a new LTS DBR version was released (15.4) that doesn't support DBFS library installations. To address this, I have created two tests: test_runtime_auth_from_jobs_dbfs, which tests native auth using the SDK installed from DBFS up to LTS 14.3, and test_runtime_auth_from_jobs_volumes, which does the same with the SDK installed from a volume. ## Tests All integration tests passed (retriggered the GCP integration test locally after adding single user data security mode). 
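The two replacement tests split the LTS runtimes by major version before building job tasks; as a minimal sketch of that check, with illustrative version keys (the tests derive the real ones from `w.clusters.spark_versions()`):

```python
spark_versions = ['13.3.x-scala2.12', '14.3.x-scala2.12', '15.4.x-scala2.12']  # illustrative keys
# DBFS library installation is only supported below DBR 15
dbfs_versions = [v for v in spark_versions if int(v.split('.')[0]) < 15]
# DBR 15+ installs the fresh wheel from a UC volume instead
volume_versions = [v for v in spark_versions if int(v.split('.')[0]) >= 15]
assert dbfs_versions == ['13.3.x-scala2.12', '14.3.x-scala2.12']
assert volume_versions == ['15.4.x-scala2.12']
```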
- [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- tests/integration/conftest.py | 12 ++++++++ tests/integration/test_auth.py | 47 +++++++++++++++++++++++++------- tests/integration/test_client.py | 7 +++-- 3 files changed, 53 insertions(+), 13 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 221cc7d49..f2f6f5314 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -59,6 +59,18 @@ def a(env_or_skip) -> AccountClient: return account_client +@pytest.fixture(scope='session') +def ucacct(env_or_skip) -> AccountClient: + _load_debug_env_if_runs_from_ide('ucacct') + env_or_skip("CLOUD_ENV") + account_client = AccountClient() + if not account_client.config.is_account_client: + pytest.skip("not Databricks Account client") + if 'TEST_METASTORE_ID' not in os.environ: + pytest.skip("not in Unity Catalog Workspace test env") + return account_client + + @pytest.fixture(scope='session') def w(env_or_skip) -> WorkspaceClient: _load_debug_env_if_runs_from_ide('workspace') diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py index 4eca0d1d3..0bf7f951d 100644 --- a/tests/integration/test_auth.py +++ b/tests/integration/test_auth.py @@ -5,6 +5,7 @@ import shutil import subprocess import sys +import typing import urllib.parse from functools import partial from pathlib import Path @@ -12,7 +13,7 @@ import pytest from databricks.sdk.service.compute import (ClusterSpec, DataSecurityMode, - Library, ResultType) + Library, ResultType, SparkVersion) from databricks.sdk.service.jobs import NotebookTask, Task, ViewType from databricks.sdk.service.workspace import ImportFormat @@ -84,19 +85,41 @@ def test_runtime_auth_from_interactive_on_uc(ucws, fresh_wheel_file, env_or_skip ucws.clusters.permanent_delete(interactive_cluster.cluster_id) -def test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random): - instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID') - +def _get_lts_versions(w) -> typing.List[SparkVersion]: v = w.clusters.spark_versions() lts_runtimes = [ x for x in v.versions if 'LTS' in x.name and '-ml' not in x.key and '-photon' not in x.key and '-aarch64' not in x.key ] + return lts_runtimes + + +def test_runtime_auth_from_jobs_volumes(ucws, fresh_wheel_file, env_or_skip, random, volume): + dbr_versions = [v for v in _get_lts_versions(ucws) if int(v.key.split('.')[0]) >= 15] + + volume_wheel = f'{volume}/tmp/wheels/{random(10)}/{fresh_wheel_file.name}' + with fresh_wheel_file.open('rb') as f: + ucws.files.upload(volume_wheel, f) + + lib = Library(whl=volume_wheel) + return _test_runtime_auth_from_jobs_inner(ucws, env_or_skip, random, dbr_versions, lib) + + +def test_runtime_auth_from_jobs_dbfs(w, fresh_wheel_file, env_or_skip, random): + # Library installation from DBFS is not supported past DBR 14.3 + dbr_versions = [v for v in _get_lts_versions(w) if int(v.key.split('.')[0]) < 15] dbfs_wheel = f'/tmp/wheels/{random(10)}/{fresh_wheel_file.name}' with fresh_wheel_file.open('rb') as f: w.dbfs.upload(dbfs_wheel, f) + lib = Library(whl=f'dbfs:{dbfs_wheel}') + return _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib) + + +def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, library): + instance_pool_id = env_or_skip('TEST_INSTANCE_POOL_ID') + my_name = w.current_user.me().user_name notebook_path = f'/Users/{my_name}/notebook-native-auth' notebook_content = io.BytesIO(b''' @@ -109,16 +132,20 @@ def 
test_runtime_auth_from_jobs(w, fresh_wheel_file, env_or_skip, random): w.workspace.upload(notebook_path, notebook_content, language=Language.PYTHON, overwrite=True) tasks = [] - for v in lts_runtimes: + for v in dbr_versions: t = Task(task_key=f'test_{v.key.replace(".", "_")}', notebook_task=NotebookTask(notebook_path=notebook_path), - new_cluster=ClusterSpec(spark_version=v.key, - num_workers=1, - instance_pool_id=instance_pool_id), - libraries=[Library(whl=f'dbfs:{dbfs_wheel}')]) + new_cluster=ClusterSpec( + spark_version=v.key, + num_workers=1, + instance_pool_id=instance_pool_id, + # GCP uses "custom" data security mode by default, which does not support UC. + data_security_mode=DataSecurityMode.SINGLE_USER), + libraries=[library]) tasks.append(t) - run = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks).result() + waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks) + run = waiter.result() for task_key, output in _task_outputs(w, run).items(): assert my_name in output, f'{task_key} does not work with notebook native auth' diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py index 2c4c15ba8..fd46abb47 100644 --- a/tests/integration/test_client.py +++ b/tests/integration/test_client.py @@ -1,10 +1,11 @@ import pytest -def test_get_workspace_client(a, env_or_skip): +def test_get_workspace_client(ucacct, env_or_skip): + # Need to switch to ucacct workspace_id = env_or_skip("TEST_WORKSPACE_ID") - ws = a.workspaces.get(workspace_id) - w = a.get_workspace_client(ws) + ws = ucacct.workspaces.get(workspace_id) + w = ucacct.get_workspace_client(ws) assert w.current_user.me().active From 7d22b4d3727478f0f5dbeb34b7f6fc17a03e31b7 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Tue, 6 Aug 2024 09:47:46 -0400 Subject: [PATCH 020/136] [Fix] Decrease runtime of recursive workspace listing test (#721) ## Changes The current integration test for recursive workspace listing is very slow because it lists all resources in a very large directory (the integration test user's home folder). To decrease the time this test takes, we can simply create a directory with a file and a subdirectory with another file. This means the test requires only two API calls to complete. 
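Roughly, the reworked test builds a fixed two-level tree and asserts on the recursive listing, as in this sketch mirroring the new test below (directory and file names come from the `random` fixture, shown here as placeholders):

```python
# Layout created by the test:
#   <workspace_dir>/file-XXXX.py
#   <workspace_dir>/subdir-XXXX/subfile-XXXX.py
names = [obj.path for obj in w.workspace.list(workspace_dir, recursive=True)]
assert len(names) == 2  # one listed entry per uploaded file
```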
## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- tests/integration/conftest.py | 8 ++++++++ tests/integration/test_workspace.py | 16 +++++++++++++--- 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index f2f6f5314..e9c5430dd 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -116,6 +116,14 @@ def volume(ucws, schema): ucws.volumes.delete(volume.full_name) +@pytest.fixture() +def workspace_dir(w, random): + directory = f'/Users/{w.current_user.me().user_name}/dir-{random(12)}' + w.workspace.mkdirs(directory) + yield directory + w.workspace.delete(directory, recursive=True) + + def _load_debug_env_if_runs_from_ide(key) -> bool: if not _is_in_debug(): return False diff --git a/tests/integration/test_workspace.py b/tests/integration/test_workspace.py index afe77c427..4adbee773 100644 --- a/tests/integration/test_workspace.py +++ b/tests/integration/test_workspace.py @@ -3,11 +3,21 @@ from databricks.sdk.service.workspace import ImportFormat, Language -def test_workspace_recursive_list(w, random): +def test_workspace_recursive_list(w, workspace_dir, random): + # create a file in the directory + file = f'{workspace_dir}/file-{random(12)}.py' + w.workspace.upload(file, io.BytesIO(b'print(1)')) + # create a subdirectory + subdirectory = f'{workspace_dir}/subdir-{random(12)}' + w.workspace.mkdirs(subdirectory) + # create a file in the subdirectory + subfile = f'{subdirectory}/subfile-{random(12)}.py' + w.workspace.upload(subfile, io.BytesIO(b'print(2)')) + # list the directory recursively names = [] - for i in w.workspace.list(f'/Users/{w.current_user.me().user_name}', recursive=True): + for i in w.workspace.list(workspace_dir, recursive=True): names.append(i.path) - assert len(names) > 0 + assert len(names) == 2 def test_workspace_upload_download_notebooks(w, random): From 4105f1f57e317188511c7addbfc0dfaaaa1db96f Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Thu, 8 Aug 2024 10:35:00 -0400 Subject: [PATCH 021/136] [Internal] Add apps package in docgen (#722) ## Changes To enable the release of the Apps package, we need to manually add it to our doc generation. Going forward, this should be added to the internal API specification. ## Tests - [x] Codegen tool runs successfully on commit 88571b688969bc4509fb520d86d161eb20c3d662 of the API specification from this PR. --- docs/gen-client-docs.py | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py index e6070b0f9..4b52d817d 100644 --- a/docs/gen-client-docs.py +++ b/docs/gen-client-docs.py @@ -247,6 +247,7 @@ class Generator: Package("vectorsearch", "Vector Search", "Create and query Vector Search indexes"), Package("dashboards", "Dashboards", "Manage Lakeview dashboards"), Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"), + Package("apps", "Apps", "Build custom applications on Databricks"), ] def __init__(self): From c2cc3eadf80b3bc156ef58e74e7dd129766f734b Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Tue, 13 Aug 2024 03:06:46 -0400 Subject: [PATCH 022/136] [Release] Release v0.30.0 (#724) ### New Features and Improvements * Add DataPlane support ([#700](https://github.com/databricks/databricks-sdk-py/pull/700)). * Support partners in SDK ([#648](https://github.com/databricks/databricks-sdk-py/pull/648)). 
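As a usage sketch for the partner support above — assuming the hook landed as `with_partner` in `databricks.sdk.useragent`, which is an inference from [#648](https://github.com/databricks/databricks-sdk-py/pull/648) rather than a documented guarantee — a partner tool would tag its traffic before constructing a client:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk import useragent

useragent.with_partner('partner-name')  # assumed entry point; appends partner info to the User-Agent
w = WorkspaceClient()  # subsequent requests carry the partner tag
```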
### Bug Fixes * Check trailing slash in host url ([#681](https://github.com/databricks/databricks-sdk-py/pull/681)). * Decrease runtime of recursive workspace listing test ([#721](https://github.com/databricks/databricks-sdk-py/pull/721)). * Fix test_get_workspace_client and test_runtime_auth_from_jobs ([#719](https://github.com/databricks/databricks-sdk-py/pull/719)). * Infer Azure tenant ID if not set ([#638](https://github.com/databricks/databricks-sdk-py/pull/638)). ### Internal Changes * Add Release tag and Workflow fix ([#704](https://github.com/databricks/databricks-sdk-py/pull/704)). * Add apps package in docgen ([#722](https://github.com/databricks/databricks-sdk-py/pull/722)). * Fix processing of `quoted` titles ([#712](https://github.com/databricks/databricks-sdk-py/pull/712)). * Improve Changelog by grouping changes ([#703](https://github.com/databricks/databricks-sdk-py/pull/703)). * Move PR message validation to a separate workflow ([#707](https://github.com/databricks/databricks-sdk-py/pull/707)). * Test that Jobs API endpoints are pinned to 2.1 ([#714](https://github.com/databricks/databricks-sdk-py/pull/714)). * Trigger the validate workflow in the merge queue ([#709](https://github.com/databricks/databricks-sdk-py/pull/709)). * Update OpenAPI spec ([#715](https://github.com/databricks/databricks-sdk-py/pull/715)). ### Other Changes * Add Windows WorkFlow ([#692](https://github.com/databricks/databricks-sdk-py/pull/692)). * Fix auth tests for windows. ([#697](https://github.com/databricks/databricks-sdk-py/pull/697)). * Fix for cancelled workflow ([#701](https://github.com/databricks/databricks-sdk-py/pull/701)). * Fix test_core for windows ([#702](https://github.com/databricks/databricks-sdk-py/pull/702)). * Fix test_local_io for windows ([#695](https://github.com/databricks/databricks-sdk-py/pull/695)). * Remove duplicate ubuntu tests ([#693](https://github.com/databricks/databricks-sdk-py/pull/693)). * fix windows path ([#660](https://github.com/databricks/databricks-sdk-py/pull/660)) ([#673](https://github.com/databricks/databricks-sdk-py/pull/673)). ### API Changes: * Added `databricks.sdk.service.apps` package. * Added [a.usage_dashboards](https://databricks-sdk-py.readthedocs.io/en/latest/account/usage_dashboards.html) account-level service. * Added [w.alerts_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts_legacy.html) workspace-level service, [w.queries_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries_legacy.html) workspace-level service and [w.query_visualizations_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations_legacy.html) workspace-level service. * Added [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. * Added [w.notification_destinations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/notification_destinations.html) workspace-level service. * Added `update()` method for [w.clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clusters.html) workspace-level service. * Added `list_visualizations()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. * Added `databricks.sdk.service.catalog.GetBindingsSecurableType` and `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclasses. 
* Added `databricks.sdk.service.billing.ActionConfiguration`, `databricks.sdk.service.billing.ActionConfigurationType`, `databricks.sdk.service.billing.AlertConfiguration`, `databricks.sdk.service.billing.AlertConfigurationQuantityType`, `databricks.sdk.service.billing.AlertConfigurationTimePeriod`, `databricks.sdk.service.billing.AlertConfigurationTriggerType`, `databricks.sdk.service.billing.BudgetConfiguration`, `databricks.sdk.service.billing.BudgetConfigurationFilter`, `databricks.sdk.service.billing.BudgetConfigurationFilterClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterOperator`, `databricks.sdk.service.billing.BudgetConfigurationFilterTagClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterWorkspaceIdClause`, `databricks.sdk.service.billing.CreateBillingUsageDashboardRequest`, `databricks.sdk.service.billing.CreateBillingUsageDashboardResponse`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudget`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetActionConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetAlertConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationRequest`, `databricks.sdk.service.billing.CreateBudgetConfigurationResponse`, `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest`, `any`, `databricks.sdk.service.billing.GetBillingUsageDashboardRequest`, `databricks.sdk.service.billing.GetBillingUsageDashboardResponse`, `databricks.sdk.service.billing.GetBudgetConfigurationRequest`, `databricks.sdk.service.billing.GetBudgetConfigurationResponse`, `databricks.sdk.service.billing.ListBudgetConfigurationsRequest`, `databricks.sdk.service.billing.ListBudgetConfigurationsResponse`, `databricks.sdk.service.billing.UpdateBudgetConfigurationBudget`, `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest`, `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` and `databricks.sdk.service.billing.UsageDashboardType` dataclasses.
* Added `databricks.sdk.service.compute.ListClustersFilterBy`, `databricks.sdk.service.compute.ListClustersSortBy`, `databricks.sdk.service.compute.ListClustersSortByDirection`, `databricks.sdk.service.compute.ListClustersSortByField`, `databricks.sdk.service.compute.UpdateCluster`, `databricks.sdk.service.compute.UpdateClusterResource` and `any` dataclasses.
* Added `databricks.sdk.service.dashboards.ExecuteMessageQueryRequest`, `databricks.sdk.service.dashboards.GenieAttachment`, `databricks.sdk.service.dashboards.GenieConversation`, `databricks.sdk.service.dashboards.GenieCreateConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultResponse`, `databricks.sdk.service.dashboards.GenieMessage`, `databricks.sdk.service.dashboards.GenieStartConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieStartConversationResponse`, `databricks.sdk.service.dashboards.MessageError`, `databricks.sdk.service.dashboards.MessageErrorType`, `databricks.sdk.service.dashboards.MessageStatus`, `databricks.sdk.service.dashboards.QueryAttachment`, `databricks.sdk.service.dashboards.Result` and `databricks.sdk.service.dashboards.TextAttachment` dataclasses.
* Added `any`, `databricks.sdk.service.iam.MigratePermissionsRequest` and `databricks.sdk.service.iam.MigratePermissionsResponse` dataclasses.
* Added `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` and `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclasses.
* Added `databricks.sdk.service.pipelines.IngestionPipelineDefinition` and `databricks.sdk.service.pipelines.PipelineStateInfoHealth` dataclasses.
* Added `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` dataclass.
* Added `databricks.sdk.service.settings.Config`, `databricks.sdk.service.settings.CreateNotificationDestinationRequest`, `databricks.sdk.service.settings.DeleteNotificationDestinationRequest`, `databricks.sdk.service.settings.DestinationType`, `databricks.sdk.service.settings.EmailConfig`, `any`, `databricks.sdk.service.settings.GenericWebhookConfig`, `databricks.sdk.service.settings.GetNotificationDestinationRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsResponse`, `databricks.sdk.service.settings.ListNotificationDestinationsResult`, `databricks.sdk.service.settings.MicrosoftTeamsConfig`, `databricks.sdk.service.settings.NotificationDestination`, `databricks.sdk.service.settings.PagerdutyConfig`, `databricks.sdk.service.settings.SlackConfig` and `databricks.sdk.service.settings.UpdateNotificationDestinationRequest` dataclasses.
* Added `databricks.sdk.service.sql.AlertCondition`, `databricks.sdk.service.sql.AlertConditionOperand`, `databricks.sdk.service.sql.AlertConditionThreshold`, `databricks.sdk.service.sql.AlertOperandColumn`, `databricks.sdk.service.sql.AlertOperandValue`, `databricks.sdk.service.sql.AlertOperator`, `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.ContextFilter`, `databricks.sdk.service.sql.CreateAlertRequest`, `databricks.sdk.service.sql.CreateAlertRequestAlert`, `databricks.sdk.service.sql.CreateQueryRequest`, `databricks.sdk.service.sql.CreateQueryRequestQuery`, `databricks.sdk.service.sql.CreateQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.CreateVisualizationRequest`, `databricks.sdk.service.sql.CreateVisualizationRequestVisualization`, `databricks.sdk.service.sql.DatePrecision`, `databricks.sdk.service.sql.DateRange`, `databricks.sdk.service.sql.DateRangeValue`, `databricks.sdk.service.sql.DateRangeValueDynamicDateRange`, `databricks.sdk.service.sql.DateValue`, `databricks.sdk.service.sql.DateValueDynamicDate`, `databricks.sdk.service.sql.DeleteAlertsLegacyRequest`, `databricks.sdk.service.sql.DeleteQueriesLegacyRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.DeleteVisualizationRequest`, `any`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.EnumValue`, `databricks.sdk.service.sql.GetAlertsLegacyRequest`, `databricks.sdk.service.sql.GetQueriesLegacyRequest`, `databricks.sdk.service.sql.LegacyAlert`, `databricks.sdk.service.sql.LegacyAlertState`, `databricks.sdk.service.sql.LegacyQuery`, `databricks.sdk.service.sql.LegacyVisualization`, `databricks.sdk.service.sql.LifecycleState`, `databricks.sdk.service.sql.ListAlertsRequest`, `databricks.sdk.service.sql.ListAlertsResponse`, `databricks.sdk.service.sql.ListAlertsResponseAlert`, `databricks.sdk.service.sql.ListQueriesLegacyRequest`, `databricks.sdk.service.sql.ListQueryObjectsResponse`, `databricks.sdk.service.sql.ListQueryObjectsResponseQuery`, `databricks.sdk.service.sql.ListVisualizationsForQueryRequest`, `databricks.sdk.service.sql.ListVisualizationsForQueryResponse`, `databricks.sdk.service.sql.NumericValue`, `databricks.sdk.service.sql.QueryBackedValue`, `databricks.sdk.service.sql.QueryParameter`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger`, `databricks.sdk.service.sql.RestoreQueriesLegacyRequest`, `databricks.sdk.service.sql.RunAsMode`, `databricks.sdk.service.sql.ServerlessChannelInfo`, `databricks.sdk.service.sql.StatementResponse`, `databricks.sdk.service.sql.TextValue`, `databricks.sdk.service.sql.TrashAlertRequest`, `databricks.sdk.service.sql.TrashQueryRequest`, `databricks.sdk.service.sql.UpdateAlertRequest`, `databricks.sdk.service.sql.UpdateAlertRequestAlert`, `databricks.sdk.service.sql.UpdateQueryRequest`, `databricks.sdk.service.sql.UpdateQueryRequestQuery`, `databricks.sdk.service.sql.UpdateVisualizationRequest` and `databricks.sdk.service.sql.UpdateVisualizationRequestVisualization` dataclasses.
* Added `force` field for `databricks.sdk.service.catalog.DeleteSchemaRequest`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetBindingsRequest`.
* Added `include_aliases` field for `databricks.sdk.service.catalog.GetByAliasRequest`.
* Added `include_aliases` field for `databricks.sdk.service.catalog.GetModelVersionRequest`.
* Added `include_aliases` field for `databricks.sdk.service.catalog.GetRegisteredModelRequest`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.ListSystemSchemasRequest`.
* Added `next_page_token` field for `databricks.sdk.service.catalog.ListSystemSchemasResponse`.
* Added `aliases` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
* Added `next_page_token` field for `databricks.sdk.service.catalog.WorkspaceBindingsResponse`.
* Added `version` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest`.
* Added `filter_by`, `page_size`, `page_token` and `sort_by` fields for `databricks.sdk.service.compute.ListClustersRequest`.
* Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.ListClustersResponse`.
* Added `page_token` field for `databricks.sdk.service.jobs.GetRunRequest`.
* Added `iterations`, `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.jobs.Run`.
* Added `create_time`, `created_by`, `creator_username` and `scopes` fields for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`.
* Added `next_page_token` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationsOutput`.
* Added `create_time` and `created_by` fields for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationOutput`.
* Added `next_page_token` field for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationsOutput`.
* Added `enable_local_disk_encryption` field for `databricks.sdk.service.pipelines.PipelineCluster`.
* Added `whl` field for `databricks.sdk.service.pipelines.PipelineLibrary`.
* Added `health` field for `databricks.sdk.service.pipelines.PipelineStateInfo`.
* Added `ai21labs_api_key_plaintext` field for `databricks.sdk.service.serving.Ai21LabsConfig`.
* Added `aws_access_key_id_plaintext` and `aws_secret_access_key_plaintext` fields for `databricks.sdk.service.serving.AmazonBedrockConfig`.
* Added `anthropic_api_key_plaintext` field for `databricks.sdk.service.serving.AnthropicConfig`.
* Added `cohere_api_base` and `cohere_api_key_plaintext` fields for `databricks.sdk.service.serving.CohereConfig`.
* Added `databricks_api_token_plaintext` field for `databricks.sdk.service.serving.DatabricksModelServingConfig`.
* Added `google_cloud_vertex_ai_config` field for `databricks.sdk.service.serving.ExternalModel`.
* Added `microsoft_entra_client_secret_plaintext` and `openai_api_key_plaintext` fields for `databricks.sdk.service.serving.OpenAiConfig`.
* Added `palm_api_key_plaintext` field for `databricks.sdk.service.serving.PaLmConfig`.
* Added `expiration_time` field for `databricks.sdk.service.sharing.CreateRecipient`.
* Added `next_page_token` field for `databricks.sdk.service.sharing.GetRecipientSharePermissionsResponse`.
* Added `next_page_token` field for `databricks.sdk.service.sharing.ListProviderSharesResponse`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListProvidersRequest`.
* Added `next_page_token` field for `databricks.sdk.service.sharing.ListProvidersResponse`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListRecipientsRequest`.
* Added `next_page_token` field for `databricks.sdk.service.sharing.ListRecipientsResponse`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListSharesRequest`.
* Added `next_page_token` field for `databricks.sdk.service.sharing.ListSharesResponse`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.SharePermissionsRequest`.
* Added `expiration_time` field for `databricks.sdk.service.sharing.UpdateRecipient`.
* Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.UpdateSharePermissions`.
* Added `condition`, `create_time`, `custom_body`, `custom_subject`, `display_name`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_id`, `seconds_to_retrigger`, `trigger_time` and `update_time` fields for `databricks.sdk.service.sql.Alert`.
* Added `id` field for `databricks.sdk.service.sql.GetAlertRequest`.
* Added `id` field for `databricks.sdk.service.sql.GetQueryRequest`.
* Added `page_token` field for `databricks.sdk.service.sql.ListQueriesRequest`.
* Added `apply_auto_limit`, `catalog`, `create_time`, `display_name`, `last_modifier_user_name`, `lifecycle_state`, `owner_user_name`, `parameters`, `parent_path`, `query_text`, `run_as_mode`, `schema`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.Query`.
* Added `context_filter` field for `databricks.sdk.service.sql.QueryFilter`.
* Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`.
* Added `create_time`, `display_name`, `query_id`, `serialized_options`, `serialized_query_plan` and `update_time` fields for `databricks.sdk.service.sql.Visualization`.
* Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.CreateBudgetConfigurationResponse` dataclass.
* Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.CreateBudgetConfigurationRequest` dataclass (see the sketches after this list).
* Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest` dataclass.
* Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `any` dataclass.
* Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.GetBudgetConfigurationRequest` dataclass.
* Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.GetBudgetConfigurationResponse` dataclass.
* Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.ListBudgetConfigurationsResponse` dataclass.
* Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to require request of `databricks.sdk.service.billing.ListBudgetConfigurationsRequest` dataclass.
* Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` dataclass.
* Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest` dataclass.
* Changed `create()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service with new required argument order.
* Changed `list()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` dataclass.
* Changed `list()` method for [a.published_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/published_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclass.
* Changed `delete()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service to return `any` dataclass.
* Changed `update()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service with new required argument order.
* Changed `create()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateAlertRequest` dataclass (see the sketches after this list).
* Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `any` dataclass.
* Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashAlertRequest` dataclass.
* Changed `get()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service with new required argument order.
* Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.ListAlertsResponse` dataclass.
* Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to require request of `databricks.sdk.service.sql.ListAlertsRequest` dataclass.
* Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.Alert` dataclass.
* Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateAlertRequest` dataclass.
* Changed `create()` and `edit()` methods for [w.cluster_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cluster_policies.html) workspace-level service with new required argument order.
* Changed `get()` method for [w.model_versions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/model_versions.html) workspace-level service to return `databricks.sdk.service.catalog.ModelVersionInfo` dataclass.
* Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service. New request type is `databricks.sdk.service.iam.MigratePermissionsRequest` dataclass.
* Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service to return `databricks.sdk.service.iam.MigratePermissionsResponse` dataclass.
* Changed `create()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateQueryRequest` dataclass.
* Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `any` dataclass.
* Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashQueryRequest` dataclass.
* Changed `get()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service with new required argument order.
* Changed `list()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `databricks.sdk.service.sql.ListQueryObjectsResponse` dataclass.
* Changed `update()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateQueryRequest` dataclass.
* Changed `create()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateVisualizationRequest` dataclass.
* Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service to return `any` dataclass.
* Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.DeleteVisualizationRequest` dataclass.
* Changed `update()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateVisualizationRequest` dataclass.
* Changed `list()` method for [w.shares](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/shares.html) workspace-level service to require request of `databricks.sdk.service.sharing.ListSharesRequest` dataclass.
* Changed `execute_statement()` and `get_statement()` methods for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/statement_execution.html) workspace-level service to return `databricks.sdk.service.sql.StatementResponse` dataclass.
* Changed `securable_type` field for `databricks.sdk.service.catalog.GetBindingsRequest` to `databricks.sdk.service.catalog.GetBindingsSecurableType` dataclass.
* Changed `securable_type` field for `databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclass.
* Changed `name` field for `databricks.sdk.service.compute.CreatePolicy` to no longer be required.
* Changed `name` field for `databricks.sdk.service.compute.EditPolicy` to no longer be required.
* Changed `policy_family_id` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest` to `str` dataclass.
* Changed `policy_families` field for `databricks.sdk.service.compute.ListPolicyFamiliesResponse` to no longer be required.
* Changed `definition`, `description`, `name` and `policy_family_id` fields for `databricks.sdk.service.compute.PolicyFamily` to no longer be required.
* Changed `permissions` field for `databricks.sdk.service.iam.UpdateWorkspaceAssignments` to no longer be required.
* Changed `access_control_list` field for `databricks.sdk.service.jobs.CreateJob` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass.
* Changed `access_control_list` field for `databricks.sdk.service.jobs.SubmitRun` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass.
* Changed `name` and `redirect_urls` fields for `databricks.sdk.service.oauth2.CreateCustomAppIntegration` to no longer be required.
* Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.CreatePipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
* Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.EditPipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
* Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.PipelineSpec` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
* Changed `ai21labs_api_key` field for `databricks.sdk.service.serving.Ai21LabsConfig` to no longer be required.
* Changed `aws_access_key_id` and `aws_secret_access_key` fields for `databricks.sdk.service.serving.AmazonBedrockConfig` to no longer be required.
* Changed `anthropic_api_key` field for `databricks.sdk.service.serving.AnthropicConfig` to no longer be required.
* Changed `cohere_api_key` field for `databricks.sdk.service.serving.CohereConfig` to no longer be required.
* Changed `databricks_api_token` field for `databricks.sdk.service.serving.DatabricksModelServingConfig` to no longer be required.
* Changed `palm_api_key` field for `databricks.sdk.service.serving.PaLmConfig` to no longer be required.
* Changed `tags` field for `databricks.sdk.service.sql.Query` to `databricks.sdk.service.sql.List` dataclass.
* Changed `user_ids` and `warehouse_ids` fields for `databricks.sdk.service.sql.QueryFilter` to `databricks.sdk.service.sql.List` dataclass.
* Changed `results` field for `databricks.sdk.service.sql.QueryList` to `databricks.sdk.service.sql.LegacyQueryList` dataclass.
* Changed `visualization` field for `databricks.sdk.service.sql.Widget` to `databricks.sdk.service.sql.LegacyVisualization` dataclass.
* Removed [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service.
* Removed `restore()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service.
* Removed `databricks.sdk.service.marketplace.FilterType`, `databricks.sdk.service.marketplace.ProviderIconFile`, `databricks.sdk.service.marketplace.ProviderIconType`, `databricks.sdk.service.marketplace.ProviderListingSummaryInfo`, `databricks.sdk.service.marketplace.SortBy` and `databricks.sdk.service.marketplace.VisibilityFilter` dataclasses.
* Removed `databricks.sdk.service.billing.Budget`, `databricks.sdk.service.billing.BudgetAlert`, `databricks.sdk.service.billing.BudgetList`, `databricks.sdk.service.billing.BudgetWithStatus`, `databricks.sdk.service.billing.BudgetWithStatusStatusDailyItem`, `databricks.sdk.service.billing.DeleteBudgetRequest`, `any`, `databricks.sdk.service.billing.GetBudgetRequest`, `any`, `databricks.sdk.service.billing.WrappedBudget` and `databricks.sdk.service.billing.WrappedBudgetWithStatus` dataclasses.
* Removed `any`, `databricks.sdk.service.iam.PermissionMigrationRequest` and `databricks.sdk.service.iam.PermissionMigrationResponse` dataclasses.
* Removed `databricks.sdk.service.pipelines.ManagedIngestionPipelineDefinition` dataclass.
* Removed `databricks.sdk.service.serving.App`, `databricks.sdk.service.serving.AppDeployment`, `databricks.sdk.service.serving.AppDeploymentArtifacts`, `databricks.sdk.service.serving.AppDeploymentMode`, `databricks.sdk.service.serving.AppDeploymentState`, `databricks.sdk.service.serving.AppDeploymentStatus`, `databricks.sdk.service.serving.AppEnvironment`, `databricks.sdk.service.serving.AppState`, `databricks.sdk.service.serving.AppStatus`, `databricks.sdk.service.serving.CreateAppDeploymentRequest`, `databricks.sdk.service.serving.CreateAppRequest`, `databricks.sdk.service.serving.DeleteAppRequest`, `databricks.sdk.service.serving.EnvVariable`, `databricks.sdk.service.serving.GetAppDeploymentRequest`, `databricks.sdk.service.serving.GetAppEnvironmentRequest`, `databricks.sdk.service.serving.GetAppRequest`, `databricks.sdk.service.serving.ListAppDeploymentsRequest`, `databricks.sdk.service.serving.ListAppDeploymentsResponse`, `databricks.sdk.service.serving.ListAppsRequest`, `databricks.sdk.service.serving.ListAppsResponse`, `databricks.sdk.service.serving.StartAppRequest`, `databricks.sdk.service.serving.StopAppRequest`, `any` and `databricks.sdk.service.serving.UpdateAppRequest` dataclasses.
* Removed `databricks.sdk.service.sql.CreateQueryVisualizationRequest`, `databricks.sdk.service.sql.DeleteAlertRequest`, `databricks.sdk.service.sql.DeleteQueryRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationRequest`, `databricks.sdk.service.sql.ExecuteStatementResponse`, `databricks.sdk.service.sql.GetStatementResponse`, `databricks.sdk.service.sql.RestoreQueryRequest`, `databricks.sdk.service.sql.StatementId`, `databricks.sdk.service.sql.UserId` and `databricks.sdk.service.sql.WarehouseId` dataclasses.
* Removed `databricks.sdk.service.compute.PolicyFamilyId` dataclass.
* Removed `can_use_client` field for `databricks.sdk.service.compute.ListClustersRequest`.
* Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.ListListingsRequest`.
* Removed `provider_summary` field for `databricks.sdk.service.marketplace.Listing`.
* Removed `filters` field for `databricks.sdk.service.marketplace.ListingSetting`.
* Removed `metastore_id` field for `databricks.sdk.service.marketplace.ListingSummary`.
* Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.SearchListingsRequest`.
* Removed `created_at`, `last_triggered_at`, `name`, `options`, `parent`, `query`, `rearm`, `updated_at` and `user` fields for `databricks.sdk.service.sql.Alert`.
* Removed `alert_id` field for `databricks.sdk.service.sql.GetAlertRequest`.
* Removed `query_id` field for `databricks.sdk.service.sql.GetQueryRequest`.
* Removed `order`, `page` and `q` fields for `databricks.sdk.service.sql.ListQueriesRequest`.
* Removed `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`.
* Removed `can_edit`, `created_at`, `data_source_id`, `is_archived`, `is_draft`, `is_favorite`, `is_safe`, `last_modified_by`, `last_modified_by_id`, `latest_query_data_id`, `name`, `options`, `parent`, `permission_tier`, `query`, `query_hash`, `run_as_role`, `updated_at`, `user`, `user_id` and `visualizations` fields for `databricks.sdk.service.sql.Query`.
* Removed `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`.
* Removed `can_subscribe_to_live_query` field for `databricks.sdk.service.sql.QueryInfo`.
* Removed `metadata_time_ms`, `planning_time_ms` and `query_execution_time_ms` fields for `databricks.sdk.service.sql.QueryMetrics`.
* Removed `created_at`, `description`, `name`, `options`, `query` and `updated_at` fields for `databricks.sdk.service.sql.Visualization`.
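For the reworked `w.alerts` CRUD noted in the list above, a rough sketch of creating and trashing an alert with the new request dataclasses; the query ID and threshold are placeholders, and the field names are taken from the dataclasses this changelog lists rather than verified against the service:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# '<query-id>' is a placeholder for the ID of an existing SQL query.
alert = w.alerts.create(
    alert=sql.CreateAlertRequestAlert(
        display_name='row-count-alert',
        query_id='<query-id>',
        condition=sql.AlertCondition(
            op=sql.AlertOperator.GREATER_THAN,
            operand=sql.AlertConditionOperand(
                column=sql.AlertOperandColumn(name='value')),
            threshold=sql.AlertConditionThreshold(
                value=sql.AlertOperandValue(double_value=100)))))

# delete() now trashes the alert rather than removing it permanently
# (note the TrashAlertRequest dataclass above).
w.alerts.delete(alert.id)
```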
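Similarly, a minimal sketch of the new `w.notification_destinations` service, assuming the `Config` and `SlackConfig` dataclasses listed above; the webhook URL is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()

# Other variants (email, Microsoft Teams, PagerDuty, generic webhook)
# follow the same shape with their respective config dataclasses.
dest = w.notification_destinations.create(
    display_name='oncall-slack',
    config=settings.Config(
        slack=settings.SlackConfig(
            url='https://hooks.slack.com/services/PLACEHOLDER')))

w.notification_destinations.delete(dest.id)
```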
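And for the reshaped `a.budgets` service, an unverified sketch of creating a budget configuration with a single alert threshold; the display name and threshold are placeholders, and the enum members are assumed from the billing dataclasses named above:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

# Assumes account-level authentication is configured.
a = AccountClient()

created = a.budgets.create(
    budget=billing.CreateBudgetConfigurationBudget(
        display_name='monthly-usage-budget',
        alert_configurations=[
            billing.CreateBudgetConfigurationBudgetAlertConfigurations(
                time_period=billing.AlertConfigurationTimePeriod.MONTH,
                trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
                quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
                quantity_threshold='1000')]))

# The response wraps the stored configuration and its generated ID.
budget_id = created.budget.budget_configuration_id
```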
OpenAPI SHA: f98c07f9c71f579de65d2587bb0292f83d10e55d, Date: 2024-08-12
---
 .codegen/_openapi_sha                         |   2 +-
 .gitattributes                                |   1 +
 CHANGELOG.md                                  | 198 ++++
 databricks/sdk/__init__.py                    |  11 +-
 databricks/sdk/service/apps.py                | 977 ++++++++++++++++++
 databricks/sdk/service/billing.py             | 131 +++
 databricks/sdk/service/catalog.py             | 120 ++-
 databricks/sdk/service/compute.py             | 393 ++++++-
 databricks/sdk/service/dashboards.py          |  21 +-
 databricks/sdk/service/iam.py                 |  43 +-
 databricks/sdk/service/jobs.py                |  42 +-
 databricks/sdk/service/pipelines.py           | 124 ++-
 databricks/sdk/service/serving.py             | 719 -------------
 databricks/sdk/service/sharing.py             | 259 ++++-
 databricks/sdk/service/sql.py                 | 171 ++-
 databricks/sdk/version.py                     |   2 +-
 docs/account/billing/budgets.rst              | 186 ++--
 docs/account/billing/index.rst                |   3 +-
 docs/account/billing/usage_dashboards.rst     |  39 +
 docs/account/iam/workspace_assignment.rst     |  16 +-
 .../account/oauth2/custom_app_integration.rst |  35 +-
 docs/account/oauth2/o_auth_published_apps.rst |   2 +-
 .../oauth2/published_app_integration.rst      |  22 +-
 docs/dbdataclasses/apps.rst                   | 144 +++
 docs/dbdataclasses/billing.rst                | 101 +-
 docs/dbdataclasses/catalog.rst                |  28 +-
 docs/dbdataclasses/compute.rst                |  42 +
 docs/dbdataclasses/dashboards.rst             | 191 +++-
 docs/dbdataclasses/iam.rst                    |  21 +-
 docs/dbdataclasses/index.rst                  |   1 +
 docs/dbdataclasses/jobs.rst                   |   3 -
 docs/dbdataclasses/marketplace.rst            |  48 -
 docs/dbdataclasses/pipelines.rst              |  18 +-
 docs/dbdataclasses/serving.rst                | 120 +--
 docs/dbdataclasses/settings.rst               |  78 +-
 docs/dbdataclasses/sharing.rst                |   3 +
 docs/dbdataclasses/sql.rst                    | 399 ++++++-
 docs/workspace/apps/apps.rst                  | 220 ++++
 docs/workspace/apps/index.rst                 |  10 +
 docs/workspace/catalog/model_versions.rst     |  10 +-
 docs/workspace/catalog/registered_models.rst  |   4 +-
 docs/workspace/catalog/schemas.rst            |   4 +-
 docs/workspace/catalog/system_schemas.rst     |   9 +-
 docs/workspace/catalog/workspace_bindings.rst |  23 +-
 docs/workspace/compute/cluster_policies.rst   |  18 +-
 docs/workspace/compute/clusters.rst           |  64 +-
 docs/workspace/compute/command_execution.rst  |   3 +-
 docs/workspace/compute/policy_families.rst    |  12 +-
 docs/workspace/dashboards/genie.rst           | 102 ++
 docs/workspace/dashboards/index.rst           |   1 +
 docs/workspace/dashboards/lakeview.rst        |   3 +-
 docs/workspace/iam/permission_migration.rst   |  15 +-
 docs/workspace/iam/permissions.rst            |  22 +-
 docs/workspace/index.rst                      |   1 +
 docs/workspace/jobs/jobs.rst                  |  17 +-
 .../marketplace/consumer_listings.rst         |   9 +-
 docs/workspace/pipelines/pipelines.rst        |   8 +-
 docs/workspace/serving/index.rst              |   1 -
 docs/workspace/settings/index.rst             |   1 +
 .../settings/notification_destinations.rst    |  74 ++
 docs/workspace/sharing/providers.rst          |  24 +-
 docs/workspace/sharing/recipients.rst         |  32 +-
 docs/workspace/sharing/shares.rst             |  40 +-
 docs/workspace/sql/alerts.rst                 | 174 ++--
 docs/workspace/sql/alerts_legacy.rst          | 114 ++
 docs/workspace/sql/data_sources.rst           |  13 +-
 docs/workspace/sql/dbsql_permissions.rst      |  19 +-
 docs/workspace/sql/index.rst                  |   3 +
 docs/workspace/sql/queries.rst                | 223 ++--
 docs/workspace/sql/queries_legacy.rst         | 183 ++++
 docs/workspace/sql/query_history.rst          |  19 +-
 docs/workspace/sql/query_visualizations.rst   |  51 +-
 .../sql/query_visualizations_legacy.rst       |  85 ++
 docs/workspace/sql/statement_execution.rst    |  28 +-
 docs/workspace/sql/warehouses.rst             |  39 +-
 examples/account/budgets/create_budgets.py    |  29 +-
 examples/account/budgets/get_budgets.py       |  31 +-
 examples/account/budgets/list_budgets.py      |   3 +-
 examples/account/budgets/update_budgets.py    |  61 +-
 examples/workspace/alerts/create_alerts.py    |  24 +-
 examples/workspace/alerts/get_alerts.py       |  26 +-
 examples/workspace/alerts/list_alerts.py      |   3 +-
 examples/workspace/alerts/update_alerts.py    |  31 +-
 examples/workspace/queries/create_alerts.py   |  11 +-
 examples/workspace/queries/create_queries.py  |  11 +-
 examples/workspace/queries/get_queries.py     |  13 +-
 examples/workspace/queries/update_queries.py  |  21 +-
 examples/workspace/shares/list_shares.py      |   3 +-
 .../warehouses/create_sql_warehouses.py       |  13 +-
 .../warehouses/edit_sql_warehouses.py         |  13 +-
 .../warehouses/get_sql_warehouses.py          |  13 +-
 91 files changed, 4841 insertions(+), 1857 deletions(-)
 create mode 100755 databricks/sdk/service/apps.py
 create mode 100644 docs/account/billing/usage_dashboards.rst
 create mode 100644 docs/dbdataclasses/apps.rst
 create mode 100644 docs/workspace/apps/apps.rst
 create mode 100644 docs/workspace/apps/index.rst
 create mode 100644 docs/workspace/dashboards/genie.rst
 create mode 100644 docs/workspace/settings/notification_destinations.rst
 create mode 100644 docs/workspace/sql/alerts_legacy.rst
 create mode 100644 docs/workspace/sql/queries_legacy.rst
 create mode 100644 docs/workspace/sql/query_visualizations_legacy.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index ed18d818d..fef6f268b 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-37e2bbe0cbcbbbe78a06a018d4fab06314a26a40
\ No newline at end of file
+f98c07f9c71f579de65d2587bb0292f83d10e55d
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index c37d866dc..22e000b1b 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,6 +1,7 @@
 databricks/sdk/__init__.py linguist-generated=true
 databricks/sdk/errors/overrides.py linguist-generated=true
 databricks/sdk/errors/platform.py linguist-generated=true
+databricks/sdk/service/apps.py linguist-generated=true
 databricks/sdk/service/billing.py linguist-generated=true
 databricks/sdk/service/catalog.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 341770ca5..278eec3e2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,203 @@
 # Version changelog
 
+## [Release] Release v0.30.0
+
+### New Features and Improvements
+
+ * Add DataPlane support ([#700](https://github.com/databricks/databricks-sdk-py/pull/700)).
+ * Support partners in SDK ([#648](https://github.com/databricks/databricks-sdk-py/pull/648)).
+
+
+### Bug Fixes
+
+ * Check trailing slash in host url ([#681](https://github.com/databricks/databricks-sdk-py/pull/681)).
+ * Decrease runtime of recursive workspace listing test ([#721](https://github.com/databricks/databricks-sdk-py/pull/721)).
+ * Fix test_get_workspace_client and test_runtime_auth_from_jobs ([#719](https://github.com/databricks/databricks-sdk-py/pull/719)).
+ * Infer Azure tenant ID if not set ([#638](https://github.com/databricks/databricks-sdk-py/pull/638)).
+
+
+### Internal Changes
+
+ * Add Release tag and Workflow fix ([#704](https://github.com/databricks/databricks-sdk-py/pull/704)).
+ * Add apps package in docgen ([#722](https://github.com/databricks/databricks-sdk-py/pull/722)).
+ * Fix processing of `quoted` titles ([#712](https://github.com/databricks/databricks-sdk-py/pull/712)).
+ * Improve Changelog by grouping changes ([#703](https://github.com/databricks/databricks-sdk-py/pull/703)).
+ * Move PR message validation to a separate workflow ([#707](https://github.com/databricks/databricks-sdk-py/pull/707)).
+ * Test that Jobs API endpoints are pinned to 2.1 ([#714](https://github.com/databricks/databricks-sdk-py/pull/714)).
+ * Trigger the validate workflow in the merge queue ([#709](https://github.com/databricks/databricks-sdk-py/pull/709)).
+ * Update OpenAPI spec ([#715](https://github.com/databricks/databricks-sdk-py/pull/715)).
+
+
+### Other Changes
+
+ * Add Windows WorkFlow ([#692](https://github.com/databricks/databricks-sdk-py/pull/692)).
+ * Fix auth tests for windows. ([#697](https://github.com/databricks/databricks-sdk-py/pull/697)).
+ * Fix for cancelled workflow ([#701](https://github.com/databricks/databricks-sdk-py/pull/701)).
+ * Fix test_core for windows ([#702](https://github.com/databricks/databricks-sdk-py/pull/702)).
+ * Fix test_local_io for windows ([#695](https://github.com/databricks/databricks-sdk-py/pull/695)).
+ * Remove duplicate ubuntu tests ([#693](https://github.com/databricks/databricks-sdk-py/pull/693)).
+ * fix windows path ([#660](https://github.com/databricks/databricks-sdk-py/pull/660)) ([#673](https://github.com/databricks/databricks-sdk-py/pull/673)).
+
+
+### API Changes:
+
+ * Added `databricks.sdk.service.apps` package.
+ * Added [a.usage_dashboards](https://databricks-sdk-py.readthedocs.io/en/latest/account/usage_dashboards.html) account-level service.
+ * Added [w.alerts_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts_legacy.html) workspace-level service, [w.queries_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries_legacy.html) workspace-level service and [w.query_visualizations_legacy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations_legacy.html) workspace-level service.
+ * Added [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service.
+ * Added [w.notification_destinations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/notification_destinations.html) workspace-level service.
+ * Added `update()` method for [w.clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clusters.html) workspace-level service.
+ * Added `list_visualizations()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service.
+ * Added `databricks.sdk.service.catalog.GetBindingsSecurableType` and `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclasses.
+ * Added `databricks.sdk.service.billing.ActionConfiguration`, `databricks.sdk.service.billing.ActionConfigurationType`, `databricks.sdk.service.billing.AlertConfiguration`, `databricks.sdk.service.billing.AlertConfigurationQuantityType`, `databricks.sdk.service.billing.AlertConfigurationTimePeriod`, `databricks.sdk.service.billing.AlertConfigurationTriggerType`, `databricks.sdk.service.billing.BudgetConfiguration`, `databricks.sdk.service.billing.BudgetConfigurationFilter`, `databricks.sdk.service.billing.BudgetConfigurationFilterClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterOperator`, `databricks.sdk.service.billing.BudgetConfigurationFilterTagClause`, `databricks.sdk.service.billing.BudgetConfigurationFilterWorkspaceIdClause`, `databricks.sdk.service.billing.CreateBillingUsageDashboardRequest`, `databricks.sdk.service.billing.CreateBillingUsageDashboardResponse`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudget`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetActionConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationBudgetAlertConfigurations`, `databricks.sdk.service.billing.CreateBudgetConfigurationRequest`, `databricks.sdk.service.billing.CreateBudgetConfigurationResponse`, `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest`, `any`, `databricks.sdk.service.billing.GetBillingUsageDashboardRequest`, `databricks.sdk.service.billing.GetBillingUsageDashboardResponse`, `databricks.sdk.service.billing.GetBudgetConfigurationRequest`, `databricks.sdk.service.billing.GetBudgetConfigurationResponse`, `databricks.sdk.service.billing.ListBudgetConfigurationsRequest`, `databricks.sdk.service.billing.ListBudgetConfigurationsResponse`, `databricks.sdk.service.billing.UpdateBudgetConfigurationBudget`, `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest`, `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` and `databricks.sdk.service.billing.UsageDashboardType` dataclasses.
+ * Added `databricks.sdk.service.compute.ListClustersFilterBy`, `databricks.sdk.service.compute.ListClustersSortBy`, `databricks.sdk.service.compute.ListClustersSortByDirection`, `databricks.sdk.service.compute.ListClustersSortByField`, `databricks.sdk.service.compute.UpdateCluster`, `databricks.sdk.service.compute.UpdateClusterResource` and `any` dataclasses.
+ * Added `databricks.sdk.service.dashboards.ExecuteMessageQueryRequest`, `databricks.sdk.service.dashboards.GenieAttachment`, `databricks.sdk.service.dashboards.GenieConversation`, `databricks.sdk.service.dashboards.GenieCreateConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultRequest`, `databricks.sdk.service.dashboards.GenieGetMessageQueryResultResponse`, `databricks.sdk.service.dashboards.GenieMessage`, `databricks.sdk.service.dashboards.GenieStartConversationMessageRequest`, `databricks.sdk.service.dashboards.GenieStartConversationResponse`, `databricks.sdk.service.dashboards.MessageError`, `databricks.sdk.service.dashboards.MessageErrorType`, `databricks.sdk.service.dashboards.MessageStatus`, `databricks.sdk.service.dashboards.QueryAttachment`, `databricks.sdk.service.dashboards.Result` and `databricks.sdk.service.dashboards.TextAttachment` dataclasses.
+ * Added `any`, `databricks.sdk.service.iam.MigratePermissionsRequest` and `databricks.sdk.service.iam.MigratePermissionsResponse` dataclasses.
+ * Added `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` and `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclasses.
+ * Added `databricks.sdk.service.pipelines.IngestionPipelineDefinition` and `databricks.sdk.service.pipelines.PipelineStateInfoHealth` dataclasses.
+ * Added `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` dataclass.
+ * Added `databricks.sdk.service.settings.Config`, `databricks.sdk.service.settings.CreateNotificationDestinationRequest`, `databricks.sdk.service.settings.DeleteNotificationDestinationRequest`, `databricks.sdk.service.settings.DestinationType`, `databricks.sdk.service.settings.EmailConfig`, `any`, `databricks.sdk.service.settings.GenericWebhookConfig`, `databricks.sdk.service.settings.GetNotificationDestinationRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsRequest`, `databricks.sdk.service.settings.ListNotificationDestinationsResponse`, `databricks.sdk.service.settings.ListNotificationDestinationsResult`, `databricks.sdk.service.settings.MicrosoftTeamsConfig`, `databricks.sdk.service.settings.NotificationDestination`, `databricks.sdk.service.settings.PagerdutyConfig`, `databricks.sdk.service.settings.SlackConfig` and `databricks.sdk.service.settings.UpdateNotificationDestinationRequest` dataclasses.
+ * Added `databricks.sdk.service.sql.AlertCondition`, `databricks.sdk.service.sql.AlertConditionOperand`, `databricks.sdk.service.sql.AlertConditionThreshold`, `databricks.sdk.service.sql.AlertOperandColumn`, `databricks.sdk.service.sql.AlertOperandValue`, `databricks.sdk.service.sql.AlertOperator`, `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.ContextFilter`, `databricks.sdk.service.sql.CreateAlertRequest`, `databricks.sdk.service.sql.CreateAlertRequestAlert`, `databricks.sdk.service.sql.CreateQueryRequest`, `databricks.sdk.service.sql.CreateQueryRequestQuery`, `databricks.sdk.service.sql.CreateQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.CreateVisualizationRequest`, `databricks.sdk.service.sql.CreateVisualizationRequestVisualization`, `databricks.sdk.service.sql.DatePrecision`, `databricks.sdk.service.sql.DateRange`, `databricks.sdk.service.sql.DateRangeValue`, `databricks.sdk.service.sql.DateRangeValueDynamicDateRange`, `databricks.sdk.service.sql.DateValue`, `databricks.sdk.service.sql.DateValueDynamicDate`, `databricks.sdk.service.sql.DeleteAlertsLegacyRequest`, `databricks.sdk.service.sql.DeleteQueriesLegacyRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationsLegacyRequest`, `databricks.sdk.service.sql.DeleteVisualizationRequest`, `any`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.EnumValue`, `databricks.sdk.service.sql.GetAlertsLegacyRequest`, `databricks.sdk.service.sql.GetQueriesLegacyRequest`, `databricks.sdk.service.sql.LegacyAlert`, `databricks.sdk.service.sql.LegacyAlertState`, `databricks.sdk.service.sql.LegacyQuery`, `databricks.sdk.service.sql.LegacyVisualization`, `databricks.sdk.service.sql.LifecycleState`, `databricks.sdk.service.sql.ListAlertsRequest`, `databricks.sdk.service.sql.ListAlertsResponse`, `databricks.sdk.service.sql.ListAlertsResponseAlert`, `databricks.sdk.service.sql.ListQueriesLegacyRequest`, `databricks.sdk.service.sql.ListQueryObjectsResponse`, `databricks.sdk.service.sql.ListQueryObjectsResponseQuery`, `databricks.sdk.service.sql.ListVisualizationsForQueryRequest`, `databricks.sdk.service.sql.ListVisualizationsForQueryResponse`, `databricks.sdk.service.sql.NumericValue`, `databricks.sdk.service.sql.QueryBackedValue`, `databricks.sdk.service.sql.QueryParameter`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger`, `databricks.sdk.service.sql.RestoreQueriesLegacyRequest`, `databricks.sdk.service.sql.RunAsMode`, `databricks.sdk.service.sql.ServerlessChannelInfo`, `databricks.sdk.service.sql.StatementResponse`, `databricks.sdk.service.sql.TextValue`, `databricks.sdk.service.sql.TrashAlertRequest`, `databricks.sdk.service.sql.TrashQueryRequest`, `databricks.sdk.service.sql.UpdateAlertRequest`, `databricks.sdk.service.sql.UpdateAlertRequestAlert`, `databricks.sdk.service.sql.UpdateQueryRequest`, `databricks.sdk.service.sql.UpdateQueryRequestQuery`, `databricks.sdk.service.sql.UpdateVisualizationRequest` and `databricks.sdk.service.sql.UpdateVisualizationRequestVisualization` dataclasses.
+ * Added `force` field for `databricks.sdk.service.catalog.DeleteSchemaRequest`.
+ * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.GetBindingsRequest`.
+ * Added `include_aliases` field for `databricks.sdk.service.catalog.GetByAliasRequest`.
+ * Added `include_aliases` field for `databricks.sdk.service.catalog.GetModelVersionRequest`.
+ * Added `include_aliases` field for `databricks.sdk.service.catalog.GetRegisteredModelRequest`.
+ * Added `max_results` and `page_token` fields for `databricks.sdk.service.catalog.ListSystemSchemasRequest`.
+ * Added `next_page_token` field for `databricks.sdk.service.catalog.ListSystemSchemasResponse`.
+ * Added `aliases` field for `databricks.sdk.service.catalog.ModelVersionInfo`.
+ * Added `next_page_token` field for `databricks.sdk.service.catalog.WorkspaceBindingsResponse`.
+ * Added `version` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest`.
+ * Added `filter_by`, `page_size`, `page_token` and `sort_by` fields for `databricks.sdk.service.compute.ListClustersRequest`.
+ * Added `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.compute.ListClustersResponse`.
+ * Added `page_token` field for `databricks.sdk.service.jobs.GetRunRequest`.
+ * Added `iterations`, `next_page_token` and `prev_page_token` fields for `databricks.sdk.service.jobs.Run`.
+ * Added `create_time`, `created_by`, `creator_username` and `scopes` fields for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`.
+ * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationsOutput`.
+ * Added `create_time` and `created_by` fields for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationOutput`.
+ * Added `next_page_token` field for `databricks.sdk.service.oauth2.GetPublishedAppIntegrationsOutput`.
+ * Added `enable_local_disk_encryption` field for `databricks.sdk.service.pipelines.PipelineCluster`.
+ * Added `whl` field for `databricks.sdk.service.pipelines.PipelineLibrary`.
+ * Added `health` field for `databricks.sdk.service.pipelines.PipelineStateInfo`.
+ * Added `ai21labs_api_key_plaintext` field for `databricks.sdk.service.serving.Ai21LabsConfig`.
+ * Added `aws_access_key_id_plaintext` and `aws_secret_access_key_plaintext` fields for `databricks.sdk.service.serving.AmazonBedrockConfig`.
+ * Added `anthropic_api_key_plaintext` field for `databricks.sdk.service.serving.AnthropicConfig`. + * Added `cohere_api_base` and `cohere_api_key_plaintext` fields for `databricks.sdk.service.serving.CohereConfig`. + * Added `databricks_api_token_plaintext` field for `databricks.sdk.service.serving.DatabricksModelServingConfig`. + * Added `google_cloud_vertex_ai_config` field for `databricks.sdk.service.serving.ExternalModel`. + * Added `microsoft_entra_client_secret_plaintext` and `openai_api_key_plaintext` fields for `databricks.sdk.service.serving.OpenAiConfig`. + * Added `palm_api_key_plaintext` field for `databricks.sdk.service.serving.PaLmConfig`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.CreateRecipient`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.GetRecipientSharePermissionsResponse`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProviderSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListProvidersRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListProvidersResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListRecipientsRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListRecipientsResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.ListSharesRequest`. + * Added `next_page_token` field for `databricks.sdk.service.sharing.ListSharesResponse`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.SharePermissionsRequest`. + * Added `expiration_time` field for `databricks.sdk.service.sharing.UpdateRecipient`. + * Added `max_results` and `page_token` fields for `databricks.sdk.service.sharing.UpdateSharePermissions`. + * Added `condition`, `create_time`, `custom_body`, `custom_subject`, `display_name`, `lifecycle_state`, `owner_user_name`, `parent_path`, `query_id`, `seconds_to_retrigger`, `trigger_time` and `update_time` fields for `databricks.sdk.service.sql.Alert`. + * Added `id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Added `id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Added `page_token` field for `databricks.sdk.service.sql.ListQueriesRequest`. + * Added `apply_auto_limit`, `catalog`, `create_time`, `display_name`, `last_modifier_user_name`, `lifecycle_state`, `owner_user_name`, `parameters`, `parent_path`, `query_text`, `run_as_mode`, `schema`, `update_time` and `warehouse_id` fields for `databricks.sdk.service.sql.Query`. + * Added `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. + * Added `query_source` field for `databricks.sdk.service.sql.QueryInfo`. + * Added `create_time`, `display_name`, `query_id`, `serialized_options`, `serialized_query_plan` and `update_time` fields for `databricks.sdk.service.sql.Visualization`. + * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.CreateBudgetConfigurationResponse` dataclass. + * Changed `create()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service . New request type is `databricks.sdk.service.billing.CreateBudgetConfigurationRequest` dataclass. 
+ * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.DeleteBudgetConfigurationRequest` dataclass.
+ * Changed `delete()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `any` dataclass.
+ * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.GetBudgetConfigurationRequest` dataclass.
+ * Changed `get()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.GetBudgetConfigurationResponse` dataclass.
+ * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.ListBudgetConfigurationsResponse` dataclass.
+ * Changed `list()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to require request of `databricks.sdk.service.billing.ListBudgetConfigurationsRequest` dataclass.
+ * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service to return `databricks.sdk.service.billing.UpdateBudgetConfigurationResponse` dataclass.
+ * Changed `update()` method for [a.budgets](https://databricks-sdk-py.readthedocs.io/en/latest/account/budgets.html) account-level service. New request type is `databricks.sdk.service.billing.UpdateBudgetConfigurationRequest` dataclass.
+ * Changed `create()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service with new required argument order.
+ * Changed `list()` method for [a.custom_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/custom_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListCustomAppIntegrationsRequest` dataclass.
+ * Changed `list()` method for [a.published_app_integration](https://databricks-sdk-py.readthedocs.io/en/latest/account/published_app_integration.html) account-level service to require request of `databricks.sdk.service.oauth2.ListPublishedAppIntegrationsRequest` dataclass.
+ * Changed `delete()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service to return `any` dataclass.
+ * Changed `update()` method for [a.workspace_assignment](https://databricks-sdk-py.readthedocs.io/en/latest/account/workspace_assignment.html) account-level service with new required argument order.
+ * Changed `create()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateAlertRequest` dataclass.
+ * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `any` dataclass.
+ * Changed `delete()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashAlertRequest` dataclass.
+ * Changed `get()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service with new required argument order.
+ * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.ListAlertsResponse` dataclass.
+ * Changed `list()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to require request of `databricks.sdk.service.sql.ListAlertsRequest` dataclass.
+ * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service to return `databricks.sdk.service.sql.Alert` dataclass.
+ * Changed `update()` method for [w.alerts](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/alerts.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateAlertRequest` dataclass.
+ * Changed `create()` and `edit()` methods for [w.cluster_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/cluster_policies.html) workspace-level service with new required argument order.
+ * Changed `get()` method for [w.model_versions](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/model_versions.html) workspace-level service to return `databricks.sdk.service.catalog.ModelVersionInfo` dataclass.
+ * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service. New request type is `databricks.sdk.service.iam.MigratePermissionsRequest` dataclass.
+ * Changed `migrate_permissions()` method for [w.permission_migration](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/permission_migration.html) workspace-level service to return `databricks.sdk.service.iam.MigratePermissionsResponse` dataclass.
+ * Changed `create()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateQueryRequest` dataclass.
+ * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `any` dataclass.
+ * Changed `delete()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.TrashQueryRequest` dataclass.
+ * Changed `get()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service with new required argument order.
+ * Changed `list()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service to return `databricks.sdk.service.sql.ListQueryObjectsResponse` dataclass.
+ * Changed `update()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateQueryRequest` dataclass.
+ * Changed `create()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.CreateVisualizationRequest` dataclass.
+ * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service to return `any` dataclass.
+ * Changed `delete()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.DeleteVisualizationRequest` dataclass.
+ * Changed `update()` method for [w.query_visualizations](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_visualizations.html) workspace-level service. New request type is `databricks.sdk.service.sql.UpdateVisualizationRequest` dataclass.
+ * Changed `list()` method for [w.shares](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/shares.html) workspace-level service to require request of `databricks.sdk.service.sharing.ListSharesRequest` dataclass.
+ * Changed `execute_statement()` and `get_statement()` methods for [w.statement_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/statement_execution.html) workspace-level service to return `databricks.sdk.service.sql.StatementResponse` dataclass.
+ * Changed `securable_type` field for `databricks.sdk.service.catalog.GetBindingsRequest` to `databricks.sdk.service.catalog.GetBindingsSecurableType` dataclass.
+ * Changed `securable_type` field for `databricks.sdk.service.catalog.UpdateWorkspaceBindingsParameters` to `databricks.sdk.service.catalog.UpdateBindingsSecurableType` dataclass.
+ * Changed `name` field for `databricks.sdk.service.compute.CreatePolicy` to no longer be required.
+ * Changed `name` field for `databricks.sdk.service.compute.EditPolicy` to no longer be required.
+ * Changed `policy_family_id` field for `databricks.sdk.service.compute.GetPolicyFamilyRequest` to `str` dataclass.
+ * Changed `policy_families` field for `databricks.sdk.service.compute.ListPolicyFamiliesResponse` to no longer be required.
+ * Changed `definition`, `description`, `name` and `policy_family_id` fields for `databricks.sdk.service.compute.PolicyFamily` to no longer be required.
+ * Changed `permissions` field for `databricks.sdk.service.iam.UpdateWorkspaceAssignments` to no longer be required.
+ * Changed `access_control_list` field for `databricks.sdk.service.jobs.CreateJob` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass.
+ * Changed `access_control_list` field for `databricks.sdk.service.jobs.SubmitRun` to `databricks.sdk.service.jobs.JobAccessControlRequestList` dataclass.
+ * Changed `name` and `redirect_urls` fields for `databricks.sdk.service.oauth2.CreateCustomAppIntegration` to no longer be required.
+ * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.CreatePipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
+ * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.EditPipeline` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
+ * Changed `ingestion_definition` field for `databricks.sdk.service.pipelines.PipelineSpec` to `databricks.sdk.service.pipelines.IngestionPipelineDefinition` dataclass.
+ * Changed `ai21labs_api_key` field for `databricks.sdk.service.serving.Ai21LabsConfig` to no longer be required.
+ * Changed `aws_access_key_id` and `aws_secret_access_key` fields for `databricks.sdk.service.serving.AmazonBedrockConfig` to no longer be required. + * Changed `anthropic_api_key` field for `databricks.sdk.service.serving.AnthropicConfig` to no longer be required. + * Changed `cohere_api_key` field for `databricks.sdk.service.serving.CohereConfig` to no longer be required. + * Changed `databricks_api_token` field for `databricks.sdk.service.serving.DatabricksModelServingConfig` to no longer be required. + * Changed `palm_api_key` field for `databricks.sdk.service.serving.PaLmConfig` to no longer be required. + * Changed `tags` field for `databricks.sdk.service.sql.Query` to `databricks.sdk.service.sql.List` dataclass. + * Changed `user_ids` and `warehouse_ids` fields for `databricks.sdk.service.sql.QueryFilter` to `databricks.sdk.service.sql.List` dataclass. + * Changed `results` field for `databricks.sdk.service.sql.QueryList` to `databricks.sdk.service.sql.LegacyQueryList` dataclass. + * Changed `visualization` field for `databricks.sdk.service.sql.Widget` to `databricks.sdk.service.sql.LegacyVisualization` dataclass. + * Removed [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service. + * Removed `restore()` method for [w.queries](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/queries.html) workspace-level service. + * Removed `databricks.sdk.service.marketplace.FilterType`, `databricks.sdk.service.marketplace.ProviderIconFile`, `databricks.sdk.service.marketplace.ProviderIconType`, `databricks.sdk.service.marketplace.ProviderListingSummaryInfo`, `databricks.sdk.service.marketplace.SortBy` and `databricks.sdk.service.marketplace.VisibilityFilter` dataclasses. + * Removed `databricks.sdk.service.billing.Budget`, `databricks.sdk.service.billing.BudgetAlert`, `databricks.sdk.service.billing.BudgetList`, `databricks.sdk.service.billing.BudgetWithStatus`, `databricks.sdk.service.billing.BudgetWithStatusStatusDailyItem`, `databricks.sdk.service.billing.DeleteBudgetRequest`, `any`, `databricks.sdk.service.billing.GetBudgetRequest`, `any`, `databricks.sdk.service.billing.WrappedBudget` and `databricks.sdk.service.billing.WrappedBudgetWithStatus` dataclasses. + * Removed `any`, `databricks.sdk.service.iam.PermissionMigrationRequest` and `databricks.sdk.service.iam.PermissionMigrationResponse` dataclasses. + * Removed `databricks.sdk.service.pipelines.ManagedIngestionPipelineDefinition` dataclass. 
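The `*_plaintext` additions earlier in this list and the `to no longer be required` changes just above are two halves of the same feature: each external-model credential can now be supplied either as a Databricks secret reference or as a literal value. A minimal sketch, assuming these serving config dataclasses accept the fields as ordinary keyword arguments (the key values are illustrative):

```python
from databricks.sdk.service.serving import AnthropicConfig

# Before this release anthropic_api_key was required and had to reference a
# Databricks secret; now either variant can be set on its own.
via_secret = AnthropicConfig(anthropic_api_key='{{secrets/my-scope/anthropic-key}}')
via_plaintext = AnthropicConfig(anthropic_api_key_plaintext='sk-ant-example-key')
```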
+ * Removed `databricks.sdk.service.serving.App`, `databricks.sdk.service.serving.AppDeployment`, `databricks.sdk.service.serving.AppDeploymentArtifacts`, `databricks.sdk.service.serving.AppDeploymentMode`, `databricks.sdk.service.serving.AppDeploymentState`, `databricks.sdk.service.serving.AppDeploymentStatus`, `databricks.sdk.service.serving.AppEnvironment`, `databricks.sdk.service.serving.AppState`, `databricks.sdk.service.serving.AppStatus`, `databricks.sdk.service.serving.CreateAppDeploymentRequest`, `databricks.sdk.service.serving.CreateAppRequest`, `databricks.sdk.service.serving.DeleteAppRequest`, `databricks.sdk.service.serving.EnvVariable`, `databricks.sdk.service.serving.GetAppDeploymentRequest`, `databricks.sdk.service.serving.GetAppEnvironmentRequest`, `databricks.sdk.service.serving.GetAppRequest`, `databricks.sdk.service.serving.ListAppDeploymentsRequest`, `databricks.sdk.service.serving.ListAppDeploymentsResponse`, `databricks.sdk.service.serving.ListAppsRequest`, `databricks.sdk.service.serving.ListAppsResponse`, `databricks.sdk.service.serving.StartAppRequest`, `databricks.sdk.service.serving.StopAppRequest`, `any` and `databricks.sdk.service.serving.UpdateAppRequest` dataclasses. + * Removed `databricks.sdk.service.sql.CreateQueryVisualizationRequest`, `databricks.sdk.service.sql.DeleteAlertRequest`, `databricks.sdk.service.sql.DeleteQueryRequest`, `databricks.sdk.service.sql.DeleteQueryVisualizationRequest`, `databricks.sdk.service.sql.ExecuteStatementResponse`, `databricks.sdk.service.sql.GetStatementResponse`, `databricks.sdk.service.sql.RestoreQueryRequest`, `databricks.sdk.service.sql.StatementId`, `databricks.sdk.service.sql.UserId` and `databricks.sdk.service.sql.WarehouseId` dataclasses. + * Removed `databricks.sdk.service.compute.PolicyFamilyId` dataclass. + * Removed `can_use_client` field for `databricks.sdk.service.compute.ListClustersRequest`. + * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.ListListingsRequest`. + * Removed `provider_summary` field for `databricks.sdk.service.marketplace.Listing`. + * Removed `filters` field for `databricks.sdk.service.marketplace.ListingSetting`. + * Removed `metastore_id` field for `databricks.sdk.service.marketplace.ListingSummary`. + * Removed `is_ascending` and `sort_by` fields for `databricks.sdk.service.marketplace.SearchListingsRequest`. + * Removed `created_at`, `last_triggered_at`, `name`, `options`, `parent`, `query`, `rearm`, `updated_at` and `user` fields for `databricks.sdk.service.sql.Alert`. + * Removed `alert_id` field for `databricks.sdk.service.sql.GetAlertRequest`. + * Removed `query_id` field for `databricks.sdk.service.sql.GetQueryRequest`. + * Removed `order`, `page` and `q` fields for `databricks.sdk.service.sql.ListQueriesRequest`. + * Removed `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. + * Removed `can_edit`, `created_at`, `data_source_id`, `is_archived`, `is_draft`, `is_favorite`, `is_safe`, `last_modified_by`, `last_modified_by_id`, `latest_query_data_id`, `name`, `options`, `parent`, `permission_tier`, `query`, `query_hash`, `run_as_role`, `updated_at`, `user`, `user_id` and `visualizations` fields for `databricks.sdk.service.sql.Query`. + * Removed `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `can_subscribe_to_live_query` field for `databricks.sdk.service.sql.QueryInfo`. 
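Note that the `databricks.sdk.service.serving.App*` removals above are a relocation rather than a deletion: the same symbols reappear in the new `databricks.sdk.service.apps` module introduced by the `apps.py` diff below. A sketch of the import change downstream code needs:

```python
# Old location, removed in this release:
#   from databricks.sdk.service.serving import App, AppDeployment, AppsAPI
# New location:
from databricks.sdk.service.apps import App, AppDeployment, AppsAPI
```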
+ * Removed `metadata_time_ms`, `planning_time_ms` and `query_execution_time_ms` fields for `databricks.sdk.service.sql.QueryMetrics`. + * Removed `created_at`, `description`, `name`, `options`, `query` and `updated_at` fields for `databricks.sdk.service.sql.Visualization`. + +OpenAPI SHA: f98c07f9c71f579de65d2587bb0292f83d10e55d, Date: 2024-08-12 + ## 0.29.0 ### Breaking Changes diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 7603678e3..48fe1beb6 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -5,8 +5,9 @@ from databricks.sdk.mixins.compute import ClustersExt from databricks.sdk.mixins.files import DbfsExt from databricks.sdk.mixins.workspace import WorkspaceExt +from databricks.sdk.service.apps import AppsAPI from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI, - LogDeliveryAPI) + LogDeliveryAPI, UsageDashboardsAPI) from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI, AccountMetastoresAPI, AccountStorageCredentialsAPI, @@ -55,7 +56,7 @@ NetworksAPI, PrivateAccessAPI, StorageAPI, VpcEndpointsAPI, Workspace, WorkspacesAPI) -from databricks.sdk.service.serving import (AppsAPI, ServingEndpointsAPI, +from databricks.sdk.service.serving import (ServingEndpointsAPI, ServingEndpointsDataPlaneAPI) from databricks.sdk.service.settings import (AccountIpAccessListsAPI, AccountSettingsAPI, @@ -793,6 +794,7 @@ def __init__(self, self._settings = AccountSettingsAPI(self._api_client) self._storage = StorageAPI(self._api_client) self._storage_credentials = AccountStorageCredentialsAPI(self._api_client) + self._usage_dashboards = UsageDashboardsAPI(self._api_client) self._users = AccountUsersAPI(self._api_client) self._vpc_endpoints = VpcEndpointsAPI(self._api_client) self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client) @@ -907,6 +909,11 @@ def storage_credentials(self) -> AccountStorageCredentialsAPI: """These APIs manage storage credentials for a particular metastore.""" return self._storage_credentials + @property + def usage_dashboards(self) -> UsageDashboardsAPI: + """These APIs manage usage dashboards for this account.""" + return self._usage_dashboards + @property def users(self) -> AccountUsersAPI: """User identities recognized by Databricks and represented by email addresses.""" diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py new file mode 100755 index 000000000..7ec495b19 --- /dev/null +++ b/databricks/sdk/service/apps.py @@ -0,0 +1,977 @@ +# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT. + +from __future__ import annotations + +import logging +import random +import time +from dataclasses import dataclass +from datetime import timedelta +from enum import Enum +from typing import Callable, Dict, Iterator, List, Optional + +from ..errors import OperationFailed +from ._internal import Wait, _enum, _from_dict, _repeated_dict + +_LOG = logging.getLogger('databricks.sdk') + +# all definitions in this file are in alphabetical order + + +@dataclass +class App: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace.""" + + active_deployment: Optional[AppDeployment] = None + """The active deployment of the app.""" + + create_time: Optional[str] = None + """The creation time of the app. 
Formatted timestamp in ISO 8601."""
+
+    creator: Optional[str] = None
+    """The email of the user that created the app."""
+
+    description: Optional[str] = None
+    """The description of the app."""
+
+    pending_deployment: Optional[AppDeployment] = None
+    """The pending deployment of the app."""
+
+    service_principal_id: Optional[int] = None
+
+    service_principal_name: Optional[str] = None
+
+    status: Optional[AppStatus] = None
+
+    update_time: Optional[str] = None
+    """The update time of the app. Formatted timestamp in ISO 8601."""
+
+    updater: Optional[str] = None
+    """The email of the user that last updated the app."""
+
+    url: Optional[str] = None
+    """The URL of the app once it is deployed."""
+
+    def as_dict(self) -> dict:
+        """Serializes the App into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict()
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
+        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.status: body['status'] = self.status.as_dict()
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.updater is not None: body['updater'] = self.updater
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> App:
+        """Deserializes the App from a dictionary."""
+        return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
+                   create_time=d.get('create_time', None),
+                   creator=d.get('creator', None),
+                   description=d.get('description', None),
+                   name=d.get('name', None),
+                   pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
+                   service_principal_id=d.get('service_principal_id', None),
+                   service_principal_name=d.get('service_principal_name', None),
+                   status=_from_dict(d, 'status', AppStatus),
+                   update_time=d.get('update_time', None),
+                   updater=d.get('updater', None),
+                   url=d.get('url', None))
+
+
+@dataclass
+class AppAccessControlRequest:
+    group_name: Optional[str] = None
+    """name of the group"""
+
+    permission_level: Optional[AppPermissionLevel] = None
+    """Permission level"""
+
+    service_principal_name: Optional[str] = None
+    """application ID of a service principal"""
+
+    user_name: Optional[str] = None
+    """name of the user"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppAccessControlRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
+        """Deserializes the AppAccessControlRequest from a dictionary."""
+        return cls(group_name=d.get('group_name', None),
+                   permission_level=_enum(d, 'permission_level', AppPermissionLevel),
+                   service_principal_name=d.get('service_principal_name', None),
+                   user_name=d.get('user_name', None))
+
+
+@dataclass
+class AppAccessControlResponse:
+    all_permissions: Optional[List[AppPermission]] = None
+    """All permissions."""
+
+    display_name: Optional[str] = None
+    """Display name of the user or service principal."""
+
+    group_name: Optional[str] = None
+    """name of the group"""
+
+    service_principal_name: Optional[str] = None
+    """Name of the service principal."""
+
+    user_name: Optional[str] = None
+    """name of the user"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppAccessControlResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = [v.as_dict() for v in self.all_permissions]
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
+        """Deserializes the AppAccessControlResponse from a dictionary."""
+        return cls(all_permissions=_repeated_dict(d, 'all_permissions', AppPermission),
+                   display_name=d.get('display_name', None),
+                   group_name=d.get('group_name', None),
+                   service_principal_name=d.get('service_principal_name', None),
+                   user_name=d.get('user_name', None))
+
+
+@dataclass
+class AppDeployment:
+    source_code_path: str
+    """The workspace file system path of the source code used to create the app deployment. This is
+    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
+    app. The former refers to the original source code location of the app in the workspace during
+    deployment creation, whereas the latter provides a system generated stable snapshotted source
+    code path used by the deployment."""
+
+    create_time: Optional[str] = None
+    """The creation time of the deployment. Formatted timestamp in ISO 8601."""
+
+    creator: Optional[str] = None
+    """The email of the user that created the deployment."""
+
+    deployment_artifacts: Optional[AppDeploymentArtifacts] = None
+    """The deployment artifacts for an app."""
+
+    deployment_id: Optional[str] = None
+    """The unique id of the deployment."""
+
+    mode: Optional[AppDeploymentMode] = None
+    """The mode of which the deployment will manage the source code."""
+
+    status: Optional[AppDeploymentStatus] = None
+    """Status and status message of the deployment"""
+
+    update_time: Optional[str] = None
+    """The update time of the deployment. Formatted timestamp in ISO 8601."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict()
+        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
+        if self.mode is not None: body['mode'] = self.mode.value
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.status: body['status'] = self.status.as_dict()
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
+        """Deserializes the AppDeployment from a dictionary."""
+        return cls(create_time=d.get('create_time', None),
+                   creator=d.get('creator', None),
+                   deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts),
+                   deployment_id=d.get('deployment_id', None),
+                   mode=_enum(d, 'mode', AppDeploymentMode),
+                   source_code_path=d.get('source_code_path', None),
+                   status=_from_dict(d, 'status', AppDeploymentStatus),
+                   update_time=d.get('update_time', None))
+
+
+@dataclass
+class AppDeploymentArtifacts:
+    source_code_path: Optional[str] = None
+    """The snapshotted workspace file system path of the source code loaded by the deployed app."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
+        """Deserializes the AppDeploymentArtifacts from a dictionary."""
+        return cls(source_code_path=d.get('source_code_path', None))
+
+
+class AppDeploymentMode(Enum):
+
+    AUTO_SYNC = 'AUTO_SYNC'
+    SNAPSHOT = 'SNAPSHOT'
+
+
+class AppDeploymentState(Enum):
+
+    FAILED = 'FAILED'
+    IN_PROGRESS = 'IN_PROGRESS'
+    STOPPED = 'STOPPED'
+    SUCCEEDED = 'SUCCEEDED'
+
+
+@dataclass
+class AppDeploymentStatus:
+    message: Optional[str] = None
+    """Message corresponding with the deployment state."""
+
+    state: Optional[AppDeploymentState] = None
+    """State of the deployment."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
+        """Deserializes the AppDeploymentStatus from a dictionary."""
+        return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState))
+
+
+@dataclass
+class AppPermission:
+    inherited: Optional[bool] = None
+
+    inherited_from_object: Optional[List[str]] = None
+
+    permission_level: Optional[AppPermissionLevel] = None
+    """Permission level"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppPermission into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = [v for v in self.inherited_from_object]
+        if self.permission_level is not None: body['permission_level'] = self.permission_level.value
+
return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppPermission: + """Deserializes the AppPermission from a dictionary.""" + return cls(inherited=d.get('inherited', None), + inherited_from_object=d.get('inherited_from_object', None), + permission_level=_enum(d, 'permission_level', AppPermissionLevel)) + + +class AppPermissionLevel(Enum): + """Permission level""" + + CAN_MANAGE = 'CAN_MANAGE' + CAN_USE = 'CAN_USE' + + +@dataclass +class AppPermissions: + access_control_list: Optional[List[AppAccessControlResponse]] = None + + object_id: Optional[str] = None + + object_type: Optional[str] = None + + def as_dict(self) -> dict: + """Serializes the AppPermissions into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_control_list: + body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.object_id is not None: body['object_id'] = self.object_id + if self.object_type is not None: body['object_type'] = self.object_type + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppPermissions: + """Deserializes the AppPermissions from a dictionary.""" + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlResponse), + object_id=d.get('object_id', None), + object_type=d.get('object_type', None)) + + +@dataclass +class AppPermissionsDescription: + description: Optional[str] = None + + permission_level: Optional[AppPermissionLevel] = None + """Permission level""" + + def as_dict(self) -> dict: + """Serializes the AppPermissionsDescription into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.permission_level is not None: body['permission_level'] = self.permission_level.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription: + """Deserializes the AppPermissionsDescription from a dictionary.""" + return cls(description=d.get('description', None), + permission_level=_enum(d, 'permission_level', AppPermissionLevel)) + + +@dataclass +class AppPermissionsRequest: + access_control_list: Optional[List[AppAccessControlRequest]] = None + + app_name: Optional[str] = None + """The app for which to get or manage permissions.""" + + def as_dict(self) -> dict: + """Serializes the AppPermissionsRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.access_control_list: + body['access_control_list'] = [v.as_dict() for v in self.access_control_list] + if self.app_name is not None: body['app_name'] = self.app_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest: + """Deserializes the AppPermissionsRequest from a dictionary.""" + return cls(access_control_list=_repeated_dict(d, 'access_control_list', AppAccessControlRequest), + app_name=d.get('app_name', None)) + + +class AppState(Enum): + + CREATING = 'CREATING' + DELETED = 'DELETED' + DELETING = 'DELETING' + ERROR = 'ERROR' + IDLE = 'IDLE' + RUNNING = 'RUNNING' + STARTING = 'STARTING' + + +@dataclass +class AppStatus: + message: Optional[str] = None + """Message corresponding with the app state.""" + + state: Optional[AppState] = None + """State of the app.""" + + def as_dict(self) -> dict: + """Serializes the AppStatus into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.message is not None: body['message'] = self.message + if self.state is not None: body['state'] 
= self.state.value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> AppStatus: + """Deserializes the AppStatus from a dictionary.""" + return cls(message=d.get('message', None), state=_enum(d, 'state', AppState)) + + +@dataclass +class CreateAppDeploymentRequest: + source_code_path: str + """The workspace file system path of the source code used to create the app deployment. This is + different from `deployment_artifacts.source_code_path`, which is the path used by the deployed + app. The former refers to the original source code location of the app in the workspace during + deployment creation, whereas the latter provides a system generated stable snapshotted source + code path used by the deployment.""" + + app_name: Optional[str] = None + """The name of the app.""" + + mode: Optional[AppDeploymentMode] = None + """The mode of which the deployment will manage the source code.""" + + def as_dict(self) -> dict: + """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.app_name is not None: body['app_name'] = self.app_name + if self.mode is not None: body['mode'] = self.mode.value + if self.source_code_path is not None: body['source_code_path'] = self.source_code_path + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest: + """Deserializes the CreateAppDeploymentRequest from a dictionary.""" + return cls(app_name=d.get('app_name', None), + mode=_enum(d, 'mode', AppDeploymentMode), + source_code_path=d.get('source_code_path', None)) + + +@dataclass +class CreateAppRequest: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace.""" + + description: Optional[str] = None + """The description of the app.""" + + def as_dict(self) -> dict: + """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest: + """Deserializes the CreateAppRequest from a dictionary.""" + return cls(description=d.get('description', None), name=d.get('name', None)) + + +@dataclass +class DeleteResponse: + + def as_dict(self) -> dict: + """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> DeleteResponse: + """Deserializes the DeleteResponse from a dictionary.""" + return cls() + + +@dataclass +class GetAppPermissionLevelsResponse: + permission_levels: Optional[List[AppPermissionsDescription]] = None + """Specific permission levels""" + + def as_dict(self) -> dict: + """Serializes the GetAppPermissionLevelsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse: + """Deserializes the GetAppPermissionLevelsResponse from a dictionary.""" + return cls(permission_levels=_repeated_dict(d, 'permission_levels', AppPermissionsDescription)) + + +@dataclass +class ListAppDeploymentsResponse: + app_deployments: Optional[List[AppDeployment]] = None + """Deployment history of the 
app.""" + + next_page_token: Optional[str] = None + """Pagination token to request the next page of apps.""" + + def as_dict(self) -> dict: + """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse: + """Deserializes the ListAppDeploymentsResponse from a dictionary.""" + return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), + next_page_token=d.get('next_page_token', None)) + + +@dataclass +class ListAppsResponse: + apps: Optional[List[App]] = None + + next_page_token: Optional[str] = None + """Pagination token to request the next page of apps.""" + + def as_dict(self) -> dict: + """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.apps: body['apps'] = [v.as_dict() for v in self.apps] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse: + """Deserializes the ListAppsResponse from a dictionary.""" + return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) + + +@dataclass +class StartAppRequest: + name: Optional[str] = None + """The name of the app.""" + + +@dataclass +class StopAppRequest: + name: Optional[str] = None + """The name of the app.""" + + +@dataclass +class StopAppResponse: + + def as_dict(self) -> dict: + """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> StopAppResponse: + """Deserializes the StopAppResponse from a dictionary.""" + return cls() + + +@dataclass +class UpdateAppRequest: + name: str + """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. + It must be unique within the workspace.""" + + description: Optional[str] = None + """The description of the app.""" + + def as_dict(self) -> dict: + """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.description is not None: body['description'] = self.description + if self.name is not None: body['name'] = self.name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest: + """Deserializes the UpdateAppRequest from a dictionary.""" + return cls(description=d.get('description', None), name=d.get('name', None)) + + +class AppsAPI: + """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend + Databricks services, and enable users to interact through single sign-on.""" + + def __init__(self, api_client): + self._api = api_client + + def wait_get_app_idle(self, + name: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[App], None]] = None) -> App: + deadline = time.time() + timeout.total_seconds() + target_states = (AppState.IDLE, ) + failure_states = (AppState.ERROR, ) + status_message = 'polling...' 
+ attempt = 1 + while time.time() < deadline: + poll = self.get(name=name) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach IDLE, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"name={name}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def wait_get_deployment_app_succeeded( + self, + app_name: str, + deployment_id: str, + timeout=timedelta(minutes=20), + callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: + deadline = time.time() + timeout.total_seconds() + target_states = (AppDeploymentState.SUCCEEDED, ) + failure_states = (AppDeploymentState.FAILED, ) + status_message = 'polling...' + attempt = 1 + while time.time() < deadline: + poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) + status = poll.status.state + status_message = f'current status: {status}' + if poll.status: + status_message = poll.status.message + if status in target_states: + return poll + if callback: + callback(poll) + if status in failure_states: + msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' + raise OperationFailed(msg) + prefix = f"app_name={app_name}, deployment_id={deployment_id}" + sleep = attempt + if sleep > 10: + # sleep 10s max per attempt + sleep = 10 + _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') + time.sleep(sleep + random.random()) + attempt += 1 + raise TimeoutError(f'timed out after {timeout}: {status_message}') + + def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]: + """Create an app. + + Creates a new app. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. + """ + body = {} + if description is not None: body['description'] = description + if name is not None: body['name'] = name + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers) + return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name']) + + def create_and_wait(self, + name: str, + *, + description: Optional[str] = None, + timeout=timedelta(minutes=20)) -> App: + return self.create(description=description, name=name).result(timeout=timeout) + + def delete(self, name: str): + """Delete an app. + + Deletes an app. + + :param name: str + The name of the app. + + + """ + + headers = {'Accept': 'application/json', } + + self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers) + + def deploy(self, + app_name: str, + source_code_path: str, + *, + mode: Optional[AppDeploymentMode] = None) -> Wait[AppDeployment]: + """Create an app deployment. + + Creates an app deployment for the app with the supplied name. + + :param app_name: str + The name of the app. 
+ :param source_code_path: str + The workspace file system path of the source code used to create the app deployment. This is + different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. + The former refers to the original source code location of the app in the workspace during deployment + creation, whereas the latter provides a system generated stable snapshotted source code path used by + the deployment. + :param mode: :class:`AppDeploymentMode` (optional) + The mode of which the deployment will manage the source code. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + """ + body = {} + if mode is not None: body['mode'] = mode.value + if source_code_path is not None: body['source_code_path'] = source_code_path + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', + f'/api/2.0/preview/apps/{app_name}/deployments', + body=body, + headers=headers) + return Wait(self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=app_name, + deployment_id=op_response['deployment_id']) + + def deploy_and_wait( + self, + app_name: str, + source_code_path: str, + *, + mode: Optional[AppDeploymentMode] = None, + timeout=timedelta(minutes=20)) -> AppDeployment: + return self.deploy(app_name=app_name, mode=mode, + source_code_path=source_code_path).result(timeout=timeout) + + def get(self, name: str) -> App: + """Get an app. + + Retrieves information for the app with the supplied name. + + :param name: str + The name of the app. + + :returns: :class:`App` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers) + return App.from_dict(res) + + def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: + """Get an app deployment. + + Retrieves information for the app deployment with the supplied name and deployment id. + + :param app_name: str + The name of the app. + :param deployment_id: str + The unique id of the deployment. + + :returns: :class:`AppDeployment` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}', + headers=headers) + return AppDeployment.from_dict(res) + + def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse: + """Get app permission levels. + + Gets the permission levels that a user can have on an object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`GetAppPermissionLevelsResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}/permissionLevels', headers=headers) + return GetAppPermissionLevelsResponse.from_dict(res) + + def get_permissions(self, app_name: str) -> AppPermissions: + """Get app permissions. + + Gets the permissions of an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`AppPermissions` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', f'/api/2.0/permissions/apps/{app_name}', headers=headers) + return AppPermissions.from_dict(res) + + def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: + """List apps. 
+ + Lists all apps in the workspace. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers) + if 'apps' in json: + for v in json['apps']: + yield App.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def list_deployments(self, + app_name: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[AppDeployment]: + """List app deployments. + + Lists all app deployments for the app with the supplied name. + + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + f'/api/2.0/preview/apps/{app_name}/deployments', + query=query, + headers=headers) + if 'app_deployments' in json: + for v in json['app_deployments']: + yield AppDeployment.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + def set_permissions( + self, + app_name: str, + *, + access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + """Set app permissions. + + Sets permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + """ + body = {} + if access_control_list is not None: + body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PUT', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers) + return AppPermissions.from_dict(res) + + def start(self, name: str) -> Wait[AppDeployment]: + """Start an app. + + Start the last active deployment of the app in the workspace. + + :param name: str + The name of the app. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + """ + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + op_response = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers) + return Wait(self.wait_get_deployment_app_succeeded, + response=AppDeployment.from_dict(op_response), + app_name=name, + deployment_id=op_response['deployment_id']) + + def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> AppDeployment: + return self.start(name=name).result(timeout=timeout) + + def stop(self, name: str): + """Stop an app. + + Stops the active deployment of the app in the workspace. 
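Taken together, the methods above support a short create-deploy-stop lifecycle. A minimal sketch, assuming the workspace client exposes this service as `w.apps` (per the `databricks/sdk/__init__.py` diff earlier); the app name and source path are illustrative:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Blocks until the new app reaches IDLE (via wait_get_app_idle).
app = w.apps.create_and_wait(name='my-demo-app', description='demo')

# Blocks until the deployment reaches SUCCEEDED.
dep = w.apps.deploy_and_wait(app_name=app.name,
                             source_code_path='/Workspace/Users/someone@example.com/my-demo-app')
print(dep.deployment_id, dep.status.state)

w.apps.stop(name=app.name)
```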
+ + :param name: str + The name of the app. + + + """ + + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers) + + def update(self, name: str, *, description: Optional[str] = None) -> App: + """Update an app. + + Updates the app with the supplied name. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: :class:`App` + """ + body = {} + if description is not None: body['description'] = description + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers) + return App.from_dict(res) + + def update_permissions( + self, + app_name: str, + *, + access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions: + """Update app permissions. + + Updates the permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + """ + body = {} + if access_control_list is not None: + body['access_control_list'] = [v.as_dict() for v in access_control_list] + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('PATCH', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers) + return AppPermissions.from_dict(res) diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py index d2ef50bc3..cfb7ba0b4 100755 --- a/databricks/sdk/service/billing.py +++ b/databricks/sdk/service/billing.py @@ -249,6 +249,46 @@ def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdCla values=d.get('values', None)) +@dataclass +class CreateBillingUsageDashboardRequest: + dashboard_type: Optional[UsageDashboardType] = None + """Workspace level usage dashboard shows usage data for the specified workspace ID. 
Global level + usage dashboard shows usage data for all workspaces in the account.""" + + workspace_id: Optional[int] = None + """The workspace ID of the workspace in which the usage dashboard is created.""" + + def as_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type.value + if self.workspace_id is not None: body['workspace_id'] = self.workspace_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest: + """Deserializes the CreateBillingUsageDashboardRequest from a dictionary.""" + return cls(dashboard_type=_enum(d, 'dashboard_type', UsageDashboardType), + workspace_id=d.get('workspace_id', None)) + + +@dataclass +class CreateBillingUsageDashboardResponse: + dashboard_id: Optional[str] = None + """The unique id of the usage dashboard.""" + + def as_dict(self) -> dict: + """Serializes the CreateBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse: + """Deserializes the CreateBillingUsageDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get('dashboard_id', None)) + + @dataclass class CreateBudgetConfigurationBudget: account_id: Optional[str] = None @@ -529,6 +569,27 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse: return cls(contents=d.get('contents', None)) +@dataclass +class GetBillingUsageDashboardResponse: + dashboard_id: Optional[str] = None + """The unique id of the usage dashboard.""" + + dashboard_url: Optional[str] = None + """The URL of the usage dashboard.""" + + def as_dict(self) -> dict: + """Serializes the GetBillingUsageDashboardResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id + if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse: + """Deserializes the GetBillingUsageDashboardResponse from a dictionary.""" + return cls(dashboard_id=d.get('dashboard_id', None), dashboard_url=d.get('dashboard_url', None)) + + @dataclass class GetBudgetConfigurationResponse: budget: Optional[BudgetConfiguration] = None @@ -898,6 +959,12 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusReq status=_enum(d, 'status', LogDeliveryConfigStatus)) +class UsageDashboardType(Enum): + + USAGE_DASHBOARD_TYPE_GLOBAL = 'USAGE_DASHBOARD_TYPE_GLOBAL' + USAGE_DASHBOARD_TYPE_WORKSPACE = 'USAGE_DASHBOARD_TYPE_WORKSPACE' + + @dataclass class WrappedCreateLogDeliveryConfiguration: log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams] = None @@ -1290,3 +1357,67 @@ def patch_status(self, log_delivery_configuration_id: str, status: LogDeliveryCo f'/api/2.0/accounts/{self._api.account_id}/log-delivery/{log_delivery_configuration_id}', body=body, headers=headers) + + +class UsageDashboardsAPI: + """These APIs manage usage dashboards for this account. 
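The request/response dataclasses and the `UsageDashboardType` enum above feed the two methods of this service, shown below. A usage sketch, assuming an account client that exposes the service as `a.usage_dashboards` (per the `databricks/sdk/__init__.py` diff earlier); the workspace ID is illustrative:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import UsageDashboardType

a = AccountClient()

# Create the pre-built usage dashboard scoped to a single workspace...
created = a.usage_dashboards.create(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890)

# ...then fetch its id and URL for embedding or sharing.
dash = a.usage_dashboards.get(
    dashboard_type=UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890)
print(dash.dashboard_id, dash.dashboard_url)
```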
Usage dashboards enable you to gain insights into + your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost + drivers.""" + + def __init__(self, api_client): + self._api = api_client + + def create(self, + *, + dashboard_type: Optional[UsageDashboardType] = None, + workspace_id: Optional[int] = None) -> CreateBillingUsageDashboardResponse: + """Create new usage dashboard. + + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`CreateBillingUsageDashboardResponse` + """ + body = {} + if dashboard_type is not None: body['dashboard_type'] = dashboard_type.value + if workspace_id is not None: body['workspace_id'] = workspace_id + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + f'/api/2.0/accounts/{self._api.account_id}/dashboard', + body=body, + headers=headers) + return CreateBillingUsageDashboardResponse.from_dict(res) + + def get(self, + *, + dashboard_type: Optional[UsageDashboardType] = None, + workspace_id: Optional[int] = None) -> GetBillingUsageDashboardResponse: + """Get usage dashboard. + + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`GetBillingUsageDashboardResponse` + """ + + query = {} + if dashboard_type is not None: query['dashboard_type'] = dashboard_type.value + if workspace_id is not None: query['workspace_id'] = workspace_id + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', + f'/api/2.0/accounts/{self._api.account_id}/dashboard', + query=query, + headers=headers) + return GetBillingUsageDashboardResponse.from_dict(res) diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index c6da9b8c5..0e81d239f 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -2789,19 +2789,25 @@ def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse: @dataclass class ListSystemSchemasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + schemas: Optional[List[SystemSchemaInfo]] = None """An array of system schema information objects.""" def as_dict(self) -> dict: """Serializes the ListSystemSchemasResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse: """Deserializes the ListSystemSchemasResponse from a dictionary.""" - return cls(schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo)) + return cls(next_page_token=d.get('next_page_token', None), + schemas=_repeated_dict(d, 'schemas', SystemSchemaInfo)) @dataclass @@ -3026,6 +3032,9 @@ class MetastoreInfoDeltaSharingScope(Enum): @dataclass class ModelVersionInfo: + aliases: Optional[List[RegisteredModelAlias]] = None + """List of aliases associated with the model version""" + browse_only: Optional[bool] = None """Indicates whether the principal is limited to retrieving metadata for the associated object through the BROWSE privilege when include_browse is enabled in the request.""" @@ -3086,6 +3095,7 @@ class ModelVersionInfo: def as_dict(self) -> dict: """Serializes the ModelVersionInfo into a dictionary suitable for use as a JSON request body.""" body = {} + if self.aliases: body['aliases'] = [v.as_dict() for v in self.aliases] if self.browse_only is not None: body['browse_only'] = self.browse_only if self.catalog_name is not None: body['catalog_name'] = self.catalog_name if self.comment is not None: body['comment'] = self.comment @@ -3110,7 +3120,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo: """Deserializes the ModelVersionInfo from a dictionary.""" - return cls(browse_only=d.get('browse_only', None), + return cls(aliases=_repeated_dict(d, 'aliases', RegisteredModelAlias), + browse_only=d.get('browse_only', None), catalog_name=d.get('catalog_name', None), comment=d.get('comment', None), created_at=d.get('created_at', None), @@ -5790,16 +5801,22 @@ class WorkspaceBindingsResponse: bindings: Optional[List[WorkspaceBinding]] = None """List of workspace bindings""" + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + def as_dict(self) -> dict: """Serializes the WorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body.""" body = {} if self.bindings: body['bindings'] = [v.as_dict() for v in self.bindings] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse: """Deserializes the WorkspaceBindingsResponse from a dictionary.""" - return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding)) + return cls(bindings=_repeated_dict(d, 'bindings', WorkspaceBinding), + next_page_token=d.get('next_page_token', None)) class AccountMetastoreAssignmentsAPI: @@ -7339,7 +7356,8 @@ def get(self, full_name: str, version: int, *, - include_browse: Optional[bool] = None) -> RegisteredModelInfo: + include_aliases: Optional[bool] = None, + include_browse: Optional[bool] = None) -> ModelVersionInfo: """Get a Model Version. 
Get a model version. @@ -7352,14 +7370,17 @@ def get(self, The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - :returns: :class:`RegisteredModelInfo` + :returns: :class:`ModelVersionInfo` """ query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases if include_browse is not None: query['include_browse'] = include_browse headers = {'Accept': 'application/json', } @@ -7367,9 +7388,13 @@ def get(self, f'/api/2.1/unity-catalog/models/{full_name}/versions/{version}', query=query, headers=headers) - return RegisteredModelInfo.from_dict(res) + return ModelVersionInfo.from_dict(res) - def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo: + def get_by_alias(self, + full_name: str, + alias: str, + *, + include_aliases: Optional[bool] = None) -> ModelVersionInfo: """Get Model Version By Alias. Get a model version by alias. @@ -7382,14 +7407,19 @@ def get_by_alias(self, full_name: str, alias: str) -> ModelVersionInfo: The three-level (fully qualified) name of the registered model :param alias: str The name of the alias + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :returns: :class:`ModelVersionInfo` """ + query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', + query=query, headers=headers) return ModelVersionInfo.from_dict(res) @@ -7985,7 +8015,11 @@ def delete_alias(self, full_name: str, alias: str): self._api.do('DELETE', f'/api/2.1/unity-catalog/models/{full_name}/aliases/{alias}', headers=headers) - def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> RegisteredModelInfo: + def get(self, + full_name: str, + *, + include_aliases: Optional[bool] = None, + include_browse: Optional[bool] = None) -> RegisteredModelInfo: """Get a Registered Model. Get a registered model. @@ -7996,6 +8030,8 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis :param full_name: str The three-level (fully qualified) name of the registered model + :param include_aliases: bool (optional) + Whether to include registered model aliases in the response :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for @@ -8004,6 +8040,7 @@ def get(self, full_name: str, *, include_browse: Optional[bool] = None) -> Regis """ query = {} + if include_aliases is not None: query['include_aliases'] = include_aliases if include_browse is not None: query['include_browse'] = include_browse headers = {'Accept': 'application/json', } @@ -8650,7 +8687,11 @@ def enable(self, metastore_id: str, schema_name: str): f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}', headers=headers) - def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]: + def list(self, + metastore_id: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[SystemSchemaInfo]: """List system schemas. 
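A hedged sketch of the updated getters: `get` now accepts `include_aliases` and returns :class:`ModelVersionInfo` (previously annotated as :class:`RegisteredModelInfo`), and `get_by_alias` gains the same flag. The model name is a placeholder.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fetch a model version together with its aliases.
mv = w.model_versions.get('main.default.my_model', 1, include_aliases=True)
print([a.alias_name for a in mv.aliases or []])

# Resolve a version through one of its aliases.
champion = w.model_versions.get_by_alias('main.default.my_model', 'champion',
                                         include_aliases=True)
print(champion.version)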
Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
@@ -8658,17 +8699,33 @@ def list(self, metastore_id: str) -> Iterator[SystemSchemaInfo]:

         :param metastore_id: str
           The ID for the metastore in which the system schema resides.
+        :param max_results: int (optional)
+          Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
+          value (recommended); - When set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - When set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all the schemas are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to the next page based on the previous query.

         :returns: Iterator over :class:`SystemSchemaInfo`
         """

+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }

-        json = self._api.do('GET',
-                            f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
-                            headers=headers)
-        parsed = ListSystemSchemasResponse.from_dict(json).schemas
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
+                                query=query,
+                                headers=headers)
+            if 'schemas' in json:
+                for v in json['schemas']:
+                    yield SystemSchemaInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']


 class TableConstraintsAPI:
@@ -9214,8 +9271,12 @@ def get(self, name: str) -> CurrentWorkspaceBindings:
                            headers=headers)
         return CurrentWorkspaceBindings.from_dict(res)

-    def get_bindings(self, securable_type: GetBindingsSecurableType,
-                     securable_name: str) -> WorkspaceBindingsResponse:
+    def get_bindings(self,
+                     securable_type: GetBindingsSecurableType,
+                     securable_name: str,
+                     *,
+                     max_results: Optional[int] = None,
+                     page_token: Optional[str] = None) -> Iterator[WorkspaceBinding]:
         """Get securable workspace bindings.

         Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
@@ -9225,16 +9286,33 @@ def get_bindings(self, securable_type: GetBindingsSecurableType,
           The type of the securable to bind to a workspace.
         :param securable_name: str
           The name of the securable.
+        :param max_results: int (optional)
+          Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
+          configured value (recommended); - When set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - When set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all the workspace bindings are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to the next page based on the previous query.
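With the systemschemas change earlier in this hunk, `list` becomes a generator that follows `next_page_token` internally, so callers simply iterate; a sketch (the metastore ID comes from the current-metastore assignment):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
metastore_id = w.metastores.current().metastore_id

# The iterator requests additional pages transparently as it is consumed.
for schema in w.system_schemas.list(metastore_id, max_results=10):
    print(schema.schema, schema.state)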
- :returns: :class:`WorkspaceBindingsResponse` + :returns: Iterator over :class:`WorkspaceBinding` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - res = self._api.do('GET', - f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', - headers=headers) - return WorkspaceBindingsResponse.from_dict(res) + while True: + json = self._api.do('GET', + f'/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}', + query=query, + headers=headers) + if 'bindings' in json: + for v in json['bindings']: + yield WorkspaceBinding.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def update(self, name: str, diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index bbfda7891..148ce44e8 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -2106,10 +2106,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse: @dataclass class CreatePolicy: - name: str - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" - definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -2126,6 +2122,10 @@ class CreatePolicy: """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" + name: Optional[str] = None + """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and + 100 characters.""" + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -2891,10 +2891,6 @@ class EditPolicy: policy_id: str """The ID of the policy to update.""" - name: str - """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and - 100 characters.""" - definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -2911,6 +2907,10 @@ class EditPolicy: """Max number of clusters per user that can be active using this policy. If not present, there is no max limit.""" + name: Optional[str] = None + """Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and + 100 characters.""" + policy_family_definition_overrides: Optional[str] = None """Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -4784,13 +4784,13 @@ def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse: @dataclass class ListPolicyFamiliesResponse: - policy_families: List[PolicyFamily] - """List of policy families.""" - next_page_token: Optional[str] = None """A token that can be used to get the next page of results. 
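`get_bindings` follows the same pattern: it now yields individual :class:`WorkspaceBinding` items across pages instead of returning one :class:`WorkspaceBindingsResponse`. A sketch (the catalog name is a placeholder, and the enum member name follows this module's conventions):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import GetBindingsSecurableType

w = WorkspaceClient()

# Iterate over all workspaces bound to a catalog; paging is handled internally.
for binding in w.workspace_bindings.get_bindings(GetBindingsSecurableType.CATALOG,
                                                 'my_catalog'):
    print(binding.workspace_id, binding.binding_type)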
If not present, there are no more results to show.""" + policy_families: Optional[List[PolicyFamily]] = None + """List of policy families.""" + def as_dict(self) -> dict: """Serializes the ListPolicyFamiliesResponse into a dictionary suitable for use as a JSON request body.""" body = {} @@ -4812,6 +4812,7 @@ class ListSortColumn(Enum): class ListSortOrder(Enum): + """A generic ordering enum for list-based queries.""" ASC = 'ASC' DESC = 'DESC' @@ -5138,6 +5139,8 @@ def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse: @dataclass class Policy: + """Describes a Cluster Policy entity.""" + created_at_timestamp: Optional[int] = None """Creation time. The timestamp (in millisecond) when this Cluster Policy was created.""" @@ -5179,7 +5182,11 @@ class Policy: [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" policy_family_id: Optional[str] = None - """ID of the policy family.""" + """ID of the policy family. The cluster policy's policy definition inherits the policy family's + policy definition. + + Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize + the policy definition.""" policy_id: Optional[str] = None """Canonical unique identifier for the Cluster Policy.""" @@ -5219,20 +5226,20 @@ def from_dict(cls, d: Dict[str, any]) -> Policy: @dataclass class PolicyFamily: - policy_family_id: str - """ID of the policy family.""" - - name: str - """Name of the policy family.""" - - description: str - """Human-readable description of the purpose of the policy family.""" - - definition: str + definition: Optional[str] = None """Policy definition document expressed in [Databricks Cluster Policy Definition Language]. [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html""" + description: Optional[str] = None + """Human-readable description of the purpose of the policy family.""" + + name: Optional[str] = None + """Name of the policy family.""" + + policy_family_id: Optional[str] = None + """Unique identifier for the policy family.""" + def as_dict(self) -> dict: """Serializes the PolicyFamily into a dictionary suitable for use as a JSON request body.""" body = {} @@ -5872,6 +5879,260 @@ def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse: return cls() +@dataclass +class UpdateCluster: + cluster_id: str + """ID of the cluster.""" + + update_mask: str + """Specifies which fields of the cluster will be updated. This is required in the POST request. The + update mask should be supplied as a single string. To specify multiple fields, separate them + with commas (no spaces). 
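Since `name` is now optional and `policy_family_id` documents the inheritance behaviour, the natural way to customize a family-derived policy is `policy_family_definition_overrides` rather than `definition`; a sketch (the family ID is a placeholder):

import json

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Inherit the family definition and override a single rule.
created = w.cluster_policies.create(
    name='shared-job-policy',
    policy_family_id='job-cluster',  # hypothetical family ID
    policy_family_definition_overrides=json.dumps(
        {'autotermination_minutes': {'type': 'fixed', 'value': 60}}))
print(created.policy_id)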
To delete a field from a cluster configuration, add it to the + `update_mask` string but omit it from the `cluster` object.""" + + cluster: Optional[UpdateClusterResource] = None + """The cluster to be updated.""" + + def as_dict(self) -> dict: + """Serializes the UpdateCluster into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster: body['cluster'] = self.cluster.as_dict() + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.update_mask is not None: body['update_mask'] = self.update_mask + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateCluster: + """Deserializes the UpdateCluster from a dictionary.""" + return cls(cluster=_from_dict(d, 'cluster', UpdateClusterResource), + cluster_id=d.get('cluster_id', None), + update_mask=d.get('update_mask', None)) + + +@dataclass +class UpdateClusterResource: + autoscale: Optional[AutoScale] = None + """Parameters needed in order to automatically scale clusters up and down based on load. Note: + autoscaling works best with DB runtime versions 3.0 or later.""" + + autotermination_minutes: Optional[int] = None + """Automatically terminates the cluster after it is inactive for this time in minutes. If not set, + this cluster will not be automatically terminated. If specified, the threshold must be between + 10 and 10000 minutes. Users can also set this value to 0 to explicitly disable automatic + termination.""" + + aws_attributes: Optional[AwsAttributes] = None + """Attributes related to clusters running on Amazon Web Services. If not specified at cluster + creation, a set of default values will be used.""" + + azure_attributes: Optional[AzureAttributes] = None + """Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, + a set of default values will be used.""" + + cluster_log_conf: Optional[ClusterLogConf] = None + """The configuration for delivering spark logs to a long-term storage destination. Two kinds of + destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. + If the conf is given, the logs will be delivered to the destination every `5 mins`. The + destination of driver logs is `$destination/$clusterId/driver`, while the destination of + executor logs is `$destination/$clusterId/executor`.""" + + cluster_name: Optional[str] = None + """Cluster name requested by the user. This doesn't have to be unique. If not specified at + creation, the cluster name will be an empty string.""" + + custom_tags: Optional[Dict[str, str]] = None + """Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS + instances and EBS volumes) with these tags in addition to `default_tags`. Notes: + + - Currently, Databricks allows at most 45 custom tags + + - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster + tags""" + + data_security_mode: Optional[DataSecurityMode] = None + """Data security mode decides what data governance model to use when accessing data from a cluster. + + * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features + are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively + used by a single user specified in `single_user_name`. Most programming languages, cluster + features and data governance features are available in this mode. * `USER_ISOLATION`: A secure + cluster that can be shared by multiple users. 
Cluster users are fully isolated so that they + cannot see each other's data and credentials. Most data governance features are supported in + this mode. But programming languages and cluster features might be limited. + + The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for + future Databricks Runtime versions: + + * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * + `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high + concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy + Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that + doesn’t have UC nor passthrough enabled.""" + + docker_image: Optional[DockerImage] = None + + driver_instance_pool_id: Optional[str] = None + """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster + uses the instance pool with id (instance_pool_id) if the driver pool is not assigned.""" + + driver_node_type_id: Optional[str] = None + """The node type of the Spark driver. Note that this field is optional; if unset, the driver node + type will be set as the same value as `node_type_id` defined above.""" + + enable_elastic_disk: Optional[bool] = None + """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk + space when its Spark workers are running low on disk space. This feature requires specific AWS + permissions to function correctly - refer to the User Guide for more details.""" + + enable_local_disk_encryption: Optional[bool] = None + """Whether to enable LUKS on cluster VMs' local disks""" + + gcp_attributes: Optional[GcpAttributes] = None + """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster + creation, a set of default values will be used.""" + + init_scripts: Optional[List[InitScriptInfo]] = None + """The configuration for storing init scripts. Any number of destinations can be specified. The + scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified, + init script logs are sent to `//init_scripts`.""" + + instance_pool_id: Optional[str] = None + """The optional ID of the instance pool to which the cluster belongs.""" + + node_type_id: Optional[str] = None + """This field encodes, through a single value, the resources available to each of the Spark nodes + in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or + compute intensive workloads. A list of available node types can be retrieved by using the + :method:clusters/listNodeTypes API call.""" + + num_workers: Optional[int] = None + """Number of worker nodes that this cluster should have. A cluster has one Spark Driver and + `num_workers` Executors for a total of `num_workers` + 1 Spark nodes. + + Note: When reading the properties of a cluster, this field reflects the desired number of + workers rather than the actual current number of workers. 
For instance, if a cluster is resized
+    from 5 to 10 workers, this field will immediately be updated to reflect the target size of 10
+    workers, whereas the workers listed in `spark_info` will gradually increase from 5 to 10 as the
+    new nodes are provisioned."""
+
+    policy_id: Optional[str] = None
+    """The ID of the cluster policy used to create the cluster if applicable."""
+
+    runtime_engine: Optional[RuntimeEngine] = None
+    """Decides which runtime engine to use, e.g. Standard vs. Photon. If unspecified, the runtime
+    engine is inferred from spark_version."""
+
+    single_user_name: Optional[str] = None
+    """Single user name if data_security_mode is `SINGLE_USER`"""
+
+    spark_conf: Optional[Dict[str, str]] = None
+    """An object containing a set of optional, user-specified Spark configuration key-value pairs.
+    Users can also pass in a string of extra JVM options to the driver and the executors via
+    `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively."""
+
+    spark_env_vars: Optional[Dict[str, str]] = None
+    """An object containing a set of optional, user-specified environment variable key-value pairs.
+    Please note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`)
+    while launching the driver and workers.
+
+    In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them
+    to `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+    databricks managed environmental variables are included as well.
+
+    Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+    "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+    -Dspark.shuffle.service.enabled=true"}`"""
+
+    spark_version: Optional[str] = None
+    """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
+    be retrieved by using the :method:clusters/sparkVersions API call."""
+
+    ssh_public_keys: Optional[List[str]] = None
+    """SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+    private keys can be used to log in with the user name `ubuntu` on port `2200`.
Up to 10 keys can + be specified.""" + + workload_type: Optional[WorkloadType] = None + + def as_dict(self) -> dict: + """Serializes the UpdateClusterResource into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.autoscale: body['autoscale'] = self.autoscale.as_dict() + if self.autotermination_minutes is not None: + body['autotermination_minutes'] = self.autotermination_minutes + if self.aws_attributes: body['aws_attributes'] = self.aws_attributes.as_dict() + if self.azure_attributes: body['azure_attributes'] = self.azure_attributes.as_dict() + if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf.as_dict() + if self.cluster_name is not None: body['cluster_name'] = self.cluster_name + if self.custom_tags: body['custom_tags'] = self.custom_tags + if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode.value + if self.docker_image: body['docker_image'] = self.docker_image.as_dict() + if self.driver_instance_pool_id is not None: + body['driver_instance_pool_id'] = self.driver_instance_pool_id + if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption + if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() + if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] + if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id + if self.node_type_id is not None: body['node_type_id'] = self.node_type_id + if self.num_workers is not None: body['num_workers'] = self.num_workers + if self.policy_id is not None: body['policy_id'] = self.policy_id + if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value + if self.single_user_name is not None: body['single_user_name'] = self.single_user_name + if self.spark_conf: body['spark_conf'] = self.spark_conf + if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars + if self.spark_version is not None: body['spark_version'] = self.spark_version + if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys] + if self.workload_type: body['workload_type'] = self.workload_type.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource: + """Deserializes the UpdateClusterResource from a dictionary.""" + return cls(autoscale=_from_dict(d, 'autoscale', AutoScale), + autotermination_minutes=d.get('autotermination_minutes', None), + aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes), + azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes), + cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf), + cluster_name=d.get('cluster_name', None), + custom_tags=d.get('custom_tags', None), + data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode), + docker_image=_from_dict(d, 'docker_image', DockerImage), + driver_instance_pool_id=d.get('driver_instance_pool_id', None), + driver_node_type_id=d.get('driver_node_type_id', None), + enable_elastic_disk=d.get('enable_elastic_disk', None), + enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), + gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes), + init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo), + 
instance_pool_id=d.get('instance_pool_id', None), + node_type_id=d.get('node_type_id', None), + num_workers=d.get('num_workers', None), + policy_id=d.get('policy_id', None), + runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine), + single_user_name=d.get('single_user_name', None), + spark_conf=d.get('spark_conf', None), + spark_env_vars=d.get('spark_env_vars', None), + spark_version=d.get('spark_version', None), + ssh_public_keys=d.get('ssh_public_keys', None), + workload_type=_from_dict(d, 'workload_type', WorkloadType)) + + +@dataclass +class UpdateClusterResponse: + + def as_dict(self) -> dict: + """Serializes the UpdateClusterResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse: + """Deserializes the UpdateClusterResponse from a dictionary.""" + return cls() + + @dataclass class UpdateResponse: @@ -5960,21 +6221,18 @@ def __init__(self, api_client): self._api = api_client def create(self, - name: str, *, definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None) -> CreatePolicyResponse: """Create a new policy. Creates a new policy with prescribed settings. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -5987,6 +6245,9 @@ def create(self, :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -6036,12 +6297,12 @@ def delete(self, policy_id: str): def edit(self, policy_id: str, - name: str, *, definition: Optional[str] = None, description: Optional[str] = None, libraries: Optional[List[Library]] = None, max_clusters_per_user: Optional[int] = None, + name: Optional[str] = None, policy_family_definition_overrides: Optional[str] = None, policy_family_id: Optional[str] = None): """Update a cluster policy. @@ -6051,9 +6312,6 @@ def edit(self, :param policy_id: str The ID of the policy to update. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -6066,6 +6324,9 @@ def edit(self, :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. 
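A property of these generated dataclasses worth noting: `as_dict` serializes only the fields that were set, which is what makes the `update_mask` contract above workable. A self-contained sketch using only the types from this hunk:

from databricks.sdk.service.compute import UpdateCluster, UpdateClusterResource

req = UpdateCluster(cluster_id='0123-456789-abcdefgh',
                    update_mask='num_workers,autotermination_minutes',
                    cluster=UpdateClusterResource(num_workers=4,
                                                  autotermination_minutes=60))
# Unset fields are omitted from the request body entirely.
assert req.as_dict() == {
    'cluster': {'autotermination_minutes': 60, 'num_workers': 4},
    'cluster_id': '0123-456789-abcdefgh',
    'update_mask': 'num_workers,autotermination_minutes',
}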
The JSON document must be passed as a string and cannot be embedded in the requests.
@@ -6103,7 +6364,7 @@ def get(self, policy_id: str) -> Policy:
         Get a cluster policy entity. Creation and editing is available to admins only.

         :param policy_id: str
-          Canonical unique identifier for the cluster policy.
+          Canonical unique identifier for the Cluster Policy.

         :returns: :class:`Policy`
         """
@@ -7275,6 +7536,57 @@ def unpin(self, cluster_id: str):
         self._api.do('POST', '/api/2.1/clusters/unpin', body=body, headers=headers)

+    def update(self,
+               cluster_id: str,
+               update_mask: str,
+               *,
+               cluster: Optional[UpdateClusterResource] = None) -> Wait[ClusterDetails]:
+        """Update cluster configuration (partial).
+
+        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+        updated.
+
+        :param cluster_id: str
+          ID of the cluster.
+        :param update_mask: str
+          Specifies which fields of the cluster will be updated. This is required in the POST request. The
+          update mask should be supplied as a single string. To specify multiple fields, separate them with
+          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+          string but omit it from the `cluster` object.
+        :param cluster: :class:`UpdateClusterResource` (optional)
+          The cluster to be updated.
+
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        """
+        body = {}
+        if cluster is not None: body['cluster'] = cluster.as_dict()
+        if cluster_id is not None: body['cluster_id'] = cluster_id
+        if update_mask is not None: body['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST', '/api/2.1/clusters/update', body=body, headers=headers)
+        return Wait(self.wait_get_cluster_running,
+                    response=UpdateClusterResponse.from_dict(op_response),
+                    cluster_id=cluster_id)
+
+    def update_and_wait(
+        self,
+        cluster_id: str,
+        update_mask: str,
+        *,
+        cluster: Optional[UpdateClusterResource] = None,
+        timeout=timedelta(minutes=20)) -> ClusterDetails:
+        return self.update(cluster=cluster, cluster_id=cluster_id,
+                           update_mask=update_mask).result(timeout=timeout)
+
     def update_permissions(
         self,
         cluster_id: str,
@@ -8286,19 +8598,27 @@ class PolicyFamiliesAPI:
     def __init__(self, api_client):
         self._api = api_client

-    def get(self, policy_family_id: str) -> PolicyFamily:
+    def get(self, policy_family_id: str, *, version: Optional[int] = None) -> PolicyFamily:
         """Get policy family information.

-        Retrieve the information for an policy family based on its identifier.
+        Retrieve the information for a policy family based on its identifier and version.

        :param policy_family_id: str
+          The family ID about which to retrieve information.
+        :param version: int (optional)
+          The version number for the family to fetch.
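Stepping back to the partial-update call introduced earlier in this hunk, a hedged usage sketch (a `RUNNING` cluster is restarted as part of the update, hence the waiter; the cluster ID is a placeholder):

from datetime import timedelta

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import UpdateClusterResource

w = WorkspaceClient()

# Resize only num_workers; all other attributes are left untouched.
details = w.clusters.update_and_wait(
    cluster_id='0123-456789-abcdefgh',  # placeholder cluster ID
    update_mask='num_workers',
    cluster=UpdateClusterResource(num_workers=8),
    timeout=timedelta(minutes=30))
print(details.state)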
Defaults to the latest version.

         :returns: :class:`PolicyFamily`
         """

+        query = {}
+        if version is not None: query['version'] = version
         headers = {'Accept': 'application/json', }

-        res = self._api.do('GET', f'/api/2.0/policy-families/{policy_family_id}', headers=headers)
+        res = self._api.do('GET',
+                           f'/api/2.0/policy-families/{policy_family_id}',
+                           query=query,
+                           headers=headers)
         return PolicyFamily.from_dict(res)

     def list(self,
@@ -8307,10 +8627,11 @@ def list(self,
                  page_token: Optional[str] = None) -> Iterator[PolicyFamily]:
         """List policy families.

-        Retrieve a list of policy families. This API is paginated.
+        Returns the list of policy definition types available to use at their latest version. This API is
+        paginated.

         :param max_results: int (optional)
-          The max number of policy families to return.
+          Maximum number of policy families to return.
         :param page_token: str (optional)
           A token that can be used to get the next page of results.

diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index bf571dd49..28ddca569 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -208,7 +208,6 @@ def from_dict(cls, d: Dict[str, any]) -> Dashboard:
 class DashboardView(Enum):

     DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC'
-    DASHBOARD_VIEW_FULL = 'DASHBOARD_VIEW_FULL'


 @dataclass
@@ -381,7 +380,9 @@ class GenieMessage:
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
     `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
+    status will stay in the `EXECUTING_QUERY` state until a client calls
+    [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
     response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message
     processing is completed. Results are in the `attachments` field. Get the SQL query result by
     calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message
@@ -612,7 +613,9 @@ class MessageStatus(Enum):
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
     `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
-    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `FAILED`: Generating a
+    [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
+    status will stay in the `EXECUTING_QUERY` state until a client calls
+    [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
     response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message
     processing is completed. Results are in the `attachments` field. Get the SQL query result by
     calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API.
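The **Important** note added above implies a polling loop on the client side; a sketch of that contract (the IDs would come from `start_conversation`, and the accessor/method names follow this module's Genie API):

import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import MessageStatus

w = WorkspaceClient()
space_id, conversation_id, message_id = '...', '...', '...'  # placeholders

while True:
    msg = w.genie.get_message(space_id, conversation_id, message_id)
    if msg.status == MessageStatus.EXECUTING_QUERY:
        # The message parks in EXECUTING_QUERY until this call is made.
        result = w.genie.get_message_query_result(space_id, conversation_id, message_id)
        print(result)
        break
    if msg.status in (MessageStatus.COMPLETED, MessageStatus.FAILED):
        break
    time.sleep(2)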
* `SUBMITTED`: Message
@@ -721,6 +724,8 @@ class QueryAttachment:
     description: Optional[str] = None
     """Description of the query"""

+    id: Optional[str] = None
+
     instruction_id: Optional[str] = None
     """If the query was created on an instruction (trusted asset) we link to the id"""

@@ -741,6 +746,7 @@ def as_dict(self) -> dict:
         """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
         if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
         if self.last_updated_timestamp is not None:
@@ -753,6 +759,7 @@ def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
         """Deserializes the QueryAttachment from a dictionary."""
         return cls(description=d.get('description', None),
+                   id=d.get('id', None),
                    instruction_id=d.get('instruction_id', None),
                    instruction_title=d.get('instruction_title', None),
                    last_updated_timestamp=d.get('last_updated_timestamp', None),
@@ -960,16 +967,19 @@ class TextAttachment:
     content: Optional[str] = None
     """AI generated message"""

+    id: Optional[str] = None
+
     def as_dict(self) -> dict:
         """Serializes the TextAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.content is not None: body['content'] = self.content
+        if self.id is not None: body['id'] = self.id
         return body

     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextAttachment:
         """Deserializes the TextAttachment from a dictionary."""
-        return cls(content=d.get('content', None))
+        return cls(content=d.get('content', None), id=d.get('id', None))


 @dataclass
@@ -1505,8 +1515,7 @@ def list(self,
           The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
           returned.
         :param view: :class:`DashboardView` (optional)
-          Indicates whether to include all metadata from the dashboard in the response. If unset, the response
-          defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard.
+          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.

         :returns: Iterator over :class:`Dashboard`
         """
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index b5cf91846..2b027fe6e 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -830,6 +830,7 @@ class PermissionLevel(Enum):
     CAN_MANAGE_PRODUCTION_VERSIONS = 'CAN_MANAGE_PRODUCTION_VERSIONS'
     CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
     CAN_MANAGE_STAGING_VERSIONS = 'CAN_MANAGE_STAGING_VERSIONS'
+    CAN_MONITOR = 'CAN_MONITOR'
     CAN_QUERY = 'CAN_QUERY'
     CAN_READ = 'CAN_READ'
     CAN_RESTART = 'CAN_RESTART'
@@ -890,9 +891,9 @@ class PermissionsRequest:
     """The id of the request object."""

     request_object_type: Optional[str] = None
-    """The type of the request object. Can be one of the following: authorization, clusters,
-    cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-    registered-models, repos, serving-endpoints, or warehouses."""
+    """The type of the request object.
Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.""" def as_dict(self) -> dict: """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body.""" @@ -1139,8 +1140,10 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest: @dataclass class UpdateWorkspaceAssignments: permissions: Optional[List[WorkspacePermission]] = None - """Array of permissions assignments to update on the workspace. Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all + """Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other + values will be ignored. Note that excluding this field, or providing unsupported values, will + have the same effect as providing an empty list, which will result in the deletion of all permissions for the principal.""" principal_id: Optional[int] = None @@ -2539,6 +2542,8 @@ class PermissionsAPI: """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints. + * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps. + * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to clusters. @@ -2574,7 +2579,7 @@ class PermissionsAPI: * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens. * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or - manage directories, files, and notebooks. + manage alerts, dbsql-dashboards, directories, files, notebooks and queries. For the mapping of the required permissions for specific actions or abilities and other important information, see [Access Control]. @@ -2594,9 +2599,9 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -2642,9 +2647,9 @@ def set(self, object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. 
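The widened `request_object_type` list means the generic permissions calls now also reach SQL objects; a sketch reading the ACL of a query by its ID (the ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

perms = w.permissions.get(request_object_type='queries',
                          request_object_id='12345')  # placeholder query ID
for acl in perms.access_control_list or []:
    print(acl.user_name, [p.permission_level for p in acl.all_permissions or []])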
:param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -2673,9 +2678,9 @@ def update(self, root object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -3382,9 +3387,11 @@ def update(self, :param principal_id: int The ID of the user, service principal, or group. :param permissions: List[:class:`WorkspacePermission`] (optional) - Array of permissions assignments to update on the workspace. Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all permissions - for the principal. + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. :returns: :class:`PermissionAssignment` """ diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index cf677fd06..6e5b34ad1 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -15,7 +15,7 @@ _LOG = logging.getLogger('databricks.sdk') -from databricks.sdk.service import compute, iam +from databricks.sdk.service import compute # all definitions in this file are in alphabetical order @@ -469,7 +469,7 @@ def from_dict(cls, d: Dict[str, any]) -> Continuous: @dataclass class CreateJob: - access_control_list: Optional[List[iam.AccessControlRequest]] = None + access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" continuous: Optional[Continuous] = None @@ -603,7 +603,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> CreateJob: """Deserializes the CreateJob from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest), + return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), continuous=_from_dict(d, 'continuous', Continuous), deployment=_from_dict(d, 'deployment', JobDeployment), description=d.get('description', None), @@ -2614,6 +2614,9 @@ class Run: Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are used, `git_source` must be defined on the job.""" + iterations: Optional[List[RunTask]] = None + """Only populated by for-each iterations. The parent for-each task is located in tasks array.""" + job_clusters: Optional[List[JobCluster]] = None """A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries cannot be declared in a shared job cluster. 
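With the type change above, callers construct the jobs-local ACL type directly; a sketch (job settings trimmed to the ACL for brevity, names are placeholders):

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import (JobAccessControlRequest,
                                         JobPermissionLevel)

w = WorkspaceClient()

created = w.jobs.create(
    name='nightly-etl',  # placeholder job; tasks omitted for brevity
    access_control_list=[
        JobAccessControlRequest(user_name='someone@example.com',
                                permission_level=JobPermissionLevel.CAN_MANAGE),
    ])
print(created.job_id)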
You must declare dependent libraries in @@ -2625,6 +2628,9 @@ class Run: job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" + next_page_token: Optional[str] = None + """A token that can be used to list the next page of sub-resources.""" + number_in_job: Optional[int] = None """A unique identifier for this job run. This is set to the same value as `run_id`.""" @@ -2635,6 +2641,9 @@ class Run: overriding_parameters: Optional[RunParameters] = None """The parameters used for this run.""" + prev_page_token: Optional[str] = None + """A token that can be used to list the previous page of sub-resources.""" + queue_duration: Optional[int] = None """The time in milliseconds that the run has spent in the queue.""" @@ -2708,13 +2717,16 @@ def as_dict(self) -> dict: if self.end_time is not None: body['end_time'] = self.end_time if self.execution_duration is not None: body['execution_duration'] = self.execution_duration if self.git_source: body['git_source'] = self.git_source.as_dict() + if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations] if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.number_in_job is not None: body['number_in_job'] = self.number_in_job if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict() + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token if self.queue_duration is not None: body['queue_duration'] = self.queue_duration if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history] if self.run_duration is not None: body['run_duration'] = self.run_duration @@ -2743,12 +2755,15 @@ def from_dict(cls, d: Dict[str, any]) -> Run: end_time=d.get('end_time', None), execution_duration=d.get('execution_duration', None), git_source=_from_dict(d, 'git_source', GitSource), + iterations=_repeated_dict(d, 'iterations', RunTask), job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), + next_page_token=d.get('next_page_token', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), + prev_page_token=d.get('prev_page_token', None), queue_duration=d.get('queue_duration', None), repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem), run_duration=d.get('run_duration', None), @@ -4187,7 +4202,7 @@ def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription: @dataclass class SubmitRun: - access_control_list: Optional[List[iam.AccessControlRequest]] = None + access_control_list: Optional[List[JobAccessControlRequest]] = None """List of permissions to set on the job.""" email_notifications: Optional[JobEmailNotifications] = None @@ -4267,7 +4282,7 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> SubmitRun: """Deserializes the SubmitRun from a dictionary.""" - return cls(access_control_list=_repeated_dict(d, 'access_control_list', iam.AccessControlRequest), + return 
cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest), email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications), environments=_repeated_dict(d, 'environments', JobEnvironment), git_source=_from_dict(d, 'git_source', GitSource), @@ -5156,7 +5171,7 @@ def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run def create(self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, continuous: Optional[Continuous] = None, deployment: Optional[JobDeployment] = None, description: Optional[str] = None, @@ -5183,7 +5198,7 @@ def create(self, Create a new job. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param continuous: :class:`Continuous` (optional) An optional continuous property for this job. The continuous property will ensure that there is @@ -5401,7 +5416,8 @@ def get_run(self, run_id: int, *, include_history: Optional[bool] = None, - include_resolved_values: Optional[bool] = None) -> Run: + include_resolved_values: Optional[bool] = None, + page_token: Optional[str] = None) -> Run: """Get a single job run. Retrieve the metadata of a run. @@ -5412,6 +5428,9 @@ def get_run(self, Whether to include the repair history in the response. :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. + :param page_token: str (optional) + To list the next page or the previous page of job tasks, set this field to the value of the + `next_page_token` or `prev_page_token` returned in the GetJob response. :returns: :class:`Run` """ @@ -5419,6 +5438,7 @@ def get_run(self, query = {} if include_history is not None: query['include_history'] = include_history if include_resolved_values is not None: query['include_resolved_values'] = include_resolved_values + if page_token is not None: query['page_token'] = page_token if run_id is not None: query['run_id'] = run_id headers = {'Accept': 'application/json', } @@ -5926,7 +5946,7 @@ def set_permissions( def submit(self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, @@ -5945,7 +5965,7 @@ def submit(self, Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param email_notifications: :class:`JobEmailNotifications` (optional) An optional set of email addresses notified when the run begins or completes. 
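A sketch of the new run pagination: keep re-fetching with `page_token` until `next_page_token` disappears, accumulating the task pages along the way (the run ID is a placeholder):

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

run = w.jobs.get_run(1234567890)  # placeholder run ID
tasks = list(run.tasks or [])
while run.next_page_token:
    run = w.jobs.get_run(1234567890, page_token=run.next_page_token)
    tasks.extend(run.tasks or [])
print(len(tasks))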
@@ -6020,7 +6040,7 @@ def submit(self, def submit_and_wait( self, *, - access_control_list: Optional[List[iam.AccessControlRequest]] = None, + access_control_list: Optional[List[JobAccessControlRequest]] = None, email_notifications: Optional[JobEmailNotifications] = None, environments: Optional[List[JobEnvironment]] = None, git_source: Optional[GitSource] = None, diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py index bba59811d..ae76632ef 100755 --- a/databricks/sdk/service/pipelines.py +++ b/databricks/sdk/service/pipelines.py @@ -63,7 +63,7 @@ class CreatePipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -136,8 +136,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -277,7 +276,7 @@ class EditPipeline: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -355,8 +354,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -590,7 +588,7 @@ class IngestionGatewayPipelineDefinition: """Required, Immutable. The name of the catalog for the gateway pipeline's storage location.""" gateway_storage_name: Optional[str] = None - """Required. The Unity Catalog-compatible naming for the gateway storage location. This is the + """Optional. The Unity Catalog-compatible name for the gateway storage location. This is the destination to use for the data that is extracted by the gateway. Delta Live Tables system will automatically create the storage location under the catalog and schema.""" @@ -617,6 +615,41 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition: gateway_storage_schema=d.get('gateway_storage_schema', None)) +@dataclass +class IngestionPipelineDefinition: + connection_name: Optional[str] = None + """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the + source. Specify either ingestion_gateway_id or connection_name.""" + + ingestion_gateway_id: Optional[str] = None + """Immutable. 
Identifier for the ingestion gateway used by this ingestion pipeline to communicate + with the source. Specify either ingestion_gateway_id or connection_name.""" + + objects: Optional[List[IngestionConfig]] = None + """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" + + table_configuration: Optional[TableSpecificConfig] = None + """Configuration settings to control the ingestion of tables. These settings are applied to all + tables in the pipeline.""" + + def as_dict(self) -> dict: + """Serializes the IngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.connection_name is not None: body['connection_name'] = self.connection_name + if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id + if self.objects: body['objects'] = [v.as_dict() for v in self.objects] + if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition: + """Deserializes the IngestionPipelineDefinition from a dictionary.""" + return cls(connection_name=d.get('connection_name', None), + ingestion_gateway_id=d.get('ingestion_gateway_id', None), + objects=_repeated_dict(d, 'objects', IngestionConfig), + table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) + + @dataclass class ListPipelineEventsResponse: events: Optional[List[PipelineEvent]] = None @@ -693,41 +726,6 @@ def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse: updates=_repeated_dict(d, 'updates', UpdateInfo)) -@dataclass -class ManagedIngestionPipelineDefinition: - connection_name: Optional[str] = None - """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the - source. Specify either ingestion_gateway_id or connection_name.""" - - ingestion_gateway_id: Optional[str] = None - """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate - with the source. Specify either ingestion_gateway_id or connection_name.""" - - objects: Optional[List[IngestionConfig]] = None - """Required. Settings specifying tables to replicate and the destination for the replicated tables.""" - - table_configuration: Optional[TableSpecificConfig] = None - """Configuration settings to control the ingestion of tables. 
These settings are applied to all - tables in the pipeline.""" - - def as_dict(self) -> dict: - """Serializes the ManagedIngestionPipelineDefinition into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.connection_name is not None: body['connection_name'] = self.connection_name - if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id - if self.objects: body['objects'] = [v.as_dict() for v in self.objects] - if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict() - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ManagedIngestionPipelineDefinition: - """Deserializes the ManagedIngestionPipelineDefinition from a dictionary.""" - return cls(connection_name=d.get('connection_name', None), - ingestion_gateway_id=d.get('ingestion_gateway_id', None), - objects=_repeated_dict(d, 'objects', IngestionConfig), - table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig)) - - @dataclass class ManualTrigger: @@ -1003,6 +1001,9 @@ class PipelineCluster: """The node type of the Spark driver. Note that this field is optional; if unset, the driver node type will be set as the same value as `node_type_id` defined above.""" + enable_local_disk_encryption: Optional[bool] = None + """Whether to enable local disk encryption for the cluster.""" + gcp_attributes: Optional[compute.GcpAttributes] = None """Attributes related to clusters running on Google Cloud Platform. If not specified at cluster creation, a set of default values will be used.""" @@ -1074,6 +1075,8 @@ def as_dict(self) -> dict: if self.driver_instance_pool_id is not None: body['driver_instance_pool_id'] = self.driver_instance_pool_id if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id + if self.enable_local_disk_encryption is not None: + body['enable_local_disk_encryption'] = self.enable_local_disk_encryption if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict() if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts] if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id @@ -1097,6 +1100,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineCluster: custom_tags=d.get('custom_tags', None), driver_instance_pool_id=d.get('driver_instance_pool_id', None), driver_node_type_id=d.get('driver_node_type_id', None), + enable_local_disk_encryption=d.get('enable_local_disk_encryption', None), gcp_attributes=_from_dict(d, 'gcp_attributes', compute.GcpAttributes), init_scripts=_repeated_dict(d, 'init_scripts', compute.InitScriptInfo), instance_pool_id=d.get('instance_pool_id', None), @@ -1244,6 +1248,9 @@ class PipelineLibrary: notebook: Optional[NotebookLibrary] = None """The path to a notebook that defines a pipeline and is stored in the Databricks workspace.""" + whl: Optional[str] = None + """URI of the whl to be installed.""" + def as_dict(self) -> dict: """Serializes the PipelineLibrary into a dictionary suitable for use as a JSON request body.""" body = {} @@ -1251,6 +1258,7 @@ def as_dict(self) -> dict: if self.jar is not None: body['jar'] = self.jar if self.maven: body['maven'] = self.maven.as_dict() if self.notebook: body['notebook'] = self.notebook.as_dict() + if self.whl is not None: body['whl'] = self.whl return body @classmethod @@ -1259,7 +1267,8 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary: return cls(file=_from_dict(d, 'file', FileLibrary), 
jar=d.get('jar', None), maven=_from_dict(d, 'maven', compute.MavenLibrary), - notebook=_from_dict(d, 'notebook', NotebookLibrary)) + notebook=_from_dict(d, 'notebook', NotebookLibrary), + whl=d.get('whl', None)) @dataclass @@ -1403,7 +1412,7 @@ class PipelineSpec: id: Optional[str] = None """Unique identifier for this pipeline.""" - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None + ingestion_definition: Optional[IngestionPipelineDefinition] = None """The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings.""" @@ -1472,8 +1481,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec: filters=_from_dict(d, 'filters', Filters), gateway_definition=_from_dict(d, 'gateway_definition', IngestionGatewayPipelineDefinition), id=d.get('id', None), - ingestion_definition=_from_dict(d, 'ingestion_definition', - ManagedIngestionPipelineDefinition), + ingestion_definition=_from_dict(d, 'ingestion_definition', IngestionPipelineDefinition), libraries=_repeated_dict(d, 'libraries', PipelineLibrary), name=d.get('name', None), notifications=_repeated_dict(d, 'notifications', Notifications), @@ -1506,6 +1514,9 @@ class PipelineStateInfo: creator_user_name: Optional[str] = None """The username of the pipeline creator.""" + health: Optional[PipelineStateInfoHealth] = None + """The health of a pipeline.""" + latest_updates: Optional[List[UpdateStateInfo]] = None """Status of the latest updates for the pipeline. Ordered with the newest update first.""" @@ -1527,6 +1538,7 @@ def as_dict(self) -> dict: body = {} if self.cluster_id is not None: body['cluster_id'] = self.cluster_id if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name + if self.health is not None: body['health'] = self.health.value if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates] if self.name is not None: body['name'] = self.name if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id @@ -1539,6 +1551,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo: """Deserializes the PipelineStateInfo from a dictionary.""" return cls(cluster_id=d.get('cluster_id', None), creator_user_name=d.get('creator_user_name', None), + health=_enum(d, 'health', PipelineStateInfoHealth), latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo), name=d.get('name', None), pipeline_id=d.get('pipeline_id', None), @@ -1546,6 +1559,13 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo: state=_enum(d, 'state', PipelineState)) +class PipelineStateInfoHealth(Enum): + """The health of a pipeline.""" + + HEALTHY = 'HEALTHY' + UNHEALTHY = 'UNHEALTHY' + + @dataclass class PipelineTrigger: cron: Optional[CronTrigger] = None @@ -1584,7 +1604,7 @@ class SchemaSpec: table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. These settings are applied to all tables in this schema and override the table_configuration defined in the - ManagedIngestionPipelineDefinition object.""" + IngestionPipelineDefinition object.""" def as_dict(self) -> dict: """Serializes the SchemaSpec into a dictionary suitable for use as a JSON request body.""" @@ -1796,7 +1816,7 @@ class TableSpec: table_configuration: Optional[TableSpecificConfig] = None """Configuration settings to control the ingestion of tables. 
These settings override the - table_configuration defined in the ManagedIngestionPipelineDefinition object and the SchemaSpec.""" + table_configuration defined in the IngestionPipelineDefinition object and the SchemaSpec.""" def as_dict(self) -> dict: """Serializes the TableSpec into a dictionary suitable for use as a JSON request body.""" @@ -2090,7 +2110,7 @@ def create(self, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -2131,7 +2151,7 @@ def create(self, The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) @@ -2498,7 +2518,7 @@ def update(self, filters: Optional[Filters] = None, gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None, id: Optional[str] = None, - ingestion_definition: Optional[ManagedIngestionPipelineDefinition] = None, + ingestion_definition: Optional[IngestionPipelineDefinition] = None, libraries: Optional[List[PipelineLibrary]] = None, name: Optional[str] = None, notifications: Optional[List[Notifications]] = None, @@ -2542,7 +2562,7 @@ def update(self, The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py index b1c43a926..97306b075 100755 --- a/databricks/sdk/service/serving.py +++ b/databricks/sdk/service/serving.py @@ -144,233 +144,6 @@ def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig: anthropic_api_key_plaintext=d.get('anthropic_api_key_plaintext', None)) -@dataclass -class App: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - It must be unique within the workspace.""" - - active_deployment: Optional[AppDeployment] = None - """The active deployment of the app.""" - - create_time: Optional[str] = None - """The creation time of the app. Formatted timestamp in ISO 6801.""" - - creator: Optional[str] = None - """The email of the user that created the app.""" - - description: Optional[str] = None - """The description of the app.""" - - pending_deployment: Optional[AppDeployment] = None - """The pending deployment of the app.""" - - service_principal_id: Optional[int] = None - - service_principal_name: Optional[str] = None - - status: Optional[AppStatus] = None - - update_time: Optional[str] = None - """The update time of the app. 
Formatted timestamp in ISO 6801.""" - - updater: Optional[str] = None - """The email of the user that last updated the app.""" - - url: Optional[str] = None - """The URL of the app once it is deployed.""" - - def as_dict(self) -> dict: - """Serializes the App into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict() - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict() - if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id - if self.service_principal_name is not None: - body['service_principal_name'] = self.service_principal_name - if self.status: body['status'] = self.status.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time - if self.updater is not None: body['updater'] = self.updater - if self.url is not None: body['url'] = self.url - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> App: - """Deserializes the App from a dictionary.""" - return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment), - create_time=d.get('create_time', None), - creator=d.get('creator', None), - description=d.get('description', None), - name=d.get('name', None), - pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment), - service_principal_id=d.get('service_principal_id', None), - service_principal_name=d.get('service_principal_name', None), - status=_from_dict(d, 'status', AppStatus), - update_time=d.get('update_time', None), - updater=d.get('updater', None), - url=d.get('url', None)) - - -@dataclass -class AppDeployment: - source_code_path: str - """The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - app. The former refers to the original source code location of the app in the workspace during - deployment creation, whereas the latter provides a system generated stable snapshotted source - code path used by the deployment.""" - - mode: AppDeploymentMode - """The mode of which the deployment will manage the source code.""" - - create_time: Optional[str] = None - """The creation time of the deployment. Formatted timestamp in ISO 6801.""" - - creator: Optional[str] = None - """The email of the user creates the deployment.""" - - deployment_artifacts: Optional[AppDeploymentArtifacts] = None - """The deployment artifacts for an app.""" - - deployment_id: Optional[str] = None - """The unique id of the deployment.""" - - status: Optional[AppDeploymentStatus] = None - """Status and status message of the deployment""" - - update_time: Optional[str] = None - """The update time of the deployment. 
Formatted timestamp in ISO 6801.""" - - def as_dict(self) -> dict: - """Serializes the AppDeployment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.create_time is not None: body['create_time'] = self.create_time - if self.creator is not None: body['creator'] = self.creator - if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts.as_dict() - if self.deployment_id is not None: body['deployment_id'] = self.deployment_id - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - if self.status: body['status'] = self.status.as_dict() - if self.update_time is not None: body['update_time'] = self.update_time - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeployment: - """Deserializes the AppDeployment from a dictionary.""" - return cls(create_time=d.get('create_time', None), - creator=d.get('creator', None), - deployment_artifacts=_from_dict(d, 'deployment_artifacts', AppDeploymentArtifacts), - deployment_id=d.get('deployment_id', None), - mode=_enum(d, 'mode', AppDeploymentMode), - source_code_path=d.get('source_code_path', None), - status=_from_dict(d, 'status', AppDeploymentStatus), - update_time=d.get('update_time', None)) - - -@dataclass -class AppDeploymentArtifacts: - source_code_path: Optional[str] = None - """The snapshotted workspace file system path of the source code loaded by the deployed app.""" - - def as_dict(self) -> dict: - """Serializes the AppDeploymentArtifacts into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts: - """Deserializes the AppDeploymentArtifacts from a dictionary.""" - return cls(source_code_path=d.get('source_code_path', None)) - - -class AppDeploymentMode(Enum): - - AUTO_SYNC = 'AUTO_SYNC' - SNAPSHOT = 'SNAPSHOT' - - -class AppDeploymentState(Enum): - - FAILED = 'FAILED' - IN_PROGRESS = 'IN_PROGRESS' - STOPPED = 'STOPPED' - SUCCEEDED = 'SUCCEEDED' - - -@dataclass -class AppDeploymentStatus: - message: Optional[str] = None - """Message corresponding with the deployment state.""" - - state: Optional[AppDeploymentState] = None - """State of the deployment.""" - - def as_dict(self) -> dict: - """Serializes the AppDeploymentStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus: - """Deserializes the AppDeploymentStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', AppDeploymentState)) - - -@dataclass -class AppEnvironment: - env: Optional[List[EnvVariable]] = None - - def as_dict(self) -> dict: - """Serializes the AppEnvironment into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.env: body['env'] = [v.as_dict() for v in self.env] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppEnvironment: - """Deserializes the AppEnvironment from a dictionary.""" - return cls(env=_repeated_dict(d, 'env', EnvVariable)) - - -class AppState(Enum): - - CREATING = 'CREATING' - DELETED = 'DELETED' - DELETING = 'DELETING' - ERROR = 'ERROR' - IDLE = 'IDLE' 
- RUNNING = 'RUNNING' - STARTING = 'STARTING' - - -@dataclass -class AppStatus: - message: Optional[str] = None - """Message corresponding with the app state.""" - - state: Optional[AppState] = None - """State of the app.""" - - def as_dict(self) -> dict: - """Serializes the AppStatus into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.message is not None: body['message'] = self.message - if self.state is not None: body['state'] = self.state.value - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> AppStatus: - """Deserializes the AppStatus from a dictionary.""" - return cls(message=d.get('message', None), state=_enum(d, 'state', AppState)) - - @dataclass class AutoCaptureConfigInput: catalog_name: Optional[str] = None @@ -537,59 +310,6 @@ def from_dict(cls, d: Dict[str, any]) -> CohereConfig: cohere_api_key_plaintext=d.get('cohere_api_key_plaintext', None)) -@dataclass -class CreateAppDeploymentRequest: - source_code_path: str - """The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed - app. The former refers to the original source code location of the app in the workspace during - deployment creation, whereas the latter provides a system generated stable snapshotted source - code path used by the deployment.""" - - mode: AppDeploymentMode - """The mode of which the deployment will manage the source code.""" - - app_name: Optional[str] = None - """The name of the app.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_name is not None: body['app_name'] = self.app_name - if self.mode is not None: body['mode'] = self.mode.value - if self.source_code_path is not None: body['source_code_path'] = self.source_code_path - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest: - """Deserializes the CreateAppDeploymentRequest from a dictionary.""" - return cls(app_name=d.get('app_name', None), - mode=_enum(d, 'mode', AppDeploymentMode), - source_code_path=d.get('source_code_path', None)) - - -@dataclass -class CreateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. 
- It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - def as_dict(self) -> dict: - """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest: - """Deserializes the CreateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None)) - - @dataclass class CreateServingEndpoint: name: str @@ -950,28 +670,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag: return cls(key=d.get('key', None), value=d.get('value', None)) -@dataclass -class EnvVariable: - name: Optional[str] = None - - value: Optional[str] = None - - value_from: Optional[str] = None - - def as_dict(self) -> dict: - """Serializes the EnvVariable into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.name is not None: body['name'] = self.name - if self.value is not None: body['value'] = self.value - if self.value_from is not None: body['value_from'] = self.value_from - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> EnvVariable: - """Deserializes the EnvVariable from a dictionary.""" - return cls(name=d.get('name', None), value=d.get('value', None), value_from=d.get('value_from', None)) - - @dataclass class ExportMetricsResponse: contents: Optional[BinaryIO] = None @@ -1214,48 +912,6 @@ def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig: region=d.get('region', None)) -@dataclass -class ListAppDeploymentsResponse: - app_deployments: Optional[List[AppDeployment]] = None - """Deployment history of the app.""" - - next_page_token: Optional[str] = None - """Pagination token to request the next page of apps.""" - - def as_dict(self) -> dict: - """Serializes the ListAppDeploymentsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.app_deployments: body['app_deployments'] = [v.as_dict() for v in self.app_deployments] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse: - """Deserializes the ListAppDeploymentsResponse from a dictionary.""" - return cls(app_deployments=_repeated_dict(d, 'app_deployments', AppDeployment), - next_page_token=d.get('next_page_token', None)) - - -@dataclass -class ListAppsResponse: - apps: Optional[List[App]] = None - - next_page_token: Optional[str] = None - """Pagination token to request the next page of apps.""" - - def as_dict(self) -> dict: - """Serializes the ListAppsResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.apps: body['apps'] = [v.as_dict() for v in self.apps] - if self.next_page_token is not None: body['next_page_token'] = self.next_page_token - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse: - """Deserializes the ListAppsResponse from a dictionary.""" - return cls(apps=_repeated_dict(d, 'apps', App), next_page_token=d.get('next_page_token', None)) - - @dataclass class ListEndpointsResponse: endpoints: Optional[List[ServingEndpoint]] = None @@ -2560,32 +2216,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest: serving_endpoint_id=d.get('serving_endpoint_id', 
None)) -@dataclass -class StartAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppRequest: - name: Optional[str] = None - """The name of the app.""" - - -@dataclass -class StopAppResponse: - - def as_dict(self) -> dict: - """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body.""" - body = {} - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> StopAppResponse: - """Deserializes the StopAppResponse from a dictionary.""" - return cls() - - @dataclass class TrafficConfig: routes: Optional[List[Route]] = None @@ -2603,28 +2233,6 @@ def from_dict(cls, d: Dict[str, any]) -> TrafficConfig: return cls(routes=_repeated_dict(d, 'routes', Route)) -@dataclass -class UpdateAppRequest: - name: str - """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. - It must be unique within the workspace.""" - - description: Optional[str] = None - """The description of the app.""" - - def as_dict(self) -> dict: - """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.description is not None: body['description'] = self.description - if self.name is not None: body['name'] = self.name - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest: - """Deserializes the UpdateAppRequest from a dictionary.""" - return cls(description=d.get('description', None), name=d.get('name', None)) - - @dataclass class V1ResponseChoiceElement: finish_reason: Optional[str] = None @@ -2662,333 +2270,6 @@ def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement: text=d.get('text', None)) -class AppsAPI: - """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend - Databricks services, and enable users to interact through single sign-on.""" - - def __init__(self, api_client): - self._api = api_client - - def wait_get_app_idle(self, - name: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[App], None]] = None) -> App: - deadline = time.time() + timeout.total_seconds() - target_states = (AppState.IDLE, ) - failure_states = (AppState.ERROR, ) - status_message = 'polling...' - attempt = 1 - while time.time() < deadline: - poll = self.get(name=name) - status = poll.status.state - status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach IDLE, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"name={name}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def wait_get_deployment_app_succeeded( - self, - app_name: str, - deployment_id: str, - timeout=timedelta(minutes=20), - callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment: - deadline = time.time() + timeout.total_seconds() - target_states = (AppDeploymentState.SUCCEEDED, ) - failure_states = (AppDeploymentState.FAILED, ) - status_message = 'polling...' 
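#        (Both waiters in this class share the same polling loop: re-fetch the
#        resource, return it once a target state is reached, raise
#        OperationFailed on a failure state, and otherwise sleep for `attempt`
#        seconds, capped at 10s and with sub-second random jitter added, before
#        retrying; once the deadline passes, a TimeoutError is raised.)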
- attempt = 1 - while time.time() < deadline: - poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id) - status = poll.status.state - status_message = f'current status: {status}' - if poll.status: - status_message = poll.status.message - if status in target_states: - return poll - if callback: - callback(poll) - if status in failure_states: - msg = f'failed to reach SUCCEEDED, got {status}: {status_message}' - raise OperationFailed(msg) - prefix = f"app_name={app_name}, deployment_id={deployment_id}" - sleep = attempt - if sleep > 10: - # sleep 10s max per attempt - sleep = 10 - _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)') - time.sleep(sleep + random.random()) - attempt += 1 - raise TimeoutError(f'timed out after {timeout}: {status_message}') - - def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]: - """Create an app. - - Creates a new app. - - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - - :returns: - Long-running operation waiter for :class:`App`. - See :method:wait_get_app_idle for more details. - """ - body = {} - if description is not None: body['description'] = description - if name is not None: body['name'] = name - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers) - return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name']) - - def create_and_wait(self, - name: str, - *, - description: Optional[str] = None, - timeout=timedelta(minutes=20)) -> App: - return self.create(description=description, name=name).result(timeout=timeout) - - def delete(self, name: str): - """Delete an app. - - Deletes an app. - - :param name: str - The name of the app. - - - """ - - headers = {'Accept': 'application/json', } - - self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers) - - def deploy(self, app_name: str, source_code_path: str, mode: AppDeploymentMode) -> Wait[AppDeployment]: - """Create an app deployment. - - Creates an app deployment for the app with the supplied name. - - :param app_name: str - The name of the app. - :param source_code_path: str - The workspace file system path of the source code used to create the app deployment. This is - different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. - The former refers to the original source code location of the app in the workspace during deployment - creation, whereas the latter provides a system generated stable snapshotted source code path used by - the deployment. - :param mode: :class:`AppDeploymentMode` - The mode of which the deployment will manage the source code. - - :returns: - Long-running operation waiter for :class:`AppDeployment`. - See :method:wait_get_deployment_app_succeeded for more details. 
- """ - body = {} - if mode is not None: body['mode'] = mode.value - if source_code_path is not None: body['source_code_path'] = source_code_path - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - op_response = self._api.do('POST', - f'/api/2.0/preview/apps/{app_name}/deployments', - body=body, - headers=headers) - return Wait(self.wait_get_deployment_app_succeeded, - response=AppDeployment.from_dict(op_response), - app_name=app_name, - deployment_id=op_response['deployment_id']) - - def deploy_and_wait(self, - app_name: str, - source_code_path: str, - mode: AppDeploymentMode, - timeout=timedelta(minutes=20)) -> AppDeployment: - return self.deploy(app_name=app_name, mode=mode, - source_code_path=source_code_path).result(timeout=timeout) - - def get(self, name: str) -> App: - """Get an app. - - Retrieves information for the app with the supplied name. - - :param name: str - The name of the app. - - :returns: :class:`App` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers) - return App.from_dict(res) - - def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment: - """Get an app deployment. - - Retrieves information for the app deployment with the supplied name and deployment id. - - :param app_name: str - The name of the app. - :param deployment_id: str - The unique id of the deployment. - - :returns: :class:`AppDeployment` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', - f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}', - headers=headers) - return AppDeployment.from_dict(res) - - def get_environment(self, name: str) -> AppEnvironment: - """Get app environment. - - Retrieves app environment. - - :param name: str - The name of the app. - - :returns: :class:`AppEnvironment` - """ - - headers = {'Accept': 'application/json', } - - res = self._api.do('GET', f'/api/2.0/preview/apps/{name}/environment', headers=headers) - return AppEnvironment.from_dict(res) - - def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = None) -> Iterator[App]: - """List apps. - - Lists all apps in the workspace. - - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of apps. Requests first page if absent. - - :returns: Iterator over :class:`App` - """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - - while True: - json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers) - if 'apps' in json: - for v in json['apps']: - yield App.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def list_deployments(self, - app_name: str, - *, - page_size: Optional[int] = None, - page_token: Optional[str] = None) -> Iterator[AppDeployment]: - """List app deployments. - - Lists all app deployments for the app with the supplied name. - - :param app_name: str - The name of the app. - :param page_size: int (optional) - Upper bound for items returned. - :param page_token: str (optional) - Pagination token to go to the next page of apps. Requests first page if absent. 
- - :returns: Iterator over :class:`AppDeployment` - """ - - query = {} - if page_size is not None: query['page_size'] = page_size - if page_token is not None: query['page_token'] = page_token - headers = {'Accept': 'application/json', } - - while True: - json = self._api.do('GET', - f'/api/2.0/preview/apps/{app_name}/deployments', - query=query, - headers=headers) - if 'app_deployments' in json: - for v in json['app_deployments']: - yield AppDeployment.from_dict(v) - if 'next_page_token' not in json or not json['next_page_token']: - return - query['page_token'] = json['next_page_token'] - - def start(self, name: str) -> AppDeployment: - """Start an app. - - Start the last active deployment of the app in the workspace. - - :param name: str - The name of the app. - - :returns: :class:`AppDeployment` - """ - - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers) - return AppDeployment.from_dict(res) - - def stop(self, name: str): - """Stop an app. - - Stops the active deployment of the app in the workspace. - - :param name: str - The name of the app. - - - """ - - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers) - - def update(self, name: str, *, description: Optional[str] = None) -> App: - """Update an app. - - Updates the app with the supplied name. - - :param name: str - The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It - must be unique within the workspace. - :param description: str (optional) - The description of the app. - - :returns: :class:`App` - """ - body = {} - if description is not None: body['description'] = description - headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - - res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers) - return App.from_dict(res) - - class ServingEndpointsAPI: """The Serving Endpoints API allows you to create, update, and delete model serving endpoints. diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py index fc411ff83..23a31e774 100755 --- a/databricks/sdk/service/sharing.py +++ b/databricks/sdk/service/sharing.py @@ -483,6 +483,9 @@ class CreateRecipient: when the __authentication_type__ is **DATABRICKS**. 
The identifier is of format __cloud__:__region__:__metastore-uuid__.""" + expiration_time: Optional[int] = None + """Expiration timestamp of the token, in epoch milliseconds.""" + ip_access_list: Optional[IpAccessList] = None """IP Access List""" @@ -503,6 +506,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.name is not None: body['name'] = self.name if self.owner is not None: body['owner'] = self.owner @@ -516,6 +520,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateRecipient: return cls(authentication_type=_enum(d, 'authentication_type', AuthenticationType), comment=d.get('comment', None), data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None), + expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), owner=d.get('owner', None), @@ -580,19 +585,25 @@ def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse: @dataclass class GetRecipientSharePermissionsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + permissions_out: Optional[List[ShareToPrivilegeAssignment]] = None """An array of data share permissions for a recipient.""" def as_dict(self) -> dict: """Serializes the GetRecipientSharePermissionsResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse: """Deserializes the GetRecipientSharePermissionsResponse from a dictionary.""" - return cls(permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment)) + return cls(next_page_token=d.get('next_page_token', None), + permissions_out=_repeated_dict(d, 'permissions_out', ShareToPrivilegeAssignment)) @dataclass @@ -637,70 +648,94 @@ def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse: @dataclass class ListProviderSharesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + shares: Optional[List[ProviderShare]] = None """An array of provider shares.""" def as_dict(self) -> dict: """Serializes the ListProviderSharesResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse: """Deserializes the ListProviderSharesResponse from a dictionary.""" - return cls(shares=_repeated_dict(d, 'shares', ProviderShare)) + return cls(next_page_token=d.get('next_page_token', None), + shares=_repeated_dict(d, 'shares', ProviderShare)) @dataclass class ListProvidersResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + providers: Optional[List[ProviderInfo]] = None """An array of provider information objects.""" def as_dict(self) -> dict: """Serializes the ListProvidersResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.providers: body['providers'] = [v.as_dict() for v in self.providers] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse: """Deserializes the ListProvidersResponse from a dictionary.""" - return cls(providers=_repeated_dict(d, 'providers', ProviderInfo)) + return cls(next_page_token=d.get('next_page_token', None), + providers=_repeated_dict(d, 'providers', ProviderInfo)) @dataclass class ListRecipientsResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. + __page_token__ should be set to this value for the next request (for the next page of results).""" + recipients: Optional[List[RecipientInfo]] = None """An array of recipient information objects.""" def as_dict(self) -> dict: """Serializes the ListRecipientsResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse: """Deserializes the ListRecipientsResponse from a dictionary.""" - return cls(recipients=_repeated_dict(d, 'recipients', RecipientInfo)) + return cls(next_page_token=d.get('next_page_token', None), + recipients=_repeated_dict(d, 'recipients', RecipientInfo)) @dataclass class ListSharesResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request (for the next page of results).""" + shares: Optional[List[ShareInfo]] = None """An array of data share information objects.""" def as_dict(self) -> dict: """Serializes the ListSharesResponse into a dictionary suitable for use as a JSON request body.""" body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.shares: body['shares'] = [v.as_dict() for v in self.shares] return body @classmethod def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse: """Deserializes the ListSharesResponse from a dictionary.""" - return cls(shares=_repeated_dict(d, 'shares', ShareInfo)) + return cls(next_page_token=d.get('next_page_token', None), + shares=_repeated_dict(d, 'shares', ShareInfo)) @dataclass @@ -1526,6 +1561,9 @@ class UpdateRecipient: comment: Optional[str] = None """Description about the recipient.""" + expiration_time: Optional[int] = None + """Expiration timestamp of the token, in epoch milliseconds.""" + ip_access_list: Optional[IpAccessList] = None """IP Access List""" @@ -1547,6 +1585,7 @@ def as_dict(self) -> dict: """Serializes the UpdateRecipient into a dictionary suitable for use as a JSON request body.""" body = {} if self.comment is not None: body['comment'] = self.comment + if self.expiration_time is not None: body['expiration_time'] = self.expiration_time if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict() if self.name is not None: body['name'] = self.name if self.new_name is not None: body['new_name'] = self.new_name @@ -1558,6 +1597,7 @@ def as_dict(self) -> dict: def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient: """Deserializes the UpdateRecipient from a dictionary.""" return cls(comment=d.get('comment', None), + expiration_time=d.get('expiration_time', None), ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList), name=d.get('name', None), new_name=d.get('new_name', None), @@ -1626,20 +1666,37 @@ class UpdateSharePermissions: changes: Optional[List[catalog.PermissionsChange]] = None """Array of permission changes.""" + max_results: Optional[int] = None + """Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the + minimum of this value and a server configured value; - when set to a value less than 0, an + invalid parameter error is returned; - If not set, all valid permissions are returned (not + recommended). - Note: The number of returned permissions might be less than the specified + max_results size, even zero. 
The only definitive indication that no further permissions can be + fetched is when the next_page_token is unset from the response.""" + name: Optional[str] = None """The name of the share.""" + page_token: Optional[str] = None + """Opaque pagination token to go to next page based on previous query.""" + def as_dict(self) -> dict: """Serializes the UpdateSharePermissions into a dictionary suitable for use as a JSON request body.""" body = {} if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.max_results is not None: body['max_results'] = self.max_results if self.name is not None: body['name'] = self.name + if self.page_token is not None: body['page_token'] = self.page_token return body @classmethod def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions: """Deserializes the UpdateSharePermissions from a dictionary.""" - return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange), name=d.get('name', None)) + return cls(changes=_repeated_dict(d, 'changes', catalog.PermissionsChange), + max_results=d.get('max_results', None), + name=d.get('name', None), + page_token=d.get('page_token', None)) class CleanRoomsAPI: @@ -1865,7 +1922,11 @@ def get(self, name: str) -> ProviderInfo: res = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}', headers=headers) return ProviderInfo.from_dict(res) - def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> Iterator[ProviderInfo]: + def list(self, + *, + data_provider_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ProviderInfo]: """List providers. Gets an array of available authentication providers. The caller must either be a metastore admin or @@ -1875,6 +1936,16 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of providers to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid providers are returned (not recommended). - Note: The + number of returned providers might be less than the specified max_results size, even zero. The only + definitive indication that no further providers can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
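        A minimal usage sketch (assuming a configured WorkspaceClient `w`): the
        method now follows next_page_token itself, so callers simply iterate:

            for provider in w.providers.list(max_results=10):
                print(provider.name)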
:returns: Iterator over :class:`ProviderInfo` """ @@ -1882,13 +1953,24 @@ def list(self, *, data_provider_global_metastore_id: Optional[str] = None) -> It query = {} if data_provider_global_metastore_id is not None: query['data_provider_global_metastore_id'] = data_provider_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers) - parsed = ListProvidersResponse.from_dict(json).providers - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers) + if 'providers' in json: + for v in json['providers']: + yield ProviderInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] - def list_shares(self, name: str) -> Iterator[ProviderShare]: + def list_shares(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ProviderShare]: """List shares by Provider. Gets an array of a specified provider's shares within the metastore where: @@ -1897,13 +1979,29 @@ def list_shares(self, name: str) -> Iterator[ProviderShare]: :param name: str Name of the provider in which to list shares. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderShare` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', f'/api/2.1/unity-catalog/providers/{name}/shares', headers=headers) + json = self._api.do('GET', + f'/api/2.1/unity-catalog/providers/{name}/shares', + query=query, + headers=headers) parsed = ListProviderSharesResponse.from_dict(json).shares return parsed if parsed is not None else [] @@ -2016,6 +2114,7 @@ def create(self, *, comment: Optional[str] = None, data_recipient_global_metastore_id: Optional[str] = None, + expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, owner: Optional[str] = None, properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None, @@ -2035,6 +2134,8 @@ def create(self, The global Unity Catalog metastore id provided by the data recipient. This field is required when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. 
:param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param owner: str (optional) @@ -2052,6 +2153,7 @@ def create(self, if comment is not None: body['comment'] = comment if data_recipient_global_metastore_id is not None: body['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if expiration_time is not None: body['expiration_time'] = expiration_time if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() if name is not None: body['name'] = name if owner is not None: body['owner'] = owner @@ -2095,7 +2197,11 @@ def get(self, name: str) -> RecipientInfo: res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}', headers=headers) return RecipientInfo.from_dict(res) - def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> Iterator[RecipientInfo]: + def list(self, + *, + data_recipient_global_metastore_id: Optional[str] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[RecipientInfo]: """List share recipients. Gets an array of all share recipients within the current metastore where: @@ -2106,6 +2212,16 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of recipients to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The + number of returned recipients might be less than the specified max_results size, even zero. The only + definitive indication that no further recipients can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`RecipientInfo` """ @@ -2113,11 +2229,18 @@ def list(self, *, data_recipient_global_metastore_id: Optional[str] = None) -> I query = {} if data_recipient_global_metastore_id is not None: query['data_recipient_global_metastore_id'] = data_recipient_global_metastore_id + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers) - parsed = ListRecipientsResponse.from_dict(json).recipients - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers) + if 'recipients' in json: + for v in json['recipients']: + yield RecipientInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> RecipientInfo: """Rotate a token. 
@@ -2145,7 +2268,11 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci headers=headers) return RecipientInfo.from_dict(res) - def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse: + def share_permissions(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> GetRecipientSharePermissionsResponse: """Get recipient share permissions. Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the @@ -2153,14 +2280,28 @@ def share_permissions(self, name: str) -> GetRecipientSharePermissionsResponse: :param name: str The name of the Recipient. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`GetRecipientSharePermissionsResponse` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } res = self._api.do('GET', f'/api/2.1/unity-catalog/recipients/{name}/share-permissions', + query=query, headers=headers) return GetRecipientSharePermissionsResponse.from_dict(res) @@ -2168,6 +2309,7 @@ def update(self, name: str, *, comment: Optional[str] = None, + expiration_time: Optional[int] = None, ip_access_list: Optional[IpAccessList] = None, new_name: Optional[str] = None, owner: Optional[str] = None, @@ -2182,6 +2324,8 @@ def update(self, Name of the recipient. :param comment: str (optional) Description about the recipient. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) @@ -2197,6 +2341,7 @@ def update(self, """ body = {} if comment is not None: body['comment'] = comment + if expiration_time is not None: body['expiration_time'] = expiration_time if ip_access_list is not None: body['ip_access_list'] = ip_access_list.as_dict() if new_name is not None: body['new_name'] = new_name if owner is not None: body['owner'] = owner @@ -2279,22 +2424,48 @@ def get(self, name: str, *, include_shared_data: Optional[bool] = None) -> Share res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}', query=query, headers=headers) return ShareInfo.from_dict(res) - def list(self) -> Iterator[ShareInfo]: + def list(self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ShareInfo]: """List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. + :param max_results: int (optional) + Maximum number of shares to return. 
- when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :returns: Iterator over :class:`ShareInfo` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - json = self._api.do('GET', '/api/2.1/unity-catalog/shares', headers=headers) - parsed = ListSharesResponse.from_dict(json).shares - return parsed if parsed is not None else [] + while True: + json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers) + if 'shares' in json: + for v in json['shares']: + yield ShareInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] - def share_permissions(self, name: str) -> catalog.PermissionsList: + def share_permissions(self, + name: str, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> catalog.PermissionsList: """Get permissions. Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the @@ -2302,13 +2473,29 @@ def share_permissions(self, name: str) -> catalog.PermissionsList: :param name: str The name of the share. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. 
:returns: :class:`PermissionsList` """ + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } - res = self._api.do('GET', f'/api/2.1/unity-catalog/shares/{name}/permissions', headers=headers) + res = self._api.do('GET', + f'/api/2.1/unity-catalog/shares/{name}/permissions', + query=query, + headers=headers) return PermissionsList.from_dict(res) def update(self, @@ -2363,7 +2550,12 @@ def update(self, res = self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}', body=body, headers=headers) return ShareInfo.from_dict(res) - def update_permissions(self, name: str, *, changes: Optional[List[catalog.PermissionsChange]] = None): + def update_permissions(self, + name: str, + *, + changes: Optional[List[catalog.PermissionsChange]] = None, + max_results: Optional[int] = None, + page_token: Optional[str] = None): """Update permissions. Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an @@ -2376,11 +2568,28 @@ def update_permissions(self, name: str, *, changes: Optional[List[catalog.Permis The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permission changes. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. """ body = {} + query = {} if changes is not None: body['changes'] = [v.as_dict() for v in changes] + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } - self._api.do('PATCH', f'/api/2.1/unity-catalog/shares/{name}/permissions', body=body, headers=headers) + self._api.do('PATCH', + f'/api/2.1/unity-catalog/shares/{name}/permissions', + query=query, + body=body, + headers=headers) diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index bcb46bb50..f2526909f 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -1483,26 +1483,6 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse: class Disposition(Enum): - """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger - than 25 MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, - and might not match the sizes visible in JSON responses. 
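The `Disposition` docstring removed here still describes real behavior of :method:statementexecution/executestatement, so for reference, a hedged sketch of both fetch modes (the warehouse ID and queries are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import Disposition, Format

w = WorkspaceClient()

# INLINE: rows come back in-band as JSON_ARRAY chunks (results over 25 MiB abort).
inline = w.statement_execution.execute_statement(statement='SELECT 1',
                                                 warehouse_id='<warehouse-id>',
                                                 disposition=Disposition.INLINE)

# EXTERNAL_LINKS: rows are served through short-lived presigned URLs; strip any
# Databricks auth headers before downloading from those links.
external = w.statement_execution.execute_statement(statement='SELECT id FROM range(1000000)',
                                                   warehouse_id='<warehouse-id>',
                                                   disposition=Disposition.EXTERNAL_LINKS,
                                                   format=Format.ARROW_STREAM)
```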
- - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior - when attempting to use an expired link is cloud specific.""" EXTERNAL_LINKS = 'EXTERNAL_LINKS' INLINE = 'INLINE' @@ -2019,26 +1999,6 @@ class ExecuteStatementRequest: [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html""" disposition: Optional[Disposition] = None - """The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger - than 25 MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, - and might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior - when attempting to use an expired link is cloud specific.""" format: Optional[Format] = None """Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and @@ -2191,9 +2151,6 @@ class ExternalLink: which point a new `external_link` must be requested.""" external_link: Optional[str] = None - """A presigned URL pointing to a chunk of result data, hosted by an external service, with a short - expiration time (<= 15 minutes). As this URL contains a temporary credential, it should be - considered sensitive and the client should not expose this URL in a log.""" http_headers: Optional[Dict[str, str]] = None """HTTP headers that must be included with a GET request to the `external_link`. Each header is @@ -4203,12 +4160,6 @@ def from_dict(cls, d: Dict[str, any]) -> RestoreResponse: @dataclass class ResultData: - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. 
Currently - only a single link is returned.)""" - byte_count: Optional[int] = None """The number of bytes in the result chunk. This field is not available when using `INLINE` disposition.""" @@ -4590,11 +4541,6 @@ class StatementResponse: """The result manifest provides schema and metadata for the result set.""" result: Optional[ResultData] = None - """Contains the result data of a single chunk when using `INLINE` disposition. When using - `EXTERNAL_LINKS` disposition, the array `external_links` is used instead to provide presigned - URLs to the result data in cloud storage. Exactly one of these alternatives is used. (While the - `external_links` array prepares the API to return multiple links in a single response. Currently - only a single link is returned.)""" statement_id: Optional[str] = None """The statement ID is returned upon successfully submitting a SQL statement, and is a required @@ -5304,6 +5250,7 @@ class WarehousePermissionLevel(Enum): """Permission level""" CAN_MANAGE = 'CAN_MANAGE' + CAN_MONITOR = 'CAN_MONITOR' CAN_USE = 'CAN_USE' IS_OWNER = 'IS_OWNER' @@ -5646,7 +5593,10 @@ class AlertsLegacyAPI: notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -5664,7 +5614,9 @@ def create(self, condition of its result, and notifies users or notification destinations if the condition was met. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param name: str Name of the alert. @@ -5698,7 +5650,9 @@ def delete(self, alert_id: str): queries and dashboards, alerts cannot be moved to the trash. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str @@ -5715,7 +5669,9 @@ def get(self, alert_id: str) -> LegacyAlert: Gets an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str @@ -5733,7 +5689,9 @@ def list(self) -> Iterator[LegacyAlert]: Gets a list of alerts. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`LegacyAlert` """ @@ -5755,7 +5713,9 @@ def update(self, Updates an alert. **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param alert_id: str :param name: str @@ -6055,7 +6015,9 @@ class DataSourcesAPI: advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. 
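The new `CAN_MONITOR` level added above can be granted like any other warehouse permission. A sketch of assigning it, assuming the standard warehouse permissions setter and a hypothetical group:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

w.warehouses.set_permissions(
    warehouse_id='<warehouse-id>',  # placeholder
    access_control_list=[
        sql.WarehouseAccessControlRequest(
            group_name='data-observers',  # hypothetical group
            permission_level=sql.WarehousePermissionLevel.CAN_MONITOR)
    ])
```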
- **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6068,7 +6030,9 @@ def list(self) -> Iterator[DataSource]: queries against it. **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`DataSource` """ @@ -6092,7 +6056,9 @@ class DbsqlPermissionsAPI: - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6102,6 +6068,11 @@ def get(self, object_type: ObjectTypePlural, object_id: str) -> GetResponse: Gets a JSON representation of the access control list (ACL) for a specified object. + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/getpermissions instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. :param object_id: str @@ -6127,6 +6098,11 @@ def set(self, Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/setpermissions instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + :param object_type: :class:`ObjectTypePlural` The type of object permission to set. :param object_id: str @@ -6156,7 +6132,9 @@ def transfer_ownership(self, Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use - :method:queries/update and :method:alerts/update respectively instead. + :method:queries/update and :method:alerts/update respectively instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. @@ -6323,7 +6301,10 @@ class QueriesLegacyAPI: SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6350,7 +6331,9 @@ def create(self, **Note**: You cannot add a visualization until you create the query. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create - instead. + instead. 
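While the notes above steer new code to the latest API, the legacy ACL surface keeps working; a minimal read-then-rewrite sketch against a placeholder query ID:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Read the current ACL for a legacy query object.
acl = w.dbsql_permissions.get(object_type=sql.ObjectTypePlural.QUERIES,
                              object_id='<query-id>')

# `set` performs a complete rewrite of the ACL, so pass the full list back.
w.dbsql_permissions.set(object_type=sql.ObjectTypePlural.QUERIES,
                        object_id='<query-id>',
                        access_control_list=acl.access_control_list)
```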
[Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param data_source_id: str (optional) Data source ID maps to the ID of the data source used by the resource and is distinct from the @@ -6397,7 +6380,9 @@ def delete(self, query_id: str): they cannot be used for alerts. The trash is deleted after 30 days. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6415,7 +6400,9 @@ def get(self, query_id: str) -> LegacyQuery: authenticated user. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6441,7 +6428,9 @@ def list(self, degradation, or a temporary ban. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param order: str (optional) Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order @@ -6497,6 +6486,9 @@ def restore(self, query_id: str): You can use restored queries for alerts. **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str @@ -6524,7 +6516,9 @@ def update(self, **Note**: You cannot undo this operation. **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update - instead. + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str :param data_source_id: str (optional) @@ -6675,7 +6669,10 @@ class QueryVisualizationsLegacyAPI: """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace. Data structures may change over time. - **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.""" + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html""" def __init__(self, api_client): self._api = api_client @@ -6692,7 +6689,9 @@ def create(self, Creates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. Please use - :method:queryvisualizations/create instead. + :method:queryvisualizations/create instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param query_id: str The identifier returned by :method:queries/create @@ -6725,7 +6724,9 @@ def delete(self, id: str): Removes a visualization from the query. **Note**: A new version of the Databricks SQL API is now available. Please use - :method:queryvisualizations/delete instead. + :method:queryvisualizations/delete instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str Widget ID returned by :method:queryvizualisations/create @@ -6752,7 +6753,9 @@ def update(self, Updates visualization in the query. **Note**: A new version of the Databricks SQL API is now available. 
Please use - :method:queryvisualizations/update instead. + :method:queryvisualizations/update instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param id: str The UUID for this visualization. @@ -6921,26 +6924,6 @@ def execute_statement(self, [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger than 25 - MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, and - might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when - attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index 9093e4e46..e187e0aa6 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.29.0' +__version__ = '0.30.0' diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst index 85f7ee133..bb625b49b 100644 --- a/docs/account/billing/budgets.rst +++ b/docs/account/billing/budgets.rst @@ -4,10 +4,11 @@ .. py:class:: BudgetsAPI - These APIs manage budget configuration including notifications for exceeding a budget for a period. They - can also retrieve the status of each budget. + These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your + account. You can set up budgets to either track account-wide spending, or apply filters to track the + spending of specific teams, projects, or workspaces. - .. py:method:: create(budget: Budget) -> WrappedBudgetWithStatus + .. 
py:method:: create(budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse Usage: @@ -21,40 +22,55 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Create a new budget. + Create new budget. - Creates a new budget in the specified account. + Create a new budget configuration for an account. For full details, see + https://docs.databricks.com/en/admin/account-settings/budgets.html. - :param budget: :class:`Budget` - Budget configuration to be created. + :param budget: :class:`CreateBudgetConfigurationBudget` + Properties of the new budget configuration. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`CreateBudgetConfigurationResponse` .. py:method:: delete(budget_id: str) Delete budget. - Deletes the budget specified by its UUID. + Deletes a budget configuration for an account. Both account and budget configuration are specified by + ID. This cannot be undone. :param budget_id: str - Budget ID + The Databricks budget configuration ID. - .. py:method:: get(budget_id: str) -> WrappedBudgetWithStatus + .. 
py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse Usage: @@ -68,31 +84,43 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) - - by_id = a.budgets.get(budget_id=created.budget.budget_id) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) + + by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) - Get budget and its status. + Get budget. - Gets the budget specified by its UUID, including noncumulative status for each day that the budget is - configured to include. + Gets a budget configuration for an account. Both account and budget configuration are specified by ID. :param budget_id: str - Budget ID + The Databricks budget configuration ID. - :returns: :class:`WrappedBudgetWithStatus` + :returns: :class:`GetBudgetConfigurationResponse` - .. py:method:: list() -> Iterator[BudgetWithStatus] + .. py:method:: list( [, page_token: Optional[str]]) -> Iterator[BudgetConfiguration] Usage: @@ -100,20 +128,24 @@ .. code-block:: from databricks.sdk import AccountClient + from databricks.sdk.service import billing a = AccountClient() - all = a.budgets.list() + all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) Get all budgets. - Gets all budgets associated with this account, including noncumulative status for each day that the - budget is configured to include. + Gets all budgets associated with this account. - :returns: Iterator over :class:`BudgetWithStatus` + :param page_token: str (optional) + A page token received from a previous get all budget configurations call. This token can be used to + retrieve the subsequent page. Requests first page if absent. + + :returns: Iterator over :class:`BudgetConfiguration` - .. py:method:: update(budget_id: str, budget: Budget) + .. 
py:method:: update(budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse Usage: @@ -127,36 +159,62 @@ a = AccountClient() - created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) - - a.budgets.update(budget_id=created.budget.budget_id, - budget=billing.Budget(name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[ - billing.BudgetAlert(email_notifications=["admin@example.com"], - min_percentage=70) - ])) + created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) + + _ = a.budgets.update( + budget_id=created.budget.budget_configuration_id, + budget=billing.UpdateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause( + key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"])) + ]), + alert_configurations=[ + billing.AlertConfiguration( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="50", + action_configurations=[ + billing.ActionConfiguration( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup - a.budgets.delete(budget_id=created.budget.budget_id) + a.budgets.delete(budget_id=created.budget.budget_configuration_id) Modify budget. - Modifies a budget in this account. Budget properties are completely overwritten. + Updates a budget configuration for an account. Both account and budget configuration are specified by + ID. :param budget_id: str - Budget ID - :param budget: :class:`Budget` - Budget configuration to be created. - + The Databricks budget configuration ID. + :param budget: :class:`UpdateBudgetConfigurationBudget` + The updated budget. This will overwrite the budget specified by the budget ID. + :returns: :class:`UpdateBudgetConfigurationResponse` \ No newline at end of file diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst index 522f6f5fd..0e07da594 100644 --- a/docs/account/billing/index.rst +++ b/docs/account/billing/index.rst @@ -9,4 +9,5 @@ Configure different aspects of Databricks billing and usage. 
billable_usage budgets - log_delivery \ No newline at end of file + log_delivery + usage_dashboards \ No newline at end of file diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst new file mode 100644 index 000000000..350ef1f08 --- /dev/null +++ b/docs/account/billing/usage_dashboards.rst @@ -0,0 +1,39 @@ +``a.usage_dashboards``: Usage Dashboards +======================================== +.. currentmodule:: databricks.sdk.service.billing + +.. py:class:: UsageDashboardsAPI + + These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into + your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost + drivers. + + .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse + + Create new usage dashboard. + + Create a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`CreateBillingUsageDashboardResponse` + + + .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse + + Get usage dashboard. + + Get a usage dashboard specified by workspaceId, accountId, and dashboard type. + + :param dashboard_type: :class:`UsageDashboardType` (optional) + Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage + dashboard shows usage data for all workspaces in the account. + :param workspace_id: int (optional) + The workspace ID of the workspace in which the usage dashboard is created. + + :returns: :class:`GetBillingUsageDashboardResponse` + \ No newline at end of file diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst index 1ce06996e..6230b8199 100644 --- a/docs/account/iam/workspace_assignment.rst +++ b/docs/account/iam/workspace_assignment.rst @@ -15,7 +15,7 @@ principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. @@ -61,7 +61,7 @@ :returns: Iterator over :class:`PermissionAssignment` - .. py:method:: update(workspace_id: int, principal_id: int, permissions: List[WorkspacePermission]) -> PermissionAssignment + .. py:method:: update(workspace_id: int, principal_id: int [, permissions: Optional[List[WorkspacePermission]]]) -> PermissionAssignment Usage: @@ -92,13 +92,15 @@ specified principal. :param workspace_id: int - The workspace ID. + The workspace ID for the account. :param principal_id: int The ID of the user, service principal, or group. - :param permissions: List[:class:`WorkspacePermission`] - Array of permissions assignments to update on the workspace. Note that excluding this field will - have the same effect as providing an empty list which will result in the deletion of all permissions - for the principal. + :param permissions: List[:class:`WorkspacePermission`] (optional) + Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN" + (case-sensitive). 
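The new `UsageDashboardsAPI` documented above needs only a dashboard type and, for the workspace-level variant, a workspace ID; a sketch with a placeholder ID:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

# Create the pre-built usage dashboard for one workspace, then fetch it back.
created = a.usage_dashboards.create(
    dashboard_type=billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890)  # placeholder workspace ID

fetched = a.usage_dashboards.get(
    dashboard_type=billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890)
```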
If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values + will be ignored. Note that excluding this field, or providing unsupported values, will have the same + effect as providing an empty list, which will result in the deletion of all permissions for the + principal. :returns: :class:`PermissionAssignment` \ No newline at end of file diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst index 382ce0bd0..0dcc3d8e0 100644 --- a/docs/account/oauth2/custom_app_integration.rst +++ b/docs/account/oauth2/custom_app_integration.rst @@ -4,23 +4,23 @@ .. py:class:: CustomAppIntegrationAPI - These APIs enable administrators to manage custom oauth app integrations, which is required for + These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud. - .. py:method:: create(name: str, redirect_urls: List[str] [, confidential: Optional[bool], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput + .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput Create Custom OAuth App Integration. Create Custom OAuth App Integration. - You can retrieve the custom oauth app integration via :method:CustomAppIntegration/get. + You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. - :param name: str - name of the custom oauth app - :param redirect_urls: List[str] - List of oauth redirect urls :param confidential: bool (optional) - indicates if an oauth client-secret should be generated + This field indicates whether an OAuth client secret is required to authenticate this client. + :param name: str (optional) + Name of the custom OAuth app + :param redirect_urls: List[str] (optional) + List of OAuth redirect urls :param scopes: List[str] (optional) OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid, profile, email. @@ -34,11 +34,10 @@ Delete Custom OAuth App Integration. - Delete an existing Custom OAuth App Integration. You can retrieve the custom oauth app integration via + Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. @@ -50,16 +49,19 @@ Gets the Custom OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetCustomAppIntegrationOutput` - .. py:method:: list() -> Iterator[GetCustomAppIntegrationOutput] + .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput] Get custom oauth app integrations. - Get the list of custom oauth app integrations for the specified Databricks account + Get the list of custom OAuth app integrations for the specified Databricks account + + :param include_creator_username: bool (optional) + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetCustomAppIntegrationOutput` @@ -68,15 +70,14 @@ Updates Custom OAuth App Integration. - Updates an existing custom OAuth App Integration. 
You can retrieve the custom oauth app integration + Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param redirect_urls: List[str] (optional) - List of oauth redirect urls to be updated in the custom oauth app integration + List of OAuth redirect urls to be updated in the custom OAuth app integration :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the custom oauth app integration + Token access policy to be updated in the custom OAuth app integration \ No newline at end of file diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst index 69aecb8ad..18c07c326 100644 --- a/docs/account/oauth2/o_auth_published_apps.rst +++ b/docs/account/oauth2/o_auth_published_apps.rst @@ -15,7 +15,7 @@ Get all the available published OAuth apps in Databricks. :param page_size: int (optional) - The max number of OAuth published apps to return. + The max number of OAuth published apps to return in one page. :param page_token: str (optional) A token that can be used to get the next page of results. diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst index 0488415cd..f59f2c4aa 100644 --- a/docs/account/oauth2/published_app_integration.rst +++ b/docs/account/oauth2/published_app_integration.rst @@ -4,7 +4,7 @@ .. py:class:: PublishedAppIntegrationAPI - These APIs enable administrators to manage published oauth app integrations, which is required for + These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud. .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput @@ -13,10 +13,10 @@ Create Published OAuth App Integration. - You can retrieve the published oauth app integration via :method:PublishedAppIntegration/get. + You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param app_id: str (optional) - app_id of the oauth published app integration. For example power-bi, tableau-deskop + App id of the OAuth published app integration. For example power-bi, tableau-deskop :param token_access_policy: :class:`TokenAccessPolicy` (optional) Token access policy @@ -27,11 +27,10 @@ Delete Published OAuth App Integration. - Delete an existing Published OAuth App Integration. You can retrieve the published oauth app + Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. @@ -43,16 +42,18 @@ Gets the Published OAuth App Integration for the given integration id. :param integration_id: str - The oauth app integration ID. :returns: :class:`GetPublishedAppIntegrationOutput` - .. py:method:: list() -> Iterator[GetPublishedAppIntegrationOutput] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput] Get published oauth app integrations. 
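The published-app variant needs only the published `app_id` (`power-bi` comes from the docstring above); the `integration_id` field is assumed, as in the custom-app sketch:

```python
from databricks.sdk import AccountClient

a = AccountClient()

created = a.published_app_integration.create(app_id='power-bi')

# The list call now pages server-side; page_size is a soft page-length hint.
for integration in a.published_app_integration.list(page_size=50):
    print(integration.integration_id)

# cleanup
a.published_app_integration.delete(integration_id=created.integration_id)
```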
- Get the list of published oauth app integrations for the specified Databricks account + Get the list of published OAuth app integrations for the specified Databricks account + + :param page_size: int (optional) + :param page_token: str (optional) :returns: Iterator over :class:`GetPublishedAppIntegrationOutput` @@ -61,13 +62,12 @@ Updates Published OAuth App Integration. - Updates an existing published OAuth App Integration. You can retrieve the published oauth app + Updates an existing published OAuth App Integration. You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get. :param integration_id: str - The oauth app integration ID. :param token_access_policy: :class:`TokenAccessPolicy` (optional) - Token access policy to be updated in the published oauth app integration + Token access policy to be updated in the published OAuth app integration \ No newline at end of file diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst new file mode 100644 index 000000000..827a563b8 --- /dev/null +++ b/docs/dbdataclasses/apps.rst @@ -0,0 +1,144 @@ +Apps +==== + +These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.apps`` module. + +.. py:currentmodule:: databricks.sdk.service.apps +.. autoclass:: App + :members: + :undoc-members: + +.. autoclass:: AppAccessControlRequest + :members: + :undoc-members: + +.. autoclass:: AppAccessControlResponse + :members: + :undoc-members: + +.. autoclass:: AppDeployment + :members: + :undoc-members: + +.. autoclass:: AppDeploymentArtifacts + :members: + :undoc-members: + +.. py:class:: AppDeploymentMode + + .. py:attribute:: AUTO_SYNC + :value: "AUTO_SYNC" + + .. py:attribute:: SNAPSHOT + :value: "SNAPSHOT" + +.. py:class:: AppDeploymentState + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: IN_PROGRESS + :value: "IN_PROGRESS" + + .. py:attribute:: STOPPED + :value: "STOPPED" + + .. py:attribute:: SUCCEEDED + :value: "SUCCEEDED" + +.. autoclass:: AppDeploymentStatus + :members: + :undoc-members: + +.. autoclass:: AppPermission + :members: + :undoc-members: + +.. py:class:: AppPermissionLevel + + Permission level + + .. py:attribute:: CAN_MANAGE + :value: "CAN_MANAGE" + + .. py:attribute:: CAN_USE + :value: "CAN_USE" + +.. autoclass:: AppPermissions + :members: + :undoc-members: + +.. autoclass:: AppPermissionsDescription + :members: + :undoc-members: + +.. autoclass:: AppPermissionsRequest + :members: + :undoc-members: + +.. py:class:: AppState + + .. py:attribute:: CREATING + :value: "CREATING" + + .. py:attribute:: DELETED + :value: "DELETED" + + .. py:attribute:: DELETING + :value: "DELETING" + + .. py:attribute:: ERROR + :value: "ERROR" + + .. py:attribute:: IDLE + :value: "IDLE" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: STARTING + :value: "STARTING" + +.. autoclass:: AppStatus + :members: + :undoc-members: + +.. autoclass:: CreateAppDeploymentRequest + :members: + :undoc-members: + +.. autoclass:: CreateAppRequest + :members: + :undoc-members: + +.. autoclass:: DeleteResponse + :members: + :undoc-members: + +.. autoclass:: GetAppPermissionLevelsResponse + :members: + :undoc-members: + +.. autoclass:: ListAppDeploymentsResponse + :members: + :undoc-members: + +.. autoclass:: ListAppsResponse + :members: + :undoc-members: + +.. autoclass:: StartAppRequest + :members: + :undoc-members: + +.. autoclass:: StopAppRequest + :members: + :undoc-members: + +.. 
autoclass:: StopAppResponse + :members: + :undoc-members: + +.. autoclass:: UpdateAppRequest + :members: + :undoc-members: diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst index 27abdd35a..25deb0a18 100644 --- a/docs/dbdataclasses/billing.rst +++ b/docs/dbdataclasses/billing.rst @@ -4,23 +4,84 @@ Billing These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.billing`` module. .. py:currentmodule:: databricks.sdk.service.billing -.. autoclass:: Budget +.. autoclass:: ActionConfiguration :members: :undoc-members: -.. autoclass:: BudgetAlert +.. py:class:: ActionConfigurationType + + .. py:attribute:: EMAIL_NOTIFICATION + :value: "EMAIL_NOTIFICATION" + +.. autoclass:: AlertConfiguration + :members: + :undoc-members: + +.. py:class:: AlertConfigurationQuantityType + + .. py:attribute:: LIST_PRICE_DOLLARS_USD + :value: "LIST_PRICE_DOLLARS_USD" + +.. py:class:: AlertConfigurationTimePeriod + + .. py:attribute:: MONTH + :value: "MONTH" + +.. py:class:: AlertConfigurationTriggerType + + .. py:attribute:: CUMULATIVE_SPENDING_EXCEEDED + :value: "CUMULATIVE_SPENDING_EXCEEDED" + +.. autoclass:: BudgetConfiguration + :members: + :undoc-members: + +.. autoclass:: BudgetConfigurationFilter :members: :undoc-members: -.. autoclass:: BudgetList +.. autoclass:: BudgetConfigurationFilterClause :members: :undoc-members: -.. autoclass:: BudgetWithStatus +.. py:class:: BudgetConfigurationFilterOperator + + .. py:attribute:: IN + :value: "IN" + +.. autoclass:: BudgetConfigurationFilterTagClause :members: :undoc-members: -.. autoclass:: BudgetWithStatusStatusDailyItem +.. autoclass:: BudgetConfigurationFilterWorkspaceIdClause + :members: + :undoc-members: + +.. autoclass:: CreateBillingUsageDashboardRequest + :members: + :undoc-members: + +.. autoclass:: CreateBillingUsageDashboardResponse + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationBudget + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationBudgetActionConfigurations + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationBudgetAlertConfigurations + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationRequest + :members: + :undoc-members: + +.. autoclass:: CreateBudgetConfigurationResponse :members: :undoc-members: @@ -28,7 +89,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteResponse +.. autoclass:: DeleteBudgetConfigurationResponse :members: :undoc-members: @@ -55,6 +116,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetBillingUsageDashboardResponse + :members: + :undoc-members: + +.. autoclass:: GetBudgetConfigurationResponse + :members: + :undoc-members: + +.. autoclass:: ListBudgetConfigurationsResponse + :members: + :undoc-members: + .. py:class:: LogDeliveryConfigStatus Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed. @@ -102,22 +175,30 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest +.. 
autoclass:: UpdateBudgetConfigurationBudget :members: :undoc-members: -.. autoclass:: UpdateResponse +.. autoclass:: UpdateBudgetConfigurationRequest :members: :undoc-members: -.. autoclass:: WrappedBudget +.. autoclass:: UpdateBudgetConfigurationResponse :members: :undoc-members: -.. autoclass:: WrappedBudgetWithStatus +.. autoclass:: UpdateLogDeliveryConfigurationStatusRequest :members: :undoc-members: +.. py:class:: UsageDashboardType + + .. py:attribute:: USAGE_DASHBOARD_TYPE_GLOBAL + :value: "USAGE_DASHBOARD_TYPE_GLOBAL" + + .. py:attribute:: USAGE_DASHBOARD_TYPE_WORKSPACE + :value: "USAGE_DASHBOARD_TYPE_WORKSPACE" + .. autoclass:: WrappedCreateLogDeliveryConfiguration :members: :undoc-members: diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index e2c120bc9..d1195dd44 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -647,6 +647,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PARAM :value: "PARAM" +.. py:class:: GetBindingsSecurableType + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: EXTERNAL_LOCATION + :value: "EXTERNAL_LOCATION" + + .. py:attribute:: STORAGE_CREDENTIAL + :value: "STORAGE_CREDENTIAL" + .. autoclass:: GetMetastoreSummaryResponse :members: :undoc-members: @@ -940,9 +951,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ONLINE_PIPELINE_FAILED :value: "ONLINE_PIPELINE_FAILED" - .. py:attribute:: ONLINE_TABLE_STATE_UNSPECIFIED - :value: "ONLINE_TABLE_STATE_UNSPECIFIED" - .. py:attribute:: ONLINE_TRIGGERED_UPDATE :value: "ONLINE_TRIGGERED_UPDATE" @@ -1052,6 +1060,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: EXECUTE :value: "EXECUTE" + .. py:attribute:: MANAGE + :value: "MANAGE" + .. py:attribute:: MANAGE_ALLOWLIST :value: "MANAGE_ALLOWLIST" @@ -1304,6 +1315,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: UpdateBindingsSecurableType + + .. py:attribute:: CATALOG + :value: "CATALOG" + + .. py:attribute:: EXTERNAL_LOCATION + :value: "EXTERNAL_LOCATION" + + .. py:attribute:: STORAGE_CREDENTIAL + :value: "STORAGE_CREDENTIAL" + .. autoclass:: UpdateCatalog :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 64ab42682..7b280c519 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -817,10 +817,38 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListClustersFilterBy + :members: + :undoc-members: + .. autoclass:: ListClustersResponse :members: :undoc-members: +.. autoclass:: ListClustersSortBy + :members: + :undoc-members: + +.. py:class:: ListClustersSortByDirection + + The direction to sort by. + + .. py:attribute:: ASC + :value: "ASC" + + .. py:attribute:: DESC + :value: "DESC" + +.. py:class:: ListClustersSortByField + + The sorting criteria. By default, clusters are sorted by 3 columns from highest to lowest precedence: cluster state, pinned or unpinned, then cluster name. + + .. py:attribute:: CLUSTER_NAME + :value: "CLUSTER_NAME" + + .. py:attribute:: DEFAULT + :value: "DEFAULT" + .. autoclass:: ListGlobalInitScriptsResponse :members: :undoc-members: @@ -855,6 +883,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:class:: ListSortOrder + A generic ordering enum for list-based queries. + .. py:attribute:: ASC :value: "ASC" @@ -1308,6 +1338,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateCluster + :members: + :undoc-members: + +.. autoclass:: UpdateClusterResource + :members: + :undoc-members: + +.. autoclass:: UpdateClusterResponse + :members: + :undoc-members: + .. autoclass:: UpdateResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst index dca31d64b..8765ee695 100644 --- a/docs/dbdataclasses/dashboards.rst +++ b/docs/dbdataclasses/dashboards.rst @@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DASHBOARD_VIEW_BASIC :value: "DASHBOARD_VIEW_BASIC" - .. py:attribute:: DASHBOARD_VIEW_FULL - :value: "DASHBOARD_VIEW_FULL" - .. autoclass:: DeleteScheduleResponse :members: :undoc-members: @@ -40,6 +37,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenieAttachment + :members: + :undoc-members: + +.. autoclass:: GenieConversation + :members: + :undoc-members: + +.. autoclass:: GenieCreateConversationMessageRequest + :members: + :undoc-members: + +.. autoclass:: GenieGetMessageQueryResultResponse + :members: + :undoc-members: + +.. autoclass:: GenieMessage + :members: + :undoc-members: + +.. autoclass:: GenieStartConversationMessageRequest + :members: + :undoc-members: + +.. autoclass:: GenieStartConversationResponse + :members: + :undoc-members: + .. py:class:: LifecycleState .. py:attribute:: ACTIVE @@ -60,6 +85,154 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: MessageError + :members: + :undoc-members: + +.. py:class:: MessageErrorType + + .. py:attribute:: BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION + :value: "BLOCK_MULTIPLE_EXECUTIONS_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_CLIENT_EXCEPTION + :value: "CHAT_COMPLETION_CLIENT_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION + :value: "CHAT_COMPLETION_CLIENT_TIMEOUT_EXCEPTION" + + .. py:attribute:: CHAT_COMPLETION_NETWORK_EXCEPTION + :value: "CHAT_COMPLETION_NETWORK_EXCEPTION" + + .. py:attribute:: CONTENT_FILTER_EXCEPTION + :value: "CONTENT_FILTER_EXCEPTION" + + .. py:attribute:: CONTEXT_EXCEEDED_EXCEPTION + :value: "CONTEXT_EXCEEDED_EXCEPTION" + + .. py:attribute:: COULD_NOT_GET_UC_SCHEMA_EXCEPTION + :value: "COULD_NOT_GET_UC_SCHEMA_EXCEPTION" + + .. py:attribute:: DEPLOYMENT_NOT_FOUND_EXCEPTION + :value: "DEPLOYMENT_NOT_FOUND_EXCEPTION" + + .. py:attribute:: FUNCTIONS_NOT_AVAILABLE_EXCEPTION + :value: "FUNCTIONS_NOT_AVAILABLE_EXCEPTION" + + .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_EXCEPTION + :value: "FUNCTION_ARGUMENTS_INVALID_EXCEPTION" + + .. py:attribute:: FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION + :value: "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION" + + .. py:attribute:: FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION + :value: "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION" + + .. py:attribute:: GENERIC_CHAT_COMPLETION_EXCEPTION + :value: "GENERIC_CHAT_COMPLETION_EXCEPTION" + + .. py:attribute:: GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION + :value: "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION" + + .. py:attribute:: GENERIC_SQL_EXEC_API_CALL_EXCEPTION + :value: "GENERIC_SQL_EXEC_API_CALL_EXCEPTION" + + .. 
py:attribute:: ILLEGAL_PARAMETER_DEFINITION_EXCEPTION + :value: "ILLEGAL_PARAMETER_DEFINITION_EXCEPTION" + + .. py:attribute:: INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION + :value: "INVALID_CERTIFIED_ANSWER_FUNCTION_EXCEPTION" + + .. py:attribute:: INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION + :value: "INVALID_CERTIFIED_ANSWER_IDENTIFIER_EXCEPTION" + + .. py:attribute:: INVALID_CHAT_COMPLETION_JSON_EXCEPTION + :value: "INVALID_CHAT_COMPLETION_JSON_EXCEPTION" + + .. py:attribute:: INVALID_COMPLETION_REQUEST_EXCEPTION + :value: "INVALID_COMPLETION_REQUEST_EXCEPTION" + + .. py:attribute:: INVALID_FUNCTION_CALL_EXCEPTION + :value: "INVALID_FUNCTION_CALL_EXCEPTION" + + .. py:attribute:: INVALID_TABLE_IDENTIFIER_EXCEPTION + :value: "INVALID_TABLE_IDENTIFIER_EXCEPTION" + + .. py:attribute:: LOCAL_CONTEXT_EXCEEDED_EXCEPTION + :value: "LOCAL_CONTEXT_EXCEEDED_EXCEPTION" + + .. py:attribute:: MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION + :value: "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION" + + .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION + :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION" + + .. py:attribute:: NO_TABLES_TO_QUERY_EXCEPTION + :value: "NO_TABLES_TO_QUERY_EXCEPTION" + + .. py:attribute:: RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION + :value: "RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION" + + .. py:attribute:: RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION + :value: "RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION" + + .. py:attribute:: REPLY_PROCESS_TIMEOUT_EXCEPTION + :value: "REPLY_PROCESS_TIMEOUT_EXCEPTION" + + .. py:attribute:: RETRYABLE_PROCESSING_EXCEPTION + :value: "RETRYABLE_PROCESSING_EXCEPTION" + + .. py:attribute:: SQL_EXECUTION_EXCEPTION + :value: "SQL_EXECUTION_EXCEPTION" + + .. py:attribute:: TABLES_MISSING_EXCEPTION + :value: "TABLES_MISSING_EXCEPTION" + + .. py:attribute:: TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION + :value: "TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION" + + .. py:attribute:: TOO_MANY_TABLES_EXCEPTION + :value: "TOO_MANY_TABLES_EXCEPTION" + + .. py:attribute:: UNEXPECTED_REPLY_PROCESS_EXCEPTION + :value: "UNEXPECTED_REPLY_PROCESS_EXCEPTION" + + .. py:attribute:: UNKNOWN_AI_MODEL + :value: "UNKNOWN_AI_MODEL" + + .. py:attribute:: WAREHOUSE_ACCESS_MISSING_EXCEPTION + :value: "WAREHOUSE_ACCESS_MISSING_EXCEPTION" + + .. py:attribute:: WAREHOUSE_NOT_FOUND_EXCEPTION + :value: "WAREHOUSE_NOT_FOUND_EXCEPTION" + +.. py:class:: MessageStatus + + MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled. + + .. py:attribute:: ASKING_AI + :value: "ASKING_AI" + + .. py:attribute:: CANCELLED + :value: "CANCELLED" + + .. py:attribute:: COMPLETED + :value: "COMPLETED" + + .. 
py:attribute:: EXECUTING_QUERY + :value: "EXECUTING_QUERY" + + .. py:attribute:: FAILED + :value: "FAILED" + + .. py:attribute:: FETCHING_METADATA + :value: "FETCHING_METADATA" + + .. py:attribute:: QUERY_RESULT_EXPIRED + :value: "QUERY_RESULT_EXPIRED" + + .. py:attribute:: SUBMITTED + :value: "SUBMITTED" + .. autoclass:: MigrateDashboardRequest :members: :undoc-members: @@ -72,6 +245,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryAttachment + :members: + :undoc-members: + +.. autoclass:: Result + :members: + :undoc-members: + .. autoclass:: Schedule :members: :undoc-members: @@ -100,6 +281,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: TextAttachment + :members: + :undoc-members: + .. autoclass:: TrashDashboardResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst index 9cafb78df..643da3d47 100644 --- a/docs/dbdataclasses/iam.rst +++ b/docs/dbdataclasses/iam.rst @@ -20,7 +20,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: DeleteWorkspaceAssignments +.. autoclass:: DeleteWorkspacePermissionAssignmentResponse :members: :undoc-members: @@ -82,6 +82,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: MigratePermissionsRequest + :members: + :undoc-members: + +.. autoclass:: MigratePermissionsResponse + :members: + :undoc-members: + .. autoclass:: Name :members: :undoc-members: @@ -191,6 +199,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CAN_MANAGE_STAGING_VERSIONS :value: "CAN_MANAGE_STAGING_VERSIONS" + .. py:attribute:: CAN_MONITOR + :value: "CAN_MONITOR" + .. py:attribute:: CAN_QUERY :value: "CAN_QUERY" @@ -215,14 +226,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: IS_OWNER :value: "IS_OWNER" -.. autoclass:: PermissionMigrationRequest - :members: - :undoc-members: - -.. autoclass:: PermissionMigrationResponse - :members: - :undoc-members: - .. autoclass:: PermissionOutput :members: :undoc-members: diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst index 893e488d7..987bee7f5 100644 --- a/docs/dbdataclasses/index.rst +++ b/docs/dbdataclasses/index.rst @@ -5,6 +5,7 @@ Dataclasses .. toctree:: :maxdepth: 1 + apps billing catalog compute diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 81d81020a..0f501f77a 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -365,9 +365,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: HOURS :value: "HOURS" - .. py:attribute:: TIME_UNIT_UNSPECIFIED - :value: "TIME_UNIT_UNSPECIFIED" - .. py:attribute:: WEEKS :value: "WEEKS" diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst index 5204dd1ee..bb48967db 100644 --- a/docs/dbdataclasses/marketplace.rst +++ b/docs/dbdataclasses/marketplace.rst @@ -29,9 +29,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: ASSET_TYPE_NOTEBOOK :value: "ASSET_TYPE_NOTEBOOK" - .. py:attribute:: ASSET_TYPE_UNSPECIFIED - :value: "ASSET_TYPE_UNSPECIFIED" - .. 
autoclass:: BatchGetListingsResponse :members: :undoc-members: @@ -288,11 +285,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: FILE_STATUS_STAGING :value: "FILE_STATUS_STAGING" -.. py:class:: FilterType - - .. py:attribute:: METASTORE - :value: "METASTORE" - .. py:class:: FulfillmentType .. py:attribute:: INSTALL @@ -453,9 +445,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: LISTING_TAG_TYPE_TASK :value: "LISTING_TAG_TYPE_TASK" - .. py:attribute:: LISTING_TAG_TYPE_UNSPECIFIED - :value: "LISTING_TAG_TYPE_UNSPECIFIED" - .. py:class:: ListingType .. py:attribute:: PERSONALIZED @@ -494,29 +483,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ProviderIconFile - :members: - :undoc-members: - -.. py:class:: ProviderIconType - - .. py:attribute:: DARK - :value: "DARK" - - .. py:attribute:: PRIMARY - :value: "PRIMARY" - - .. py:attribute:: PROVIDER_ICON_TYPE_UNSPECIFIED - :value: "PROVIDER_ICON_TYPE_UNSPECIFIED" - .. autoclass:: ProviderInfo :members: :undoc-members: -.. autoclass:: ProviderListingSummaryInfo - :members: - :undoc-members: - .. autoclass:: RegionInfo :members: :undoc-members: @@ -545,20 +515,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. py:class:: SortBy - - .. py:attribute:: SORT_BY_DATE - :value: "SORT_BY_DATE" - - .. py:attribute:: SORT_BY_RELEVANCE - :value: "SORT_BY_RELEVANCE" - - .. py:attribute:: SORT_BY_TITLE - :value: "SORT_BY_TITLE" - - .. py:attribute:: SORT_BY_UNSPECIFIED - :value: "SORT_BY_UNSPECIFIED" - .. autoclass:: TokenDetail :members: :undoc-members: @@ -630,7 +586,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PUBLIC :value: "PUBLIC" - -.. autoclass:: VisibilityFilter - :members: - :undoc-members: diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst index 385bf2021..9d3d9c8a7 100644 --- a/docs/dbdataclasses/pipelines.rst +++ b/docs/dbdataclasses/pipelines.rst @@ -97,19 +97,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListPipelineEventsResponse +.. autoclass:: IngestionPipelineDefinition :members: :undoc-members: -.. autoclass:: ListPipelinesResponse +.. autoclass:: ListPipelineEventsResponse :members: :undoc-members: -.. autoclass:: ListUpdatesResponse +.. autoclass:: ListPipelinesResponse :members: :undoc-members: -.. autoclass:: ManagedIngestionPipelineDefinition +.. autoclass:: ListUpdatesResponse :members: :undoc-members: @@ -251,6 +251,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: PipelineStateInfoHealth + + The health of a pipeline. + + .. py:attribute:: HEALTHY + :value: "HEALTHY" + + .. py:attribute:: UNHEALTHY + :value: "UNHEALTHY" + .. autoclass:: PipelineTrigger :members: :undoc-members: diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst index 46cfe6a35..23ef3c257 100644 --- a/docs/dbdataclasses/serving.rst +++ b/docs/dbdataclasses/serving.rst @@ -32,84 +32,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: App - :members: - :undoc-members: - -.. autoclass:: AppDeployment - :members: - :undoc-members: - -.. 
autoclass:: AppDeploymentArtifacts - :members: - :undoc-members: - -.. py:class:: AppDeploymentMode - - .. py:attribute:: AUTO_SYNC - :value: "AUTO_SYNC" - - .. py:attribute:: MODE_UNSPECIFIED - :value: "MODE_UNSPECIFIED" - - .. py:attribute:: SNAPSHOT - :value: "SNAPSHOT" - -.. py:class:: AppDeploymentState - - .. py:attribute:: FAILED - :value: "FAILED" - - .. py:attribute:: IN_PROGRESS - :value: "IN_PROGRESS" - - .. py:attribute:: STATE_UNSPECIFIED - :value: "STATE_UNSPECIFIED" - - .. py:attribute:: STOPPED - :value: "STOPPED" - - .. py:attribute:: SUCCEEDED - :value: "SUCCEEDED" - -.. autoclass:: AppDeploymentStatus - :members: - :undoc-members: - -.. autoclass:: AppEnvironment - :members: - :undoc-members: - -.. py:class:: AppState - - .. py:attribute:: CREATING - :value: "CREATING" - - .. py:attribute:: DELETED - :value: "DELETED" - - .. py:attribute:: DELETING - :value: "DELETING" - - .. py:attribute:: ERROR - :value: "ERROR" - - .. py:attribute:: IDLE - :value: "IDLE" - - .. py:attribute:: RUNNING - :value: "RUNNING" - - .. py:attribute:: STARTING - :value: "STARTING" - - .. py:attribute:: STATE_UNSPECIFIED - :value: "STATE_UNSPECIFIED" - -.. autoclass:: AppStatus - :members: - :undoc-members: - .. autoclass:: AutoCaptureConfigInput :members: :undoc-members: @@ -147,14 +69,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: CreateAppDeploymentRequest - :members: - :undoc-members: - -.. autoclass:: CreateAppRequest - :members: - :undoc-members: - .. autoclass:: CreateServingEndpoint :members: :undoc-members: @@ -212,6 +126,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: NOT_UPDATING :value: "NOT_UPDATING" + .. py:attribute:: UPDATE_CANCELED + :value: "UPDATE_CANCELED" + .. py:attribute:: UPDATE_FAILED :value: "UPDATE_FAILED" @@ -229,10 +146,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: EnvVariable - :members: - :undoc-members: - .. autoclass:: ExportMetricsResponse :members: :undoc-members: @@ -243,7 +156,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ExternalModelProvider - The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and 'palm'.", + The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.", .. py:attribute:: AI21LABS :value: "AI21LABS" @@ -260,6 +173,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DATABRICKS_MODEL_SERVING :value: "DATABRICKS_MODEL_SERVING" + .. py:attribute:: GOOGLE_CLOUD_VERTEX_AI + :value: "GOOGLE_CLOUD_VERTEX_AI" + .. py:attribute:: OPENAI :value: "OPENAI" @@ -282,11 +198,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: ListAppDeploymentsResponse - :members: - :undoc-members: - -.. autoclass:: ListAppsResponse +.. autoclass:: GoogleCloudVertexAiConfig :members: :undoc-members: @@ -506,26 +418,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: StartAppRequest - :members: - :undoc-members: - -.. 
autoclass:: StopAppRequest - :members: - :undoc-members: - -.. autoclass:: StopAppResponse - :members: - :undoc-members: - .. autoclass:: TrafficConfig :members: :undoc-members: -.. autoclass:: UpdateAppRequest - :members: - :undoc-members: - .. autoclass:: V1ResponseChoiceElement :members: :undoc-members: diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst index cc142abf3..0031512e7 100644 --- a/docs/dbdataclasses/settings.rst +++ b/docs/dbdataclasses/settings.rst @@ -22,9 +22,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ClusterAutoRestartMessageMaintenanceWindowDayOfWeek - .. py:attribute:: DAY_OF_WEEK_UNSPECIFIED - :value: "DAY_OF_WEEK_UNSPECIFIED" - .. py:attribute:: FRIDAY :value: "FRIDAY" @@ -73,9 +70,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: THIRD_OF_MONTH :value: "THIRD_OF_MONTH" - .. py:attribute:: WEEK_DAY_FREQUENCY_UNSPECIFIED - :value: "WEEK_DAY_FREQUENCY_UNSPECIFIED" - .. autoclass:: ClusterAutoRestartMessageMaintenanceWindowWindowStartTime :members: :undoc-members: @@ -92,8 +86,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo Compliance stardard for SHIELD customers - .. py:attribute:: COMPLIANCE_STANDARD_UNSPECIFIED - :value: "COMPLIANCE_STANDARD_UNSPECIFIED" + .. py:attribute:: CANADA_PROTECTED_B + :value: "CANADA_PROTECTED_B" .. py:attribute:: CYBER_ESSENTIAL_PLUS :value: "CYBER_ESSENTIAL_PLUS" @@ -122,6 +116,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PCI_DSS :value: "PCI_DSS" +.. autoclass:: Config + :members: + :undoc-members: + .. autoclass:: CreateIpAccessList :members: :undoc-members: @@ -134,6 +132,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: CreateNotificationDestinationRequest + :members: + :undoc-members: + .. autoclass:: CreateOboTokenRequest :members: :undoc-members: @@ -202,6 +204,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DestinationType + + .. py:attribute:: EMAIL + :value: "EMAIL" + + .. py:attribute:: MICROSOFT_TEAMS + :value: "MICROSOFT_TEAMS" + + .. py:attribute:: PAGERDUTY + :value: "PAGERDUTY" + + .. py:attribute:: SLACK + :value: "SLACK" + + .. py:attribute:: WEBHOOK + :value: "WEBHOOK" + +.. autoclass:: EmailConfig + :members: + :undoc-members: + +.. autoclass:: Empty + :members: + :undoc-members: + .. autoclass:: EnhancedSecurityMonitoring :members: :undoc-members: @@ -234,6 +261,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GenericWebhookConfig + :members: + :undoc-members: + .. autoclass:: GetIpAccessListResponse :members: :undoc-members: @@ -266,6 +297,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListNotificationDestinationsResponse + :members: + :undoc-members: + +.. autoclass:: ListNotificationDestinationsResult + :members: + :undoc-members: + .. autoclass:: ListPublicTokensResponse :members: :undoc-members: @@ -285,6 +324,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: BLOCK :value: "BLOCK" +.. autoclass:: MicrosoftTeamsConfig + :members: + :undoc-members: + .. 
autoclass:: NccAwsStableIpRule :members: :undoc-members: @@ -349,6 +392,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: NotificationDestination + :members: + :undoc-members: + +.. autoclass:: PagerdutyConfig + :members: + :undoc-members: + .. autoclass:: PartitionId :members: :undoc-members: @@ -395,9 +446,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: RESTRICT_TOKENS_AND_JOB_RUN_AS :value: "RESTRICT_TOKENS_AND_JOB_RUN_AS" - .. py:attribute:: STATUS_UNSPECIFIED - :value: "STATUS_UNSPECIFIED" - .. autoclass:: RestrictWorkspaceAdminsSetting :members: :undoc-members: @@ -414,6 +462,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: SlackConfig + :members: + :undoc-members: + .. autoclass:: StringMessage :members: :undoc-members: @@ -488,6 +540,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateNotificationDestinationRequest + :members: + :undoc-members: + .. autoclass:: UpdatePersonalComputeSettingRequest :members: :undoc-members: diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst index f25f3f575..ded587fe5 100644 --- a/docs/dbdataclasses/sharing.rst +++ b/docs/dbdataclasses/sharing.rst @@ -265,6 +265,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: EXECUTE :value: "EXECUTE" + .. py:attribute:: MANAGE + :value: "MANAGE" + .. py:attribute:: MANAGE_ALLOWLIST :value: "MANAGE_ALLOWLIST" diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index fe1469a30..b39ea9edf 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -12,6 +12,49 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: AlertCondition + :members: + :undoc-members: + +.. autoclass:: AlertConditionOperand + :members: + :undoc-members: + +.. autoclass:: AlertConditionThreshold + :members: + :undoc-members: + +.. autoclass:: AlertOperandColumn + :members: + :undoc-members: + +.. autoclass:: AlertOperandValue + :members: + :undoc-members: + +.. py:class:: AlertOperator + + .. py:attribute:: EQUAL + :value: "EQUAL" + + .. py:attribute:: GREATER_THAN + :value: "GREATER_THAN" + + .. py:attribute:: GREATER_THAN_OR_EQUAL + :value: "GREATER_THAN_OR_EQUAL" + + .. py:attribute:: IS_NULL + :value: "IS_NULL" + + .. py:attribute:: LESS_THAN + :value: "LESS_THAN" + + .. py:attribute:: LESS_THAN_OR_EQUAL + :value: "LESS_THAN_OR_EQUAL" + + .. py:attribute:: NOT_EQUAL + :value: "NOT_EQUAL" + .. autoclass:: AlertOptions :members: :undoc-members: @@ -35,8 +78,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: AlertState - State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). - .. py:attribute:: OK :value: "OK" @@ -64,8 +105,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: ChannelName - Name of the channel - .. py:attribute:: CHANNEL_NAME_CURRENT :value: "CHANNEL_NAME_CURRENT" @@ -81,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. 
py:attribute:: CHANNEL_NAME_UNSPECIFIED :value: "CHANNEL_NAME_UNSPECIFIED" +.. autoclass:: ClientCallContext + :members: + :undoc-members: + .. autoclass:: ColumnInfo :members: :undoc-members: @@ -146,10 +189,38 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USER_DEFINED_TYPE :value: "USER_DEFINED_TYPE" +.. autoclass:: ContextFilter + :members: + :undoc-members: + .. autoclass:: CreateAlert :members: :undoc-members: +.. autoclass:: CreateAlertRequest + :members: + :undoc-members: + +.. autoclass:: CreateAlertRequestAlert + :members: + :undoc-members: + +.. autoclass:: CreateQueryRequest + :members: + :undoc-members: + +.. autoclass:: CreateQueryRequestQuery + :members: + :undoc-members: + +.. autoclass:: CreateVisualizationRequest + :members: + :undoc-members: + +.. autoclass:: CreateVisualizationRequestVisualization + :members: + :undoc-members: + .. autoclass:: CreateWarehouseRequest :members: :undoc-members: @@ -195,6 +266,90 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: DatePrecision + + .. py:attribute:: DAY_PRECISION + :value: "DAY_PRECISION" + + .. py:attribute:: MINUTE_PRECISION + :value: "MINUTE_PRECISION" + + .. py:attribute:: SECOND_PRECISION + :value: "SECOND_PRECISION" + +.. autoclass:: DateRange + :members: + :undoc-members: + +.. autoclass:: DateRangeValue + :members: + :undoc-members: + +.. py:class:: DateRangeValueDynamicDateRange + + .. py:attribute:: LAST_12_MONTHS + :value: "LAST_12_MONTHS" + + .. py:attribute:: LAST_14_DAYS + :value: "LAST_14_DAYS" + + .. py:attribute:: LAST_24_HOURS + :value: "LAST_24_HOURS" + + .. py:attribute:: LAST_30_DAYS + :value: "LAST_30_DAYS" + + .. py:attribute:: LAST_60_DAYS + :value: "LAST_60_DAYS" + + .. py:attribute:: LAST_7_DAYS + :value: "LAST_7_DAYS" + + .. py:attribute:: LAST_8_HOURS + :value: "LAST_8_HOURS" + + .. py:attribute:: LAST_90_DAYS + :value: "LAST_90_DAYS" + + .. py:attribute:: LAST_HOUR + :value: "LAST_HOUR" + + .. py:attribute:: LAST_MONTH + :value: "LAST_MONTH" + + .. py:attribute:: LAST_WEEK + :value: "LAST_WEEK" + + .. py:attribute:: LAST_YEAR + :value: "LAST_YEAR" + + .. py:attribute:: THIS_MONTH + :value: "THIS_MONTH" + + .. py:attribute:: THIS_WEEK + :value: "THIS_WEEK" + + .. py:attribute:: THIS_YEAR + :value: "THIS_YEAR" + + .. py:attribute:: TODAY + :value: "TODAY" + + .. py:attribute:: YESTERDAY + :value: "YESTERDAY" + +.. autoclass:: DateValue + :members: + :undoc-members: + +.. py:class:: DateValueDynamicDate + + .. py:attribute:: NOW + :value: "NOW" + + .. py:attribute:: YESTERDAY + :value: "YESTERDAY" + .. autoclass:: DeleteResponse :members: :undoc-members: @@ -205,13 +360,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: Disposition - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` format, in a series of chunks. If a given statement produces a result set with a size larger than 25 MiB, that statement execution is aborted, and no result set will be available. - **NOTE** Byte limits are computed based upon internal representations of the result set data, and might not match the sizes visible in JSON responses. - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: URLs that point to cloud storage internal to the workspace. 
Using `EXTERNAL_LINKS` disposition allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The resulting links have two important properties: - 1. They point to resources _external_ to the Databricks compute; therefore any associated authentication information (typically a personal access token, OAuth token, or similar) _must be removed_ when fetching from these links. - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when attempting to use an expired link is cloud specific. - .. py:attribute:: EXTERNAL_LINKS :value: "EXTERNAL_LINKS" @@ -243,6 +391,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: Empty + :members: + :undoc-members: + +.. autoclass:: EncodedText + :members: + :undoc-members: + +.. py:class:: EncodedTextEncoding + + Carry text data in different form. + + .. py:attribute:: BASE64 + :value: "BASE64" + + .. py:attribute:: PLAIN + :value: "PLAIN" + .. autoclass:: EndpointConfPair :members: :undoc-members: @@ -276,6 +442,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnumValue + :members: + :undoc-members: + .. autoclass:: ExecuteStatementRequest :members: :undoc-members: @@ -290,10 +460,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CONTINUE :value: "CONTINUE" -.. autoclass:: ExecuteStatementResponse - :members: - :undoc-members: - .. autoclass:: ExternalLink :members: :undoc-members: @@ -313,10 +479,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: -.. autoclass:: GetStatementResponse - :members: - :undoc-members: - .. autoclass:: GetWarehousePermissionLevelsResponse :members: :undoc-members: @@ -355,6 +517,47 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: PASSTHROUGH :value: "PASSTHROUGH" +.. autoclass:: LegacyAlert + :members: + :undoc-members: + +.. py:class:: LegacyAlertState + + State of the alert. Possible values are: `unknown` (yet to be evaluated), `triggered` (evaluated and fulfilled trigger conditions), or `ok` (evaluated and did not fulfill trigger conditions). + + .. py:attribute:: OK + :value: "OK" + + .. py:attribute:: TRIGGERED + :value: "TRIGGERED" + + .. py:attribute:: UNKNOWN + :value: "UNKNOWN" + +.. autoclass:: LegacyQuery + :members: + :undoc-members: + +.. autoclass:: LegacyVisualization + :members: + :undoc-members: + +.. py:class:: LifecycleState + + .. py:attribute:: ACTIVE + :value: "ACTIVE" + + .. py:attribute:: TRASHED + :value: "TRASHED" + +.. autoclass:: ListAlertsResponse + :members: + :undoc-members: + +.. autoclass:: ListAlertsResponseAlert + :members: + :undoc-members: + .. py:class:: ListOrder .. py:attribute:: CREATED_AT @@ -367,10 +570,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListQueryObjectsResponse + :members: + :undoc-members: + +.. autoclass:: ListQueryObjectsResponseQuery + :members: + :undoc-members: + .. autoclass:: ListResponse :members: :undoc-members: +.. autoclass:: ListVisualizationsForQueryResponse + :members: + :undoc-members: + .. autoclass:: ListWarehousesResponse :members: :undoc-members: @@ -379,6 +594,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
autoclass:: NumericValue + :members: + :undoc-members: + .. py:class:: ObjectType A singular noun object type. @@ -469,7 +688,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: PlansState - Whether plans exist for the execution, or the reason why they are missing + Possible Reasons for which we have not saved plans in the database .. py:attribute:: EMPTY :value: "EMPTY" @@ -493,6 +712,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryBackedValue + :members: + :undoc-members: + .. autoclass:: QueryEditContent :members: :undoc-members: @@ -517,13 +740,87 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueryParameter + :members: + :undoc-members: + .. autoclass:: QueryPostContent :members: :undoc-members: -.. py:class:: QueryStatementType +.. autoclass:: QuerySource + :members: + :undoc-members: + +.. autoclass:: QuerySourceDriverInfo + :members: + :undoc-members: + +.. py:class:: QuerySourceEntryPoint + + Spark service that received and processed the query + + .. py:attribute:: DLT + :value: "DLT" + + .. py:attribute:: SPARK_CONNECT + :value: "SPARK_CONNECT" + + .. py:attribute:: THRIFT_SERVER + :value: "THRIFT_SERVER" + +.. py:class:: QuerySourceJobManager + + Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to accommodate JOB_MANAGER_UNSPECIFIED - Type of statement for this query + .. py:attribute:: APP_SYSTEM_TABLE + :value: "APP_SYSTEM_TABLE" + + .. py:attribute:: AUTOML + :value: "AUTOML" + + .. py:attribute:: AUTO_MAINTENANCE + :value: "AUTO_MAINTENANCE" + + .. py:attribute:: CLEAN_ROOMS + :value: "CLEAN_ROOMS" + + .. py:attribute:: DATA_MONITORING + :value: "DATA_MONITORING" + + .. py:attribute:: DATA_SHARING + :value: "DATA_SHARING" + + .. py:attribute:: ENCRYPTION + :value: "ENCRYPTION" + + .. py:attribute:: FABRIC_CRAWLER + :value: "FABRIC_CRAWLER" + + .. py:attribute:: JOBS + :value: "JOBS" + + .. py:attribute:: LAKEVIEW + :value: "LAKEVIEW" + + .. py:attribute:: MANAGED_RAG + :value: "MANAGED_RAG" + + .. py:attribute:: SCHEDULED_MV_REFRESH + :value: "SCHEDULED_MV_REFRESH" + + .. py:attribute:: TESTING + :value: "TESTING" + +.. py:class:: QuerySourceTrigger + + .. py:attribute:: MANUAL + :value: "MANUAL" + + .. py:attribute:: SCHEDULED + :value: "SCHEDULED" + +.. py:class:: QueryStatementType .. py:attribute:: ALTER :value: "ALTER" @@ -593,11 +890,17 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:class:: QueryStatus - Query status with one the following values: * `QUEUED`: Query has been received and queued. * `RUNNING`: Query has started. * `CANCELED`: Query has been cancelled by the user. * `FAILED`: Query has failed. * `FINISHED`: Query has completed. + Statuses which are also used by OperationStatus in runtime .. py:attribute:: CANCELED :value: "CANCELED" + .. py:attribute:: COMPILED + :value: "COMPILED" + + .. py:attribute:: COMPILING + :value: "COMPILING" + .. py:attribute:: FAILED :value: "FAILED" @@ -610,6 +913,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: RUNNING :value: "RUNNING" + .. py:attribute:: STARTED + :value: "STARTED" + .. autoclass:: RepeatedEndpointConfPairs :members: :undoc-members: @@ -630,6 +936,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. 
py:class:: RunAsMode + + .. py:attribute:: OWNER + :value: "OWNER" + + .. py:attribute:: VIEWER + :value: "VIEWER" + .. py:class:: RunAsRole Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as viewer" behavior) or `"owner"` (signifying "run as owner" behavior) @@ -640,6 +954,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: VIEWER :value: "VIEWER" +.. autoclass:: ServerlessChannelInfo + :members: + :undoc-members: + .. autoclass:: ServiceError :members: :undoc-members: @@ -756,6 +1074,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: StatementResponse + :members: + :undoc-members: + .. py:class:: StatementState Statement execution state: - `PENDING`: waiting for warehouse - `RUNNING`: running - `SUCCEEDED`: execution was successful, result data available for fetch - `FAILED`: execution failed; reason for failure described in accomanying error message - `CANCELED`: user canceled; can come from explicit cancel call, or timeout with `on_wait_timeout=CANCEL` - `CLOSED`: execution successful, and statement closed; result no longer available for fetch @@ -1072,6 +1394,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: SUCCESS :value: "SUCCESS" +.. autoclass:: TextValue + :members: + :undoc-members: + .. autoclass:: TimeRange :members: :undoc-members: @@ -1080,10 +1406,34 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: UpdateAlertRequest + :members: + :undoc-members: + +.. autoclass:: UpdateAlertRequestAlert + :members: + :undoc-members: + +.. autoclass:: UpdateQueryRequest + :members: + :undoc-members: + +.. autoclass:: UpdateQueryRequestQuery + :members: + :undoc-members: + .. autoclass:: UpdateResponse :members: :undoc-members: +.. autoclass:: UpdateVisualizationRequest + :members: + :undoc-members: + +.. autoclass:: UpdateVisualizationRequestVisualization + :members: + :undoc-members: + .. autoclass:: User :members: :undoc-members: @@ -1111,6 +1461,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CAN_MANAGE :value: "CAN_MANAGE" + .. py:attribute:: CAN_MONITOR + :value: "CAN_MONITOR" + .. py:attribute:: CAN_USE :value: "CAN_USE" diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst new file mode 100644 index 000000000..455bb81cc --- /dev/null +++ b/docs/workspace/apps/apps.rst @@ -0,0 +1,220 @@ +``w.apps``: Apps +================ +.. currentmodule:: databricks.sdk.service.apps + +.. py:class:: AppsAPI + + Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend + Databricks services, and enable users to interact through single sign-on. + + .. py:method:: create(name: str [, description: Optional[str]]) -> Wait[App] + + Create an app. + + Creates a new app. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: + Long-running operation waiter for :class:`App`. + See :method:wait_get_app_idle for more details. + + + .. py:method:: create_and_wait(name: str [, description: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> App + + + .. py:method:: delete(name: str) + + Delete an app. 
+ + Deletes an app. + + :param name: str + The name of the app. + + + + + .. py:method:: deploy(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode]]) -> Wait[AppDeployment] + + Create an app deployment. + + Creates an app deployment for the app with the supplied name. + + :param app_name: str + The name of the app. + :param source_code_path: str + The workspace file system path of the source code used to create the app deployment. This is + different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app. + The former refers to the original source code location of the app in the workspace during deployment + creation, whereas the latter provides a system generated stable snapshotted source code path used by + the deployment. + :param mode: :class:`AppDeploymentMode` (optional) + The mode of which the deployment will manage the source code. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + + + .. py:method:: deploy_and_wait(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment + + + .. py:method:: get(name: str) -> App + + Get an app. + + Retrieves information for the app with the supplied name. + + :param name: str + The name of the app. + + :returns: :class:`App` + + + .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment + + Get an app deployment. + + Retrieves information for the app deployment with the supplied name and deployment id. + + :param app_name: str + The name of the app. + :param deployment_id: str + The unique id of the deployment. + + :returns: :class:`AppDeployment` + + + .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse + + Get app permission levels. + + Gets the permission levels that a user can have on an object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`GetAppPermissionLevelsResponse` + + + .. py:method:: get_permissions(app_name: str) -> AppPermissions + + Get app permissions. + + Gets the permissions of an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + + :returns: :class:`AppPermissions` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App] + + List apps. + + Lists all apps in the workspace. + + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`App` + + + .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment] + + List app deployments. + + Lists all app deployments for the app with the supplied name. + + :param app_name: str + The name of the app. + :param page_size: int (optional) + Upper bound for items returned. + :param page_token: str (optional) + Pagination token to go to the next page of apps. Requests first page if absent. + + :returns: Iterator over :class:`AppDeployment` + + + .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions + + Set app permissions. + + Sets permissions on an app. Apps can inherit permissions from their root object. 
+ + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + + + .. py:method:: start(name: str) -> Wait[AppDeployment] + + Start an app. + + Start the last active deployment of the app in the workspace. + + :param name: str + The name of the app. + + :returns: + Long-running operation waiter for :class:`AppDeployment`. + See :method:wait_get_deployment_app_succeeded for more details. + + + .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> AppDeployment + + + .. py:method:: stop(name: str) + + Stop an app. + + Stops the active deployment of the app in the workspace. + + :param name: str + The name of the app. + + + + + .. py:method:: update(name: str [, description: Optional[str]]) -> App + + Update an app. + + Updates the app with the supplied name. + + :param name: str + The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It + must be unique within the workspace. + :param description: str (optional) + The description of the app. + + :returns: :class:`App` + + + .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions + + Update app permissions. + + Updates the permissions on an app. Apps can inherit permissions from their root object. + + :param app_name: str + The app for which to get or manage permissions. + :param access_control_list: List[:class:`AppAccessControlRequest`] (optional) + + :returns: :class:`AppPermissions` + + + .. py:method:: wait_get_app_idle(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App + + + .. py:method:: wait_get_deployment_app_succeeded(app_name: str, deployment_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[AppDeployment], None]]) -> AppDeployment diff --git a/docs/workspace/apps/index.rst b/docs/workspace/apps/index.rst new file mode 100644 index 000000000..bd21c93a5 --- /dev/null +++ b/docs/workspace/apps/index.rst @@ -0,0 +1,10 @@ + +Apps +==== + +Build custom applications on Databricks + +.. toctree:: + :maxdepth: 1 + + apps \ No newline at end of file diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst index 017a6aa15..bae6f25f8 100644 --- a/docs/workspace/catalog/model_versions.rst +++ b/docs/workspace/catalog/model_versions.rst @@ -30,7 +30,7 @@ - .. py:method:: get(full_name: str, version: int [, include_browse: Optional[bool]]) -> RegisteredModelInfo + .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo Get a Model Version. @@ -44,14 +44,16 @@ The three-level (fully qualified) name of the model version :param version: int The integer version number of the model version + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :param include_browse: bool (optional) Whether to include model versions in the response for which the principal can only access selective metadata for - :returns: :class:`RegisteredModelInfo` + :returns: :class:`ModelVersionInfo` - .. py:method:: get_by_alias(full_name: str, alias: str) -> ModelVersionInfo + .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo Get Model Version By Alias. 
@@ -65,6 +67,8 @@ The three-level (fully qualified) name of the registered model :param alias: str The name of the alias + :param include_aliases: bool (optional) + Whether to include aliases associated with the model version in the response :returns: :class:`ModelVersionInfo` diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst index 6a60c4f6d..b05a702b5 100644 --- a/docs/workspace/catalog/registered_models.rst +++ b/docs/workspace/catalog/registered_models.rst @@ -91,7 +91,7 @@ - .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> RegisteredModelInfo + .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo Get a Registered Model. @@ -103,6 +103,8 @@ :param full_name: str The three-level (fully qualified) name of the registered model + :param include_aliases: bool (optional) + Whether to include registered model aliases in the response :param include_browse: bool (optional) Whether to include registered models in the response for which the principal can only access selective metadata for diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst index 1c9fcbbd0..feaf7c7a0 100644 --- a/docs/workspace/catalog/schemas.rst +++ b/docs/workspace/catalog/schemas.rst @@ -49,7 +49,7 @@ :returns: :class:`SchemaInfo` - .. py:method:: delete(full_name: str) + .. py:method:: delete(full_name: str [, force: Optional[bool]]) Delete a schema. @@ -58,6 +58,8 @@ :param full_name: str Full name of the schema. + :param force: bool (optional) + Force deletion even if the schema is not empty. diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst index b9ab3b0f9..2028a3623 100644 --- a/docs/workspace/catalog/system_schemas.rst +++ b/docs/workspace/catalog/system_schemas.rst @@ -37,7 +37,7 @@ - .. py:method:: list(metastore_id: str) -> Iterator[SystemSchemaInfo] + .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo] List system schemas. @@ -46,6 +46,13 @@ :param metastore_id: str The ID for the metastore in which the system schema resides. + :param max_results: int (optional) + Maximum number of schemas to return. - When set to 0, the page length is set to a server configured + value (recommended); - When set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - When set to a value less than 0, an invalid parameter error + is returned; - If not set, all the schemas are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`SystemSchemaInfo` \ No newline at end of file diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst index e1ec753d4..08a74b29e 100644 --- a/docs/workspace/catalog/workspace_bindings.rst +++ b/docs/workspace/catalog/workspace_bindings.rst @@ -17,7 +17,7 @@ the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the ability to bind a securable in READ_ONLY mode (catalogs only). - Securables that support binding: - catalog + Securable types that support binding: - catalog - storage_credential - external_location .. py:method:: get(name: str) -> CurrentWorkspaceBindings @@ -50,19 +50,26 @@ :returns: :class:`CurrentWorkspaceBindings` - .. 
py:method:: get_bindings(securable_type: str, securable_name: str) -> WorkspaceBindingsResponse + .. py:method:: get_bindings(securable_type: GetBindingsSecurableType, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding] Get securable workspace bindings. Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`GetBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. + :param max_results: int (optional) + Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server + configured value (recommended); - When set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - When set to a value less than 0, an invalid parameter + error is returned; - If not set, all the workspace bindings are returned (not recommended). + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. - :returns: :class:`WorkspaceBindingsResponse` + :returns: Iterator over :class:`WorkspaceBinding` .. py:method:: update(name: str [, assign_workspaces: Optional[List[int]], unassign_workspaces: Optional[List[int]]]) -> CurrentWorkspaceBindings @@ -103,15 +110,15 @@ :returns: :class:`CurrentWorkspaceBindings` - .. py:method:: update_bindings(securable_type: str, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse + .. py:method:: update_bindings(securable_type: UpdateBindingsSecurableType, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse Update securable workspace bindings. Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the securable. - :param securable_type: str - The type of the securable. + :param securable_type: :class:`UpdateBindingsSecurableType` + The type of the securable to bind to a workspace. :param securable_name: str The name of the securable. :param add: List[:class:`WorkspaceBinding`] (optional) diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst index b6e67acff..1cefc8ca6 100644 --- a/docs/workspace/compute/cluster_policies.rst +++ b/docs/workspace/compute/cluster_policies.rst @@ -22,7 +22,7 @@ If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create, edit, and delete policies. Admin users also have access to all policies. - .. py:method:: create(name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse + .. py:method:: create( [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse Usage: @@ -51,9 +51,6 @@ Creates a new policy with prescribed settings. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. 
:param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -66,6 +63,9 @@ :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -96,7 +96,7 @@ - .. py:method:: edit(policy_id: str, name: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) + .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) Usage: @@ -140,9 +140,6 @@ :param policy_id: str The ID of the policy to update. - :param name: str - Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 - characters. :param definition: str (optional) Policy definition document expressed in [Databricks Cluster Policy Definition Language]. @@ -155,6 +152,9 @@ :param max_clusters_per_user: int (optional) Max number of clusters per user that can be active using this policy. If not present, there is no max limit. + :param name: str (optional) + Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100 + characters. :param policy_family_definition_overrides: str (optional) Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON document must be passed as a string and cannot be embedded in the requests. @@ -205,7 +205,7 @@ Get a cluster policy entity. Creation and editing is available to admins only. :param policy_id: str - Canonical unique identifier for the cluster policy. + Canonical unique identifier for the Cluster Policy. :returns: :class:`Policy` diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst index 58362d05e..601b55812 100644 --- a/docs/workspace/compute/clusters.rst +++ b/docs/workspace/compute/clusters.rst @@ -21,9 +21,8 @@ restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive analysis. - IMPORTANT: Databricks retains cluster configuration information for up to 200 all-purpose clusters - terminated in the last 30 days and up to 30 job clusters recently terminated by the job scheduler. To keep - an all-purpose cluster configuration even after it has been terminated for more than 30 days, an + IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To + keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an administrator can pin a cluster to the cluster list. .. py:method:: change_owner(cluster_id: str, owner_username: str) @@ -604,7 +603,7 @@ :returns: :class:`ClusterPermissions` - .. py:method:: list( [, can_use_client: Optional[str]]) -> Iterator[ClusterDetails] + .. 
py:method:: list( [, can_use_client: Optional[str]]) -> Iterator[ClusterDetails]
+    .. py:method:: list( [, filter_by: Optional[ListClustersFilterBy], page_size: Optional[int], page_token: Optional[str], sort_by: Optional[ListClustersSortBy]]) -> Iterator[ClusterDetails]

        Usage:

        .. code-block::

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()

            all = w.clusters.list(compute.ListClustersRequest())

-        List all clusters.
+        List clusters.

-        Return information about all pinned clusters, active clusters, up to 200 of the most recently
-        terminated all-purpose clusters in the past 30 days, and up to 30 of the most recently terminated job
-        clusters in the past 30 days.
+        Return information about all pinned and active clusters, and all clusters terminated within the last
+        30 days. Clusters terminated prior to this period are not included.

-        For example, if there is 1 pinned cluster, 4 active clusters, 45 terminated all-purpose clusters in
-        the past 30 days, and 50 terminated job clusters in the past 30 days, then this API returns the 1
-        pinned cluster, 4 active clusters, all 45 terminated all-purpose clusters, and the 30 most recently
-        terminated job clusters.
-
-        :param can_use_client: str (optional)
-          Filter clusters based on what type of client it can be used for. Could be either NOTEBOOKS or JOBS.
-          No input for this field will get all clusters in the workspace without filtering on its supported
-          client
+        :param filter_by: :class:`ListClustersFilterBy` (optional)
+          Filters to apply to the list of clusters.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          Use next_page_token or prev_page_token returned from the previous request to list the next or
+          previous page of clusters respectively.
+        :param sort_by: :class:`ListClustersSortBy` (optional)
+          Sort the list of clusters by specific criteria.

        :returns: Iterator over :class:`ClusterDetails`

@@ -1000,6 +999,37 @@

+    .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails]
+
+        Update cluster configuration (partial).
+
+        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+        updated.
+
+        :param cluster_id: str
+          ID of the cluster.
+        :param update_mask: str
+          Specifies which fields of the cluster will be updated. This is required in the POST request. The
+          update mask should be supplied as a single string. To specify multiple fields, separate them with
+          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+          string but omit it from the `cluster` object.
+        :param cluster: :class:`UpdateClusterResource` (optional)
+          The cluster to be updated.
+
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+
+
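+        The example below is an editorial sketch, not generated reference content: it renames a
+        cluster and resizes it to two workers. The ``cluster_id`` value is a placeholder, and it
+        assumes :class:`UpdateClusterResource` exposes the usual ``cluster_name`` and ``num_workers``
+        fields.
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import compute
+
+            w = WorkspaceClient()
+
+            # Placeholder ID; use one returned by w.clusters.list().
+            cluster_id = "1234-567890-abcde123"
+
+            # Only the fields named in update_mask are changed; everything else
+            # keeps its current value. Separate multiple fields with commas, no spaces.
+            updated = w.clusters.update_and_wait(
+                cluster_id=cluster_id,
+                update_mask="cluster_name,num_workers",
+                cluster=compute.UpdateClusterResource(
+                    cluster_name="renamed-cluster",
+                    num_workers=2,
+                ),
+            )
+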
+    .. py:method:: update_and_wait(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+
+
    .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions

        Update cluster permissions.
diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst
index a5b94b5a5..916a48ba5 100644
--- a/docs/workspace/compute/command_execution.rst
+++ b/docs/workspace/compute/command_execution.rst
@@ -4,7 +4,8 @@
 .. py:class:: CommandExecutionAPI

-    This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters.
+    This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
+    only supports (classic) all-purpose clusters. Serverless compute is not supported.

    .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse]
diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst
index 43194ef01..56e4f4275 100644
--- a/docs/workspace/compute/policy_families.rst
+++ b/docs/workspace/compute/policy_families.rst
@@ -14,7 +14,7 @@
    policy family. Cluster policies created using a policy family inherit the policy family's policy
    definition.

-    .. py:method:: get(policy_family_id: str) -> PolicyFamily
+    .. py:method:: get(policy_family_id: str [, version: Optional[int]]) -> PolicyFamily

        Usage:

@@ -32,9 +32,12 @@

        Get policy family information.

-        Retrieve the information for an policy family based on its identifier.
+        Retrieve the information for a policy family based on its identifier and version.

        :param policy_family_id: str
+          The family ID about which to retrieve information.
+        :param version: int (optional)
+          The version number for the family to fetch. Defaults to the latest version.

        :returns: :class:`PolicyFamily`

@@ -55,10 +58,11 @@

        List policy families.

-        Retrieve a list of policy families. This API is paginated.
+        Returns the list of policy definition types available to use at their latest version. This API is
+        paginated.

        :param max_results: int (optional)
-          The max number of policy families to return.
+          Maximum number of policy families to return.
        :param page_token: str (optional)
          A token that can be used to get the next page of results.
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
new file mode 100644
index 000000000..5581870b9
--- /dev/null
+++ b/docs/workspace/dashboards/genie.rst
@@ -0,0 +1,102 @@
+``w.genie``: Genie
+==================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: GenieAPI
+
+    Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled.
+
+    .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]
+
+        Create conversation message.
+
+        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+
+
+    .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
+
+
+    .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
+
+        Execute SQL query in a conversation message.
+
+        Execute the SQL query in the message.
+
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+
+
+    .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage
+
+        Get conversation message.
+
+        Get message from conversation.
+
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+
+        :returns: :class:`GenieMessage`
+
+
+    .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
+
+        Get conversation message SQL query result.
+
+        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.
+
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+
+
+    .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage]
+
+        Start conversation.
+
+        Start a new conversation.
+
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+
+
+    .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
+
+
+    .. py:method:: wait_get_message_genie_completed(conversation_id: str, message_id: str, space_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GenieMessage], None]]) -> GenieMessage
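+
+    Usage sketch (an editorial addition, not generated reference content). The space ID and
+    questions below are placeholders, and it assumes :class:`GenieMessage` exposes ``id`` and
+    ``conversation_id`` fields:
+
+    .. code-block::
+
+        from databricks.sdk import WorkspaceClient
+
+        w = WorkspaceClient()
+
+        # Placeholder space ID; copy it from the Genie space URL in the UI.
+        space_id = "01ef0000000000000000000000000000"
+
+        # Start a conversation and wait for the first message to complete.
+        first = w.genie.start_conversation_and_wait(space_id, "What was revenue last month?")
+
+        # If the answer produced a SQL query, fetch the query result explicitly.
+        result = w.genie.get_message_query_result(space_id, first.conversation_id, first.id)
+
+        # Follow-up questions reuse the same conversation for context.
+        followup = w.genie.create_message_and_wait(
+            space_id, first.conversation_id, "Break that down by region"
+        )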
If unset, the response
-          defaults to `DASHBOARD_VIEW_BASIC` which only includes summary metadata from the dashboard.
+          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
 
        :returns: Iterator over :class:`Dashboard`
 
diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst
index 16d15f734..8eef6e0e1 100644
--- a/docs/workspace/iam/permission_migration.rst
+++ b/docs/workspace/iam/permission_migration.rst
@@ -1,20 +1,17 @@
-``w.permission_migration``: Permission Migration
-================================================
+``w.permission_migration``: PermissionMigration
+===============================================
 .. currentmodule:: databricks.sdk.service.iam
 
 .. py:class:: PermissionMigrationAPI
 
-    This spec contains undocumented permission migration APIs used in https://github.com/databrickslabs/ucx.
+    APIs for migrating ACL permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx
 
-    .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> PermissionMigrationResponse
+    .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse
 
         Migrate Permissions.
 
-        Migrate a batch of permissions from a workspace local group to an account group.
-
         :param workspace_id: int
-          WorkspaceId of the associated workspace where the permission migration will occur. Both workspace
-          group and account group must be in this workspace.
+          WorkspaceId of the associated workspace where the permission migration will occur.
         :param from_workspace_group_name: str
           The name of the workspace group that permissions will be migrated from.
         :param to_account_group_name: str
@@ -22,5 +19,5 @@
         :param size: int (optional)
           The maximum number of permissions that will be migrated.
 
-        :returns: :class:`PermissionMigrationResponse`
+        :returns: :class:`MigratePermissionsResponse`
\ No newline at end of file
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 47ff4f37f..7deb9eafb 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -7,6 +7,8 @@
     Permissions API are used to create read, write, edit, update and manage access for various users on
     different objects and endpoints.
 
+    * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
+
     * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
     clusters.
 
@@ -42,7 +44,7 @@
     * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
 
     * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
-    manage directories, files, and notebooks.
+    manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
 
     For the mapping of the required permissions for specific actions or abilities and other important
     information, see [Access Control].
@@ -78,9 +80,9 @@
         object.
 
         :param request_object_type: str
-          The type of the request object. Can be one of the following: authorization, clusters,
-          cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines,
-          registered-models, repos, serving-endpoints, or warehouses.
+          The type of the request object.
Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -155,9 +157,9 @@ object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -173,9 +175,9 @@ root object. :param request_object_type: str - The type of the request object. Can be one of the following: authorization, clusters, - cluster-policies, directories, experiments, files, instance-pools, jobs, notebooks, pipelines, - registered-models, repos, serving-endpoints, or warehouses. + The type of the request object. Can be one of the following: alerts, authorization, clusters, + cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, + notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst index 4d7eabff8..1b6c5708c 100644 --- a/docs/workspace/index.rst +++ b/docs/workspace/index.rst @@ -7,6 +7,7 @@ These APIs are available from WorkspaceClient .. toctree:: :maxdepth: 1 + apps/index catalog/index compute/index dashboards/index diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst index 773f6fb85..c07c8e28e 100644 --- a/docs/workspace/jobs/jobs.rst +++ b/docs/workspace/jobs/jobs.rst @@ -120,7 +120,7 @@ .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run - .. py:method:: create( [, access_control_list: Optional[List[iam.AccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse + .. 
py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse Usage: @@ -156,7 +156,7 @@ Create a new job. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param continuous: :class:`Continuous` (optional) An optional continuous property for this job. The continuous property will ensure that there is @@ -164,7 +164,7 @@ :param deployment: :class:`JobDeployment` (optional) Deployment information for jobs managed by external sources. :param description: str (optional) - An optional description for the job. The maximum length is 1024 characters in UTF-8 encoding. + An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding. :param edit_mode: :class:`JobEditMode` (optional) Edit mode of the job. @@ -376,7 +376,7 @@ :returns: :class:`JobPermissions` - .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool]]) -> Run + .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run Usage: @@ -418,6 +418,9 @@ Whether to include the repair history in the response. :param include_resolved_values: bool (optional) Whether to include resolved parameter values in the response. + :param page_token: str (optional) + To list the next page or the previous page of job tasks, set this field to the value of the + `next_page_token` or `prev_page_token` returned in the GetJob response. :returns: :class:`Run` @@ -924,7 +927,7 @@ :returns: :class:`JobPermissions` - .. py:method:: submit( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run] + .. 
py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run] Usage: @@ -960,7 +963,7 @@ Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the run state after the job is submitted. - :param access_control_list: List[:class:`AccessControlRequest`] (optional) + :param access_control_list: List[:class:`JobAccessControlRequest`] (optional) List of permissions to set on the job. :param email_notifications: :class:`JobEmailNotifications` (optional) An optional set of email addresses notified when the run begins or completes. @@ -1011,7 +1014,7 @@ See :method:wait_get_run_job_terminated_or_skipped for more details. - .. py:method:: submit_and_wait( [, access_control_list: Optional[List[iam.AccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run + .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run .. py:method:: update(job_id: int [, fields_to_remove: Optional[List[str]], new_settings: Optional[JobSettings]]) diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst index 654fe82d4..242a8fce7 100644 --- a/docs/workspace/marketplace/consumer_listings.rst +++ b/docs/workspace/marketplace/consumer_listings.rst @@ -29,7 +29,7 @@ :returns: :class:`GetListingResponse` - .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] + .. 
py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing] List listings. @@ -39,7 +39,6 @@ Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) Filters each listing based on if it is free. :param is_private_exchange: bool (optional) @@ -50,15 +49,13 @@ :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) - Criteria for sorting the resulting set of listings. :param tags: List[:class:`ListingTag`] (optional) Matches any of the following tags :returns: Iterator over :class:`Listing` - .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_ascending: Optional[bool], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], sort_by: Optional[SortBy]]) -> Iterator[Listing] + .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing] Search listings. @@ -71,14 +68,12 @@ Matches any of the following asset types :param categories: List[:class:`Category`] (optional) Matches any of the following categories - :param is_ascending: bool (optional) :param is_free: bool (optional) :param is_private_exchange: bool (optional) :param page_size: int (optional) :param page_token: str (optional) :param provider_ids: List[str] (optional) Matches any of the following provider ids - :param sort_by: :class:`SortBy` (optional) :returns: Iterator over :class:`Listing` \ No newline at end of file diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst index a80e7c799..ce98ac5d4 100644 --- a/docs/workspace/pipelines/pipelines.rst +++ b/docs/workspace/pipelines/pipelines.rst @@ -15,7 +15,7 @@ also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected data quality and specify how to handle records that fail those expectations. - .. py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse + .. 
py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse Usage: @@ -80,7 +80,7 @@ The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. :param libraries: List[:class:`PipelineLibrary`] (optional) @@ -371,7 +371,7 @@ .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse - .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[ManagedIngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) + .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) Usage: @@ -452,7 +452,7 @@ The definition of a gateway pipeline to support CDC. :param id: str (optional) Unique identifier for this pipeline. - :param ingestion_definition: :class:`ManagedIngestionPipelineDefinition` (optional) + :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional) The configuration for a managed ingestion pipeline. These settings cannot be used with the 'libraries', 'target' or 'catalog' settings. 
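The rename of `ManagedIngestionPipelineDefinition` to `IngestionPipelineDefinition` shows up directly at call sites. A minimal sketch of creating a managed ingestion pipeline with the renamed class; the `connection_name` field and its value are assumptions, since the definition's fields are not part of this diff:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import pipelines

    w = WorkspaceClient()

    # The renamed type; per the docs above it cannot be combined with the
    # 'libraries', 'target', or 'catalog' settings.
    created = w.pipelines.create(
        name="cdc-ingestion",
        ingestion_definition=pipelines.IngestionPipelineDefinition(
            connection_name="my-connection"))  # assumed field, placeholder value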
:param libraries: List[:class:`PipelineLibrary`] (optional) diff --git a/docs/workspace/serving/index.rst b/docs/workspace/serving/index.rst index 1d0bdf7fc..7a39a4043 100644 --- a/docs/workspace/serving/index.rst +++ b/docs/workspace/serving/index.rst @@ -7,6 +7,5 @@ Use real-time inference for machine learning .. toctree:: :maxdepth: 1 - apps serving_endpoints serving_endpoints_data_plane \ No newline at end of file diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst index 5b56652ec..d513ea9fd 100644 --- a/docs/workspace/settings/index.rst +++ b/docs/workspace/settings/index.rst @@ -9,6 +9,7 @@ Manage security settings for Accounts and Workspaces credentials_manager ip_access_lists + notification_destinations settings automatic_cluster_update compliance_security_profile diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst new file mode 100644 index 000000000..29d947f55 --- /dev/null +++ b/docs/workspace/settings/notification_destinations.rst @@ -0,0 +1,74 @@ +``w.notification_destinations``: Notification Destinations +========================================================== +.. currentmodule:: databricks.sdk.service.settings + +.. py:class:: NotificationDestinationsAPI + + The notification destinations API lets you programmatically manage a workspace's notification + destinations. Notification destinations are used to send notifications for query alerts and jobs to + destinations outside of Databricks. Only workspace admins can create, update, and delete notification + destinations. + + .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination + + Create a notification destination. + + Creates a notification destination. Requires workspace admin permissions. + + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. + + :returns: :class:`NotificationDestination` + + + .. py:method:: delete(id: str) + + Delete a notification destination. + + Deletes a notification destination. Requires workspace admin permissions. + + :param id: str + + + + + .. py:method:: get(id: str) -> NotificationDestination + + Get a notification destination. + + Gets a notification destination. + + :param id: str + + :returns: :class:`NotificationDestination` + + + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult] + + List notification destinations. + + Lists notification destinations. + + :param page_size: int (optional) + :param page_token: str (optional) + + :returns: Iterator over :class:`ListNotificationDestinationsResult` + + + .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination + + Update a notification destination. + + Updates a notification destination. Requires workspace admin permissions. At least one field is + required in the request body. + + :param id: str + :param config: :class:`Config` (optional) + The configuration for the notification destination. Must wrap EXACTLY one of the nested configs. + :param display_name: str (optional) + The display name for the notification destination. 
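The new notification destinations API ships without usage snippets, so here is a minimal sketch of the admin workflow: create an email destination, then rename it. That `Config` must wrap exactly one nested config is documented above; the `email=EmailConfig(addresses=[...])` shape and the address itself are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # Config must wrap exactly one nested destination config; email is used here.
    dest = w.notification_destinations.create(
        display_name="data-team-oncall",
        config=settings.Config(
            email=settings.EmailConfig(addresses=["oncall@example.com"])))

    # Rename it later; at least one field is required on update.
    w.notification_destinations.update(id=dest.id, display_name="data-team-primary")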
+ + :returns: :class:`NotificationDestination` + \ No newline at end of file diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst index 1382b5a92..7cf398ac0 100644 --- a/docs/workspace/sharing/providers.rst +++ b/docs/workspace/sharing/providers.rst @@ -100,7 +100,7 @@ :returns: :class:`ProviderInfo` - .. py:method:: list( [, data_provider_global_metastore_id: Optional[str]]) -> Iterator[ProviderInfo] + .. py:method:: list( [, data_provider_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo] Usage: @@ -123,11 +123,21 @@ :param data_provider_global_metastore_id: str (optional) If not provided, all providers will be returned. If no providers exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of providers to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid providers are returned (not recommended). - Note: The + number of returned providers might be less than the specified max_results size, even zero. The only + definitive indication that no further providers can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderInfo` - .. py:method:: list_shares(name: str) -> Iterator[ProviderShare] + .. py:method:: list_shares(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderShare] Usage: @@ -162,6 +172,16 @@ :param name: str Name of the provider in which to list shares. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`ProviderShare` diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst index 86a004d36..44f2042bb 100644 --- a/docs/workspace/sharing/recipients.rst +++ b/docs/workspace/sharing/recipients.rst @@ -18,7 +18,7 @@ recipient follows the activation link to download the credential file, and then uses the credential file to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**. - .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo + .. 
py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo Usage: @@ -51,6 +51,8 @@ The global Unity Catalog metastore id provided by the data recipient. This field is required when the __authentication_type__ is **DATABRICKS**. The identifier is of format __cloud__:__region__:__metastore-uuid__. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param owner: str (optional) @@ -108,7 +110,7 @@ :returns: :class:`RecipientInfo` - .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str]]) -> Iterator[RecipientInfo] + .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[RecipientInfo] Usage: @@ -132,6 +134,16 @@ :param data_recipient_global_metastore_id: str (optional) If not provided, all recipients will be returned. If no recipients exist with this ID, no results will be returned. + :param max_results: int (optional) + Maximum number of recipients to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The + number of returned recipients might be less than the specified max_results size, even zero. The only + definitive indication that no further recipients can be fetched is when the next_page_token is unset + from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: Iterator over :class:`RecipientInfo` @@ -171,7 +183,7 @@ :returns: :class:`RecipientInfo` - .. py:method:: share_permissions(name: str) -> GetRecipientSharePermissionsResponse + .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetRecipientSharePermissionsResponse Usage: @@ -198,11 +210,21 @@ :param name: str The name of the Recipient. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`GetRecipientSharePermissionsResponse` - .. py:method:: update(name: str [, comment: Optional[str], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) + .. 
py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) Usage: @@ -232,6 +254,8 @@ Name of the recipient. :param comment: str (optional) Description about the recipient. + :param expiration_time: int (optional) + Expiration timestamp of the token, in epoch milliseconds. :param ip_access_list: :class:`IpAccessList` (optional) IP Access List :param new_name: str (optional) diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst index 82cdd4e6f..4d14b811d 100644 --- a/docs/workspace/sharing/shares.rst +++ b/docs/workspace/sharing/shares.rst @@ -87,7 +87,7 @@ :returns: :class:`ShareInfo` - .. py:method:: list() -> Iterator[ShareInfo] + .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ShareInfo] Usage: @@ -95,20 +95,32 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sharing w = WorkspaceClient() - all = w.shares.list() + all = w.shares.list(sharing.ListSharesRequest()) List shares. Gets an array of data object shares from the metastore. The caller must be a metastore admin or the owner of the share. There is no guarantee of a specific ordering of the elements in the array. + :param max_results: int (optional) + Maximum number of shares to return. - when set to 0, the page length is set to a server configured + value (recommended); - when set to a value greater than 0, the page length is the minimum of this + value and a server configured value; - when set to a value less than 0, an invalid parameter error + is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of + returned shares might be less than the specified max_results size, even zero. The only definitive + indication that no further shares can be fetched is when the next_page_token is unset from the + response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. + :returns: Iterator over :class:`ShareInfo` - .. py:method:: share_permissions(name: str) -> catalog.PermissionsList + .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> catalog.PermissionsList Get permissions. @@ -117,6 +129,16 @@ :param name: str The name of the share. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. :returns: :class:`PermissionsList` @@ -200,7 +222,7 @@ :returns: :class:`ShareInfo` - .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]]]) + .. 
py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]], max_results: Optional[int], page_token: Optional[str]]) Update permissions. @@ -214,6 +236,16 @@ The name of the share. :param changes: List[:class:`PermissionsChange`] (optional) Array of permission changes. + :param max_results: int (optional) + Maximum number of permissions to return. - when set to 0, the page length is set to a server + configured value (recommended); - when set to a value greater than 0, the page length is the minimum + of this value and a server configured value; - when set to a value less than 0, an invalid parameter + error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The + number of returned permissions might be less than the specified max_results size, even zero. The + only definitive indication that no further permissions can be fetched is when the next_page_token is + unset from the response. + :param page_token: str (optional) + Opaque pagination token to go to next page based on previous query. \ No newline at end of file diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst index 26ae453a2..c552d5f80 100644 --- a/docs/workspace/sql/alerts.rst +++ b/docs/workspace/sql/alerts.rst @@ -8,12 +8,8 @@ periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> Alert + .. py:method:: create( [, alert: Optional[CreateAlertRequestAlert]]) -> Alert Usage: @@ -29,60 +25,48 @@ srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") - - alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) + + alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) # cleanup - w.queries.delete(query_id=query.id) - w.alerts.delete(alert_id=alert.id) + w.queries.delete(id=query.id) + w.alerts.delete(id=alert.id) Create an alert. - Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a - condition of its result, and notifies users or notification destinations if the condition was met. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Creates an alert. 
- [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param name: str - Name of the alert. - :param options: :class:`AlertOptions` - Alert configuration options. - :param query_id: str - Query ID. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param rearm: int (optional) - Number of seconds after being triggered before the alert rearms itself and can be triggered again. - If `null`, alert will never be triggered again. + :param alert: :class:`CreateAlertRequestAlert` (optional) :returns: :class:`Alert` - .. py:method:: delete(alert_id: str) + .. py:method:: delete(id: str) Delete an alert. - Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike - queries and dashboards, alerts cannot be moved to the trash. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and + can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently + deleted after 30 days. - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param alert_id: str + :param id: str - .. py:method:: get(alert_id: str) -> Alert + .. py:method:: get(id: str) -> Alert Usage: @@ -98,35 +82,37 @@ srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) - alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) + alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) - by_id = w.alerts.get(alert_id=alert.id) + by_id = w.alerts.get(id=alert.id) # cleanup - w.queries.delete(query_id=query.id) - w.alerts.delete(alert_id=alert.id) + w.queries.delete(id=query.id) + w.alerts.delete(id=alert.id) Get an alert. Gets an alert. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param alert_id: str + :param id: str :returns: :class:`Alert` - .. py:method:: list() -> Iterator[Alert] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListAlertsResponseAlert] Usage: @@ -134,23 +120,24 @@ .. code-block:: from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - all = w.alerts.list() + all = w.alerts.list(sql.ListAlertsRequest()) - Get alerts. + List alerts. - Gets a list of alerts. + Gets a list of alerts accessible to the user, ordered by creation time. 
**Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
 
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
+        :param page_size: int (optional)
+        :param page_token: str (optional)
 
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-
-        :returns: Iterator over :class:`Alert`
+        :returns: Iterator over :class:`ListAlertsResponseAlert`
 
 
-    .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]])
+    .. py:method:: update(id: str, update_mask: str [, alert: Optional[UpdateAlertRequestAlert]]) -> Alert
 
 
         Usage:
 
         .. code-block::
 
            import time
 
            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import sql
 
            w = WorkspaceClient()
 
            srcs = w.data_sources.list()
 
-           query = w.queries.create(name=f'sdk-{time.time_ns()}',
-                                    data_source_id=srcs[0].id,
-                                    description="test query from Go SDK",
-                                    query="SELECT 1")
-
-           alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"),
-                                   name=f'sdk-{time.time_ns()}',
-                                   query_id=query.id)
-
-           w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"),
-                           alert_id=alert.id,
-                           name=f'sdk-{time.time_ns()}',
-                           query_id=query.id)
+           query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}',
+                                                                      warehouse_id=srcs[0].warehouse_id,
+                                                                      description="test query from Go SDK",
+                                                                      query_text="SELECT 1"))
+
+           alert = w.alerts.create(
+               alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand(
+                   column=sql.AlertOperandColumn(name="1")),
+                                                                              op=sql.AlertOperator.EQUAL,
+                                                                              threshold=sql.AlertConditionThreshold(
+                                                                                  value=sql.AlertOperandValue(
+                                                                                      double_value=1))),
+                                                 display_name=f'sdk-{time.time_ns()}',
+                                                 query_id=query.id))
+
+           _ = w.alerts.update(id=alert.id,
+                               alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'),
+                               update_mask="display_name")
 
            # cleanup
-           w.queries.delete(query_id=query.id)
-           w.alerts.delete(alert_id=alert.id)
+           w.queries.delete(id=query.id)
+           w.alerts.delete(id=alert.id)
 
         Update an alert.
 
         Updates an alert.
 
-        **Note**: A new version of the Databricks SQL API will soon be available. [Learn more]
-
-        [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources
-
-        :param alert_id: str
-        :param name: str
-          Name of the alert.
-        :param options: :class:`AlertOptions`
-          Alert configuration options.
-        :param query_id: str
-          Query ID.
-        :param rearm: int (optional)
-          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-          If `null`, alert will never be triggered again.
-
+        :param id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param alert: :class:`UpdateAlertRequestAlert` (optional)
+
+        :returns: :class:`Alert`
\ No newline at end of file
diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst
new file mode 100644
index 000000000..6dfd96128
--- /dev/null
+++ b/docs/workspace/sql/alerts_legacy.rst
@@ -0,0 +1,114 @@
+``w.alerts_legacy``: Alerts (legacy)
+====================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. 
py:class:: AlertsLegacyAPI + + The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that + periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or + notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of + the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert + + Create an alert. + + Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a + condition of its result, and notifies users or notification destinations if the condition was met. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param name: str + Name of the alert. + :param options: :class:`AlertOptions` + Alert configuration options. + :param query_id: str + Query ID. + :param parent: str (optional) + The identifier of the workspace folder containing the object. + :param rearm: int (optional) + Number of seconds after being triggered before the alert rearms itself and can be triggered again. + If `null`, alert will never be triggered again. + + :returns: :class:`LegacyAlert` + + + .. py:method:: delete(alert_id: str) + + Delete an alert. + + Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike + queries and dashboards, alerts cannot be moved to the trash. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + + + + + .. py:method:: get(alert_id: str) -> LegacyAlert + + Get an alert. + + Gets an alert. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + + :returns: :class:`LegacyAlert` + + + .. py:method:: list() -> Iterator[LegacyAlert] + + Get alerts. + + Gets a list of alerts. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :returns: Iterator over :class:`LegacyAlert` + + + .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]]) + + Update an alert. + + Updates an alert. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param alert_id: str + :param name: str + Name of the alert. + :param options: :class:`AlertOptions` + Alert configuration options. + :param query_id: str + Query ID. + :param rearm: int (optional) + Number of seconds after being triggered before the alert rearms itself and can be triggered again. + If `null`, alert will never be triggered again. 
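Side by side, the two alert surfaces differ mostly in shape: the legacy `update` re-sends every required field, while the new `update` documented earlier in this diff touches only the fields named in `update_mask`. A sketch with placeholder IDs, using only calls shown in this patch:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Legacy surface: name, options, and query_id must all be re-sent on update.
    w.alerts_legacy.update(alert_id="<alert-id>",
                           name="my-alert",
                           options=sql.AlertOptions(column="1", op="==", value="1"),
                           query_id="<query-id>")

    # New surface: only the fields named in update_mask are touched.
    w.alerts.update(id="<alert-id>",
                    alert=sql.UpdateAlertRequestAlert(display_name="my-alert"),
                    update_mask="display_name")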
+ + + \ No newline at end of file diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst index dcab75063..8f7321fa0 100644 --- a/docs/workspace/sql/data_sources.rst +++ b/docs/workspace/sql/data_sources.rst @@ -1,5 +1,5 @@ -``w.data_sources``: Data Sources -================================ +``w.data_sources``: Data Sources (legacy) +========================================= .. currentmodule:: databricks.sdk.service.sql .. py:class:: DataSourcesAPI @@ -12,9 +12,9 @@ advise you to use any text editor, REST client, or `grep` to search the response from this API for the name of your SQL warehouse as it appears in Databricks SQL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: list() -> Iterator[DataSource] @@ -35,9 +35,10 @@ API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new queries against it. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list + instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :returns: Iterator over :class:`DataSource` \ No newline at end of file diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst index fbf1aac2c..7f9e5d19c 100644 --- a/docs/workspace/sql/dbsql_permissions.rst +++ b/docs/workspace/sql/dbsql_permissions.rst @@ -16,9 +16,9 @@ - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`) - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse @@ -26,9 +26,10 @@ Gets a JSON representation of the access control list (ACL) for a specified object. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/getpermissions instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`ObjectTypePlural` The type of object permissions to check. @@ -45,9 +46,10 @@ Sets the access control list (ACL) for a specified object. This operation will complete rewrite the ACL. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:workspace/setpermissions instead. 
[Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`ObjectTypePlural` The type of object permission to set. @@ -64,9 +66,10 @@ Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use + :method:queries/update and :method:alerts/update respectively instead. [Learn more] - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html :param object_type: :class:`OwnableObjectType` The type of object on which to change ownership. diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst index 397de5c72..728730209 100644 --- a/docs/workspace/sql/index.rst +++ b/docs/workspace/sql/index.rst @@ -8,12 +8,15 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer :maxdepth: 1 alerts + alerts_legacy dashboard_widgets dashboards data_sources dbsql_permissions queries + queries_legacy query_history query_visualizations + query_visualizations_legacy statement_execution warehouses \ No newline at end of file diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst index d26ff2ba9..1f01c2f1d 100644 --- a/docs/workspace/sql/queries.rst +++ b/docs/workspace/sql/queries.rst @@ -1,18 +1,14 @@ -``w.queries``: Queries / Results -================================ +``w.queries``: Queries +====================== .. currentmodule:: databricks.sdk.service.sql .. py:class:: QueriesAPI - These endpoints are used for CRUD operations on query definitions. Query definitions include the target - SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be + The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that + includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query + .. 
py:method:: create( [, query: Optional[CreateQueryRequestQuery]]) -> Query Usage: @@ -22,76 +18,43 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) # cleanup - w.queries.delete(query_id=query.id) - - Create a new query definition. - - Creates a new query definition. Queries created with this endpoint belong to the authenticated user - making the request. - - The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can - use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the - `data_source_id` from an existing query. - - **Note**: You cannot add a visualization until you create the query. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param data_source_id: str (optional) - Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list - :param description: str (optional) - General description that conveys additional information about this query such as usage notes. - :param name: str (optional) - The title of this query that appears in list views, widget headings, and on the query page. - :param options: Any (optional) - Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, - `name`, `type`, and `value` properties. The `value` field here is the default value. It can be - overridden at runtime. - :param parent: str (optional) - The identifier of the workspace folder containing the object. - :param query: str (optional) - The text of the query to be run. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) + w.queries.delete(id=query.id) + + Create a query. + + Creates a query. + + :param query: :class:`CreateQueryRequestQuery` (optional) :returns: :class:`Query` - .. py:method:: delete(query_id: str) + .. py:method:: delete(id: str) Delete a query. Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and - they cannot be used for alerts. The trash is deleted after 30 days. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is + permanently deleted after 30 days. - :param query_id: str + :param id: str - .. py:method:: get(query_id: str) -> Query + .. 
py:method:: get(id: str) -> Query Usage: @@ -101,89 +64,58 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) - by_id = w.queries.get(query_id=query.id) + by_id = w.queries.get(id=query.id) # cleanup - w.queries.delete(query_id=query.id) + w.queries.delete(id=query.id) - Get a query definition. + Get a query. - Retrieve a query object definition along with contextual permissions information about the currently - authenticated user. + Gets a query. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param query_id: str + :param id: str :returns: :class:`Query` - .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[Query] + .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery] - Get a list of queries. - - Gets a list of queries. Optionally, this list can be filtered by a search term. - - **Warning**: Calling this API concurrently 10 or more times could result in throttling, service - degradation, or a temporary ban. + List queries. - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] + Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API + concurrently 10 or more times could result in throttling, service degradation, or a temporary ban. - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param order: str (optional) - Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order - descending instead. - - - `name`: The name of the query. - - - `created_at`: The timestamp the query was created. - - - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank - value is treated as the highest value for sorting. - - - `executed_at`: The timestamp when the query was last run. - - - `created_by`: The user name of the user that created the query. - :param page: int (optional) - Page number to retrieve. :param page_size: int (optional) - Number of queries to return per page. - :param q: str (optional) - Full text search term + :param page_token: str (optional) - :returns: Iterator over :class:`Query` + :returns: Iterator over :class:`ListQueryObjectsResponseQuery` - .. py:method:: restore(query_id: str) + .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization] - Restore a query. - - Restore a query that has been moved to the trash. A restored query appears in list views and searches. - You can use restored queries for alerts. - - **Note**: A new version of the Databricks SQL API will soon be available. 
[Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources + List visualizations on a query. - :param query_id: str + Gets a list of visualizations on a query. + :param id: str + :param page_size: int (optional) + :param page_token: str (optional) + :returns: Iterator over :class:`Visualization` - .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Query + .. py:method:: update(id: str, update_mask: str [, query: Optional[UpdateQueryRequestQuery]]) -> Query Usage: @@ -193,55 +125,36 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() - query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") + query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) - updated = w.queries.update(query_id=query.id, - name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="UPDATED: test query from Go SDK", - query="SELECT 2+2") + updated = w.queries.update(id=query.id, + query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + description="UPDATED: test query from Go SDK", + query_text="SELECT 2+2"), + update_mask="display_name,description,query_text") # cleanup - w.queries.delete(query_id=query.id) - - Change a query definition. - - Modify this query definition. - - **Note**: You cannot undo this operation. - - **Note**: A new version of the Databricks SQL API will soon be available. [Learn more] - - [Learn more]: https://docs.databricks.com/en/whats-coming.html#updates-to-the-databricks-sql-api-for-managing-queries-alerts-and-data-sources - - :param query_id: str - :param data_source_id: str (optional) - Data source ID maps to the ID of the data source used by the resource and is distinct from the - warehouse ID. [Learn more] - - [Learn more]: https://docs.databricks.com/api/workspace/datasources/list - :param description: str (optional) - General description that conveys additional information about this query such as usage notes. - :param name: str (optional) - The title of this query that appears in list views, widget headings, and on the query page. - :param options: Any (optional) - Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, - `name`, `type`, and `value` properties. The `value` field here is the default value. It can be - overridden at runtime. - :param query: str (optional) - The text of the query to be run. - :param run_as_role: :class:`RunAsRole` (optional) - Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as - viewer" behavior) or `"owner"` (signifying "run as owner" behavior) - :param tags: List[str] (optional) + w.queries.delete(id=query.id) + + Update a query. + + Updates a query. + + :param id: str + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. 
To specify + multiple fields in the field mask, use comma as the separator (no space). + :param query: :class:`UpdateQueryRequestQuery` (optional) :returns: :class:`Query` \ No newline at end of file diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst new file mode 100644 index 000000000..a7ab56836 --- /dev/null +++ b/docs/workspace/sql/queries_legacy.rst @@ -0,0 +1,183 @@ +``w.queries_legacy``: Queries (legacy) +====================================== +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: QueriesLegacyAPI + + These endpoints are used for CRUD operations on query definitions. Query definitions include the target + SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be + scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery + + Create a new query definition. + + Creates a new query definition. Queries created with this endpoint belong to the authenticated user + making the request. + + The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can + use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the + `data_source_id` from an existing query. + + **Note**: You cannot add a visualization until you create the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param data_source_id: str (optional) + Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list + :param description: str (optional) + General description that conveys additional information about this query such as usage notes. + :param name: str (optional) + The title of this query that appears in list views, widget headings, and on the query page. + :param options: Any (optional) + Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, + `name`, `type`, and `value` properties. The `value` field here is the default value. It can be + overridden at runtime. + :param parent: str (optional) + The identifier of the workspace folder containing the object. + :param query: str (optional) + The text of the query to be run. + :param run_as_role: :class:`RunAsRole` (optional) + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) + + :returns: :class:`LegacyQuery` + + + .. py:method:: delete(query_id: str) + + Delete a query. + + Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and + they cannot be used for alerts. The trash is deleted after 30 days. + + **Note**: A new version of the Databricks SQL API is now available. 
Please use :method:queries/delete + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + + + + .. py:method:: get(query_id: str) -> LegacyQuery + + Get a query definition. + + Retrieve a query object definition along with contextual permissions information about the currently + authenticated user. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + :returns: :class:`LegacyQuery` + + + .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery] + + Get a list of queries. + + Gets a list of queries. Optionally, this list can be filtered by a search term. + + **Warning**: Calling this API concurrently 10 or more times could result in throttling, service + degradation, or a temporary ban. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param order: str (optional) + Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order + descending instead. + + - `name`: The name of the query. + + - `created_at`: The timestamp the query was created. + + - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank + value is treated as the highest value for sorting. + + - `executed_at`: The timestamp when the query was last run. + + - `created_by`: The user name of the user that created the query. + :param page: int (optional) + Page number to retrieve. + :param page_size: int (optional) + Number of queries to return per page. + :param q: str (optional) + Full text search term + + :returns: Iterator over :class:`LegacyQuery` + + + .. py:method:: restore(query_id: str) + + Restore a query. + + Restore a query that has been moved to the trash. A restored query appears in list views and searches. + You can use restored queries for alerts. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. + [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + + + + + .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery + + Change a query definition. + + Modify this query definition. + + **Note**: You cannot undo this operation. + + **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update + instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + :param data_source_id: str (optional) + Data source ID maps to the ID of the data source used by the resource and is distinct from the + warehouse ID. [Learn more] + + [Learn more]: https://docs.databricks.com/api/workspace/datasources/list + :param description: str (optional) + General description that conveys additional information about this query such as usage notes. + :param name: str (optional) + The title of this query that appears in list views, widget headings, and on the query page. 
+ :param options: Any (optional) + Exclusively used for storing a list parameter definitions. A parameter is an object with `title`, + `name`, `type`, and `value` properties. The `value` field here is the default value. It can be + overridden at runtime. + :param query: str (optional) + The text of the query to be run. + :param run_as_role: :class:`RunAsRole` (optional) + Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as + viewer" behavior) or `"owner"` (signifying "run as owner" behavior) + :param tags: List[str] (optional) + + :returns: :class:`LegacyQuery` + \ No newline at end of file diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index 6aacd3c78..5fa003c0e 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -4,9 +4,10 @@ .. py:class:: QueryHistoryAPI - Access the history of queries through SQL warehouses. + A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless + compute, and DLT. - .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QueryInfo] + .. py:method:: list( [, filter_by: Optional[QueryFilter], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse Usage: @@ -23,20 +24,20 @@ List Queries. - List the history of queries through SQL warehouses. + List the history of queries through SQL warehouses, serverless compute, and DLT. - You can filter by user ID, warehouse ID, status, and time range. + You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are + returned first (up to max_results in request). The pagination token returned in response can be used + to list subsequent query statuses. :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. - :param include_metrics: bool (optional) - Whether to include metrics about query. :param max_results: int (optional) - Limit the number of results returned in one page. The default is 100. + Limit the number of results returned in one page. Must be less than 1000 and the default is 100. :param page_token: str (optional) A token that can be used to get the next page of results. The token can contains characters that need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by - %2B. + %2B. This field is optional. - :returns: Iterator over :class:`QueryInfo` + :returns: :class:`ListQueriesResponse` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst index 53888cee7..95095fb20 100644 --- a/docs/workspace/sql/query_visualizations.rst +++ b/docs/workspace/sql/query_visualizations.rst @@ -4,56 +4,43 @@ .. py:class:: QueryVisualizationsAPI - This is an evolving API that facilitates the addition and removal of vizualisations from existing queries - within the Databricks Workspace. Data structures may change over time. + This is an evolving API that facilitates the addition and removal of visualizations from existing queries + in the Databricks Workspace. Data structures can change over time. - .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> Visualization + .. 
py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization - Add visualization to a query. + Add a visualization to a query. - :param query_id: str - The identifier returned by :method:queries/create - :param type: str - The type of visualization: chart, table, pivot table, and so on. - :param options: Any - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. - :param description: str (optional) - A short description of this visualization. This is not displayed in the UI. - :param name: str (optional) - The name of the visualization that appears on dashboards and the query screen. + Adds a visualization to a query. + + :param visualization: :class:`CreateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` .. py:method:: delete(id: str) - Remove visualization. + Remove a visualization. + + Removes a visualization. :param id: str - Widget ID returned by :method:queryvizualisations/create - .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[Query], type: Optional[str], updated_at: Optional[str]]) -> Visualization + .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization - Edit existing visualization. + Update a visualization. + + Updates a visualization. :param id: str - The UUID for this visualization. - :param created_at: str (optional) - :param description: str (optional) - A short description of this visualization. This is not displayed in the UI. - :param name: str (optional) - The name of the visualization that appears on dashboards and the query screen. - :param options: Any (optional) - The options object varies widely from one visualization type to the next and is unsupported. - Databricks does not recommend modifying visualization settings in JSON. - :param query: :class:`Query` (optional) - :param type: str (optional) - The type of visualization: chart, table, pivot table, and so on. - :param updated_at: str (optional) + :param update_mask: str + Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the + setting payload will be updated. The field mask needs to be supplied as single string. To specify + multiple fields in the field mask, use comma as the separator (no space). + :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional) :returns: :class:`Visualization` \ No newline at end of file diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst new file mode 100644 index 000000000..f56f78a5f --- /dev/null +++ b/docs/workspace/sql/query_visualizations_legacy.rst @@ -0,0 +1,85 @@ +``w.query_visualizations_legacy``: Query Visualizations (legacy) +================================================================ +.. currentmodule:: databricks.sdk.service.sql + +.. py:class:: QueryVisualizationsLegacyAPI + + This is an evolving API that facilitates the addition and removal of vizualisations from existing queries + within the Databricks Workspace. Data structures may change over time. + + **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn + more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + .. 
py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization + + Add visualization to a query. + + Creates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/create instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param query_id: str + The identifier returned by :method:queries/create + :param type: str + The type of visualization: chart, table, pivot table, and so on. + :param options: Any + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. + :param description: str (optional) + A short description of this visualization. This is not displayed in the UI. + :param name: str (optional) + The name of the visualization that appears on dashboards and the query screen. + + :returns: :class:`LegacyVisualization` + + + .. py:method:: delete(id: str) + + Remove visualization. + + Removes a visualization from the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/delete instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param id: str + Widget ID returned by :method:queryvizualisations/create + + + + + .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization + + Edit existing visualization. + + Updates visualization in the query. + + **Note**: A new version of the Databricks SQL API is now available. Please use + :method:queryvisualizations/update instead. [Learn more] + + [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html + + :param id: str + The UUID for this visualization. + :param created_at: str (optional) + :param description: str (optional) + A short description of this visualization. This is not displayed in the UI. + :param name: str (optional) + The name of the visualization that appears on dashboards and the query screen. + :param options: Any (optional) + The options object varies widely from one visualization type to the next and is unsupported. + Databricks does not recommend modifying visualization settings in JSON. + :param query: :class:`LegacyQuery` (optional) + :param type: str (optional) + The type of visualization: chart, table, pivot table, and so on. + :param updated_at: str (optional) + + :returns: :class:`LegacyVisualization` + \ No newline at end of file diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst index 7914977c2..4d1337623 100644 --- a/docs/workspace/sql/statement_execution.rst +++ b/docs/workspace/sql/statement_execution.rst @@ -101,7 +101,7 @@ - .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> ExecuteStatementResponse + .. 
py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse Execute a SQL statement. @@ -122,26 +122,6 @@ [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html :param disposition: :class:`Disposition` (optional) - The fetch disposition provides two modes of fetching results: `INLINE` and `EXTERNAL_LINKS`. - - Statements executed with `INLINE` disposition will return result data inline, in `JSON_ARRAY` - format, in a series of chunks. If a given statement produces a result set with a size larger than 25 - MiB, that statement execution is aborted, and no result set will be available. - - **NOTE** Byte limits are computed based upon internal representations of the result set data, and - might not match the sizes visible in JSON responses. - - Statements executed with `EXTERNAL_LINKS` disposition will return result data as external links: - URLs that point to cloud storage internal to the workspace. Using `EXTERNAL_LINKS` disposition - allows statements to generate arbitrarily sized result sets for fetching up to 100 GiB. The - resulting links have two important properties: - - 1. They point to resources _external_ to the Databricks compute; therefore any associated - authentication information (typically a personal access token, OAuth token, or similar) _must be - removed_ when fetching from these links. - - 2. These are presigned URLs with a specific expiration, indicated in the response. The behavior when - attempting to use an expired link is cloud specific. :param format: :class:`Format` (optional) Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and `CSV`. @@ -229,10 +209,10 @@ the statement takes longer to execute, `on_wait_timeout` determines what should happen after the timeout is reached. - :returns: :class:`ExecuteStatementResponse` + :returns: :class:`StatementResponse` - .. py:method:: get_statement(statement_id: str) -> GetStatementResponse + .. py:method:: get_statement(statement_id: str) -> StatementResponse Get status, manifest, and result first chunk. @@ -248,7 +228,7 @@ The statement ID is returned upon successfully submitting a SQL statement, and is a required reference for all subsequent calls. - :returns: :class:`GetStatementResponse` + :returns: :class:`StatementResponse` .. 
py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst index 793852680..8a5da4302 100644 --- a/docs/workspace/sql/warehouses.rst +++ b/docs/workspace/sql/warehouses.rst @@ -17,13 +17,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() # cleanup w.warehouses.delete(id=created.id) @@ -117,13 +122,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() _ = w.warehouses.edit(id=created.id, name=f'sdk-{time.time_ns()}', @@ -213,13 +223,18 @@ import time from databricks.sdk import WorkspaceClient + from databricks.sdk.service import sql w = WorkspaceClient() - created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() + created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() wh = w.warehouses.get(id=created.id) diff --git a/examples/account/budgets/create_budgets.py b/examples/account/budgets/create_budgets.py index 12f20786a..030cc8a57 100755 --- a/examples/account/budgets/create_budgets.py +++ b/examples/account/budgets/create_budgets.py @@ -5,13 +5,26 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup 
-a.budgets.delete(budget_id=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/account/budgets/get_budgets.py b/examples/account/budgets/get_budgets.py index 8640fc974..9c2973110 100755 --- a/examples/account/budgets/get_budgets.py +++ b/examples/account/budgets/get_budgets.py @@ -5,15 +5,28 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) -by_id = a.budgets.get(budget_id=created.budget.budget_id) +by_id = a.budgets.get(budget_id=created.budget.budget_configuration_id) # cleanup -a.budgets.delete(budget_id=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/account/budgets/list_budgets.py b/examples/account/budgets/list_budgets.py index 303690ab7..dd425dba4 100755 --- a/examples/account/budgets/list_budgets.py +++ b/examples/account/budgets/list_budgets.py @@ -1,5 +1,6 @@ from databricks.sdk import AccountClient +from databricks.sdk.service import billing a = AccountClient() -all = a.budgets.list() +all = a.budgets.list(billing.ListBudgetConfigurationsRequest()) diff --git a/examples/account/budgets/update_budgets.py b/examples/account/budgets/update_budgets.py index 1a0193b1d..e19630113 100755 --- a/examples/account/budgets/update_budgets.py +++ b/examples/account/budgets/update_budgets.py @@ -5,24 +5,49 @@ a = AccountClient() -created = a.budgets.create(budget=billing.Budget( - name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[billing.BudgetAlert(email_notifications=["admin@example.com"], min_percentage=50)])) +created = a.budgets.create(budget=billing.CreateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause(key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, + values=["all"])) + ]), + alert_configurations=[ + billing.CreateBudgetConfigurationBudgetAlertConfigurations( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="100", + action_configurations=[ + 
billing.CreateBudgetConfigurationBudgetActionConfigurations( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) -a.budgets.update(budget_id=created.budget.budget_id, - budget=billing.Budget(name=f'sdk-{time.time_ns()}', - filter="tag.tagName = 'all'", - period="1 month", - start_date="2022-01-01", - target_amount="100", - alerts=[ - billing.BudgetAlert(email_notifications=["admin@example.com"], - min_percentage=70) - ])) +_ = a.budgets.update( + budget_id=created.budget.budget_configuration_id, + budget=billing.UpdateBudgetConfigurationBudget( + display_name=f'sdk-{time.time_ns()}', + filter=billing.BudgetConfigurationFilter(tags=[ + billing.BudgetConfigurationFilterTagClause( + key="tagName", + value=billing.BudgetConfigurationFilterClause( + operator=billing.BudgetConfigurationFilterOperator.IN, values=["all"])) + ]), + alert_configurations=[ + billing.AlertConfiguration( + time_period=billing.AlertConfigurationTimePeriod.MONTH, + quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, + trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, + quantity_threshold="50", + action_configurations=[ + billing.ActionConfiguration( + action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, + target="admin@example.com") + ]) + ])) # cleanup -a.budgets.delete(budget_id=created.budget.budget_id) +a.budgets.delete(budget_id=created.budget.budget_configuration_id) diff --git a/examples/workspace/alerts/create_alerts.py b/examples/workspace/alerts/create_alerts.py index 72367ca88..bae1ecf45 100755 --- a/examples/workspace/alerts/create_alerts.py +++ b/examples/workspace/alerts/create_alerts.py @@ -7,15 +7,21 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/alerts/get_alerts.py b/examples/workspace/alerts/get_alerts.py index 3c24e8566..a1a861b14 100755 --- a/examples/workspace/alerts/get_alerts.py +++ b/examples/workspace/alerts/get_alerts.py @@ -7,17 +7,23 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + 
alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) -by_id = w.alerts.get(alert_id=alert.id) +by_id = w.alerts.get(id=alert.id) # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/alerts/list_alerts.py b/examples/workspace/alerts/list_alerts.py index 2009772c5..35e4ce0a2 100755 --- a/examples/workspace/alerts/list_alerts.py +++ b/examples/workspace/alerts/list_alerts.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -all = w.alerts.list() +all = w.alerts.list(sql.ListAlertsRequest()) diff --git a/examples/workspace/alerts/update_alerts.py b/examples/workspace/alerts/update_alerts.py index 130f71913..5d1827f9b 100755 --- a/examples/workspace/alerts/update_alerts.py +++ b/examples/workspace/alerts/update_alerts.py @@ -7,20 +7,25 @@ srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) -alert = w.alerts.create(options=sql.AlertOptions(column="1", op="==", value="1"), - name=f'sdk-{time.time_ns()}', - query_id=query.id) +alert = w.alerts.create( + alert=sql.CreateAlertRequestAlert(condition=sql.AlertCondition(operand=sql.AlertConditionOperand( + column=sql.AlertOperandColumn(name="1")), + op=sql.AlertOperator.EQUAL, + threshold=sql.AlertConditionThreshold( + value=sql.AlertOperandValue( + double_value=1))), + display_name=f'sdk-{time.time_ns()}', + query_id=query.id)) -w.alerts.update(options=sql.AlertOptions(column="1", op="==", value="1"), - alert_id=alert.id, - name=f'sdk-{time.time_ns()}', - query_id=query.id) +_ = w.alerts.update(id=alert.id, + alert=sql.UpdateAlertRequestAlert(display_name=f'sdk-{time.time_ns()}'), + update_mask="display_name") # cleanup -w.queries.delete(query_id=query.id) -w.alerts.delete(alert_id=alert.id) +w.queries.delete(id=query.id) +w.alerts.delete(id=alert.id) diff --git a/examples/workspace/queries/create_alerts.py b/examples/workspace/queries/create_alerts.py index 37d71ac60..f0213aea9 100755 --- a/examples/workspace/queries/create_alerts.py +++ b/examples/workspace/queries/create_alerts.py @@ -1,15 +1,16 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SELECT 1") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SELECT 1")) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/create_queries.py b/examples/workspace/queries/create_queries.py index c8d5ac93d..ce293d410 100755 --- a/examples/workspace/queries/create_queries.py +++ 
b/examples/workspace/queries/create_queries.py @@ -1,15 +1,16 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/get_queries.py b/examples/workspace/queries/get_queries.py index d29b75982..f1854d306 100755 --- a/examples/workspace/queries/get_queries.py +++ b/examples/workspace/queries/get_queries.py @@ -1,17 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) -by_id = w.queries.get(query_id=query.id) +by_id = w.queries.get(id=query.id) # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/queries/update_queries.py b/examples/workspace/queries/update_queries.py index 85a9609ad..948d9a916 100755 --- a/examples/workspace/queries/update_queries.py +++ b/examples/workspace/queries/update_queries.py @@ -1,21 +1,22 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() srcs = w.data_sources.list() -query = w.queries.create(name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="test query from Go SDK", - query="SHOW TABLES") +query = w.queries.create(query=sql.CreateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + warehouse_id=srcs[0].warehouse_id, + description="test query from Go SDK", + query_text="SHOW TABLES")) -updated = w.queries.update(query_id=query.id, - name=f'sdk-{time.time_ns()}', - data_source_id=srcs[0].id, - description="UPDATED: test query from Go SDK", - query="SELECT 2+2") +updated = w.queries.update(id=query.id, + query=sql.UpdateQueryRequestQuery(display_name=f'sdk-{time.time_ns()}', + description="UPDATED: test query from Go SDK", + query_text="SELECT 2+2"), + update_mask="display_name,description,query_text") # cleanup -w.queries.delete(query_id=query.id) +w.queries.delete(id=query.id) diff --git a/examples/workspace/shares/list_shares.py b/examples/workspace/shares/list_shares.py index b8668e7f7..d432854c6 100755 --- a/examples/workspace/shares/list_shares.py +++ b/examples/workspace/shares/list_shares.py @@ -1,5 +1,6 @@ from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sharing w = WorkspaceClient() -all = w.shares.list() +all = w.shares.list(sharing.ListSharesRequest()) diff --git a/examples/workspace/warehouses/create_sql_warehouses.py b/examples/workspace/warehouses/create_sql_warehouses.py index 15e8f474c..f01b9d5f9 100755 --- a/examples/workspace/warehouses/create_sql_warehouses.py +++ b/examples/workspace/warehouses/create_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk 
import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() # cleanup w.warehouses.delete(id=created.id) diff --git a/examples/workspace/warehouses/edit_sql_warehouses.py b/examples/workspace/warehouses/edit_sql_warehouses.py index 0e3c8e8f0..acf06035a 100755 --- a/examples/workspace/warehouses/edit_sql_warehouses.py +++ b/examples/workspace/warehouses/edit_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() _ = w.warehouses.edit(id=created.id, name=f'sdk-{time.time_ns()}', diff --git a/examples/workspace/warehouses/get_sql_warehouses.py b/examples/workspace/warehouses/get_sql_warehouses.py index 7b59844ca..9f8184ab7 100755 --- a/examples/workspace/warehouses/get_sql_warehouses.py +++ b/examples/workspace/warehouses/get_sql_warehouses.py @@ -1,13 +1,18 @@ import time from databricks.sdk import WorkspaceClient +from databricks.sdk.service import sql w = WorkspaceClient() -created = w.warehouses.create(name=f'sdk-{time.time_ns()}', - cluster_size="2X-Small", - max_num_clusters=1, - auto_stop_mins=10).result() +created = w.warehouses.create( + name=f'sdk-{time.time_ns()}', + cluster_size="2X-Small", + max_num_clusters=1, + auto_stop_mins=10, + tags=sql.EndpointTags( + custom_tags=[sql.EndpointTagPair(key="Owner", value="eng-dev-ecosystem-team_at_databricks.com") + ])).result() wh = w.warehouses.get(id=created.id) From c3d1db58cfd5a4f87f01943c94b2d8ff594b8350 Mon Sep 17 00:00:00 2001 From: Renaud Hartert Date: Thu, 15 Aug 2024 14:50:30 +0200 Subject: [PATCH 023/136] [Internal] Escape single quotes in regex matchers (#727) ## Changes This PR makes sure that single quotes are properly escaped when passing regex pattern to match errors. ## Tests Verified that SDK can properly be generated when the pattern contains single quotes. Note that `downstreams / compatibility (ucx, databrickslabs)` was already failing and that this PR should not affect downstream consumers. 
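As a minimal illustration of the issue (the error text below is hypothetical, but mirrors the job-run matcher this template later emits in `databricks/sdk/errors/overrides.py`):

```python
import re

# Hypothetical server error text containing an apostrophe.
message = "Run: 123 in job: 456 doesn't exist"

# Without escaping, the template would emit
#     message_matcher=re.compile(r'Run: .* in job: .* doesn't exist')
# which is a Python syntax error: the apostrophe ends the string literal early.

# With replaceAll("'", "\\'") the emitted literal stays valid Python. The backslash
# survives inside a raw string, and in a regex `\'` matches a literal apostrophe,
# so the compiled pattern still matches the original message.
pattern = re.compile(r'Run: .* in job: .* doesn\'t exist')
assert pattern.search(message) is not None
```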
- [x] `make test` run locally
- [x] `make fmt` applied
- [x] relevant integration tests applied

---
 .codegen/error_overrides.py.tmpl | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.codegen/error_overrides.py.tmpl b/.codegen/error_overrides.py.tmpl
index 6bb85d6ca..adcfea555 100644
--- a/.codegen/error_overrides.py.tmpl
+++ b/.codegen/error_overrides.py.tmpl
@@ -11,9 +11,9 @@ _ALL_OVERRIDES = [
         debug_name="{{.Name}}",
         path_regex=re.compile(r'{{.PathRegex}}'),
         verb="{{.Verb}}",
-        status_code_matcher=re.compile(r'{{.StatusCodeMatcher}}'),
-        error_code_matcher=re.compile(r'{{.ErrorCodeMatcher}}'),
-        message_matcher=re.compile(r'{{.MessageMatcher}}'),
+        status_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .StatusCodeMatcher}}'),
+        error_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .ErrorCodeMatcher}}'),
+        message_matcher=re.compile(r'{{replaceAll "'" "\\'" .MessageMatcher}}'),
         custom_error={{.OverrideErrorCode.PascalName}},
     ),
 {{- end }}

From 19fe05c91a7ab78d0c9606dee61faf9ea871917e Mon Sep 17 00:00:00 2001
From: Serge Smertin <259697+nfx@users.noreply.github.com>
Date: Mon, 19 Aug 2024 11:39:22 +0200
Subject: [PATCH 024/136] [Fix] Fixed regression introduced in v0.30.0 causing
 `ValueError: Invalid semantic version: 0.33.1+420240816190912` (#729)

## Changes
This PR fixes the SemVer regex to follow the official recommendation and capture more patterns. It also ensures that accepted patterns are both SemVer and PEP 440 compliant.

## Tests

- [x] `make test` run locally
- [x] `make fmt` applied
- [ ] relevant integration tests applied

---
 databricks/sdk/useragent.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py
index df8761600..5b15d2822 100644
--- a/databricks/sdk/useragent.py
+++ b/databricks/sdk/useragent.py
@@ -21,7 +21,14 @@

 # Precompiled regex patterns
 alphanum_pattern = re.compile(r'^[a-zA-Z0-9_.+-]+$')
-semver_pattern = re.compile(r'^v?(\d+\.)?(\d+\.)?(\*|\d+)$')
+
+# official https://semver.org/ recommendation: https://regex101.com/r/Ly7O1x/
+# with addition of "x" wildcards for minor/patch versions. Also, patch version may be omitted.
+semver_pattern = re.compile(r"^"
+                            r"(?P<major>0|[1-9]\d*)\.(?P<minor>x|0|[1-9]\d*)(\.(?P<patch>x|0|[1-9x]\d*))?"
+                            r"(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)"
+                            r"(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
+                            r"(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")


 def _match_alphanum(value):

From fb30ed98912a4e62a4b535188765f792b7bc6f47 Mon Sep 17 00:00:00 2001
From: Renaud Hartert
Date: Wed, 21 Aug 2024 14:42:11 +0200
Subject: [PATCH 025/136] [Release] Release v0.31.0 (#733)

### Bug Fixes

 * Fixed regression introduced in v0.30.0 causing `ValueError: Invalid semantic version: 0.33.1+420240816190912` ([#729](https://github.com/databricks/databricks-sdk-py/pull/729)).


### Internal Changes

 * Escape single quotes in regex matchers ([#727](https://github.com/databricks/databricks-sdk-py/pull/727)).


### API Changes:

 * Added [w.policy_compliance_for_clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_clusters.html) workspace-level service (illustrated in the sketch after this list).
 * Added [w.policy_compliance_for_jobs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_jobs.html) workspace-level service.
 * Added [w.resource_quotas](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/resource_quotas.html) workspace-level service.
* Added `databricks.sdk.service.catalog.GetQuotaRequest`, `databricks.sdk.service.catalog.GetQuotaResponse`, `databricks.sdk.service.catalog.ListQuotasRequest`, `databricks.sdk.service.catalog.ListQuotasResponse` and `databricks.sdk.service.catalog.QuotaInfo` dataclasses. * Added `databricks.sdk.service.compute.ClusterCompliance`, `databricks.sdk.service.compute.ClusterSettingsChange`, `databricks.sdk.service.compute.EnforceClusterComplianceRequest`, `databricks.sdk.service.compute.EnforceClusterComplianceResponse`, `databricks.sdk.service.compute.GetClusterComplianceRequest`, `databricks.sdk.service.compute.GetClusterComplianceResponse`, `databricks.sdk.service.compute.ListClusterCompliancesRequest` and `databricks.sdk.service.compute.ListClusterCompliancesResponse` dataclasses. * Added `databricks.sdk.service.jobs.EnforcePolicyComplianceForJobResponseJobClusterSettingsChange`, `databricks.sdk.service.jobs.EnforcePolicyComplianceRequest`, `databricks.sdk.service.jobs.EnforcePolicyComplianceResponse`, `databricks.sdk.service.jobs.GetPolicyComplianceRequest`, `databricks.sdk.service.jobs.GetPolicyComplianceResponse`, `databricks.sdk.service.jobs.JobCompliance`, `databricks.sdk.service.jobs.ListJobComplianceForPolicyResponse` and `databricks.sdk.service.jobs.ListJobComplianceRequest` dataclasses. * Added `fallback` field for `databricks.sdk.service.catalog.CreateExternalLocation`. * Added `fallback` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. * Added `fallback` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. * Added `job_run_id` field for `databricks.sdk.service.jobs.BaseRun`. * Added `job_run_id` field for `databricks.sdk.service.jobs.Run`. * Added `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. * Added `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. * Removed `databricks.sdk.service.sql.ContextFilter` dataclass. * Removed `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. * Removed `pipeline_id` and `pipeline_update_id` fields for `databricks.sdk.service.sql.QuerySource`. 
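For orientation, a minimal sketch of how one of the new services surfaces on the workspace client. It is illustrative only: the policy ID is a placeholder, and the `list_compliance` method name and the fields on the result are assumed from the request/response dataclasses listed above rather than confirmed here.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Iterate over the compliance status of every cluster governed by one policy.
# "D10BEAD51F0E95FA" is a placeholder policy ID, not a real one.
for compliance in w.policy_compliance_for_clusters.list_compliance(policy_id="D10BEAD51F0E95FA"):
    print(compliance.cluster_id, compliance.is_compliant)
```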
OpenAPI SHA: 3eae49b444cac5a0118a3503e5b7ecef7f96527a, Date: 2024-08-21 --- .codegen/_openapi_sha | 2 +- CHANGELOG.md | 33 ++ databricks/sdk/__init__.py | 26 +- databricks/sdk/errors/overrides.py | 8 + databricks/sdk/errors/platform.py | 5 + databricks/sdk/service/catalog.py | 193 +++++++++++ databricks/sdk/service/compute.py | 272 ++++++++++++++++ databricks/sdk/service/dashboards.py | 33 +- databricks/sdk/service/jobs.py | 306 +++++++++++++++++- databricks/sdk/service/sql.py | 95 +----- databricks/sdk/version.py | 2 +- docs/dbdataclasses/catalog.rst | 27 ++ docs/dbdataclasses/compute.rst | 24 ++ docs/dbdataclasses/jobs.rst | 24 ++ docs/dbdataclasses/sql.rst | 4 - docs/workspace/catalog/external_locations.rst | 12 +- docs/workspace/catalog/index.rst | 1 + docs/workspace/catalog/resource_quotas.rst | 45 +++ docs/workspace/compute/index.rst | 1 + .../policy_compliance_for_clusters.rst | 71 ++++ docs/workspace/dashboards/lakeview.rst | 10 +- docs/workspace/jobs/index.rst | 3 +- .../jobs/policy_compliance_for_jobs.rst | 66 ++++ docs/workspace/sql/query_history.rst | 11 +- 24 files changed, 1152 insertions(+), 122 deletions(-) create mode 100644 docs/workspace/catalog/resource_quotas.rst create mode 100644 docs/workspace/compute/policy_compliance_for_clusters.rst create mode 100644 docs/workspace/jobs/policy_compliance_for_jobs.rst diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha index fef6f268b..8b01a2422 100644 --- a/.codegen/_openapi_sha +++ b/.codegen/_openapi_sha @@ -1 +1 @@ -f98c07f9c71f579de65d2587bb0292f83d10e55d \ No newline at end of file +3eae49b444cac5a0118a3503e5b7ecef7f96527a \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 278eec3e2..ee73d57f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,38 @@ # Version changelog +## [Release] Release v0.31.0 + +### Bug Fixes + + * Fixed regression introduced in v0.30.0 causing `ValueError: Invalid semantic version: 0.33.1+420240816190912` ([#729](https://github.com/databricks/databricks-sdk-py/pull/729)). + + +### Internal Changes + + * Escape single quotes in regex matchers ([#727](https://github.com/databricks/databricks-sdk-py/pull/727)). + + +### API Changes: + + * Added [w.policy_compliance_for_clusters](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_clusters.html) workspace-level service. + * Added [w.policy_compliance_for_jobs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/policy_compliance_for_jobs.html) workspace-level service. + * Added [w.resource_quotas](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/resource_quotas.html) workspace-level service. + * Added `databricks.sdk.service.catalog.GetQuotaRequest`, `databricks.sdk.service.catalog.GetQuotaResponse`, `databricks.sdk.service.catalog.ListQuotasRequest`, `databricks.sdk.service.catalog.ListQuotasResponse` and `databricks.sdk.service.catalog.QuotaInfo` dataclasses. + * Added `databricks.sdk.service.compute.ClusterCompliance`, `databricks.sdk.service.compute.ClusterSettingsChange`, `databricks.sdk.service.compute.EnforceClusterComplianceRequest`, `databricks.sdk.service.compute.EnforceClusterComplianceResponse`, `databricks.sdk.service.compute.GetClusterComplianceRequest`, `databricks.sdk.service.compute.GetClusterComplianceResponse`, `databricks.sdk.service.compute.ListClusterCompliancesRequest` and `databricks.sdk.service.compute.ListClusterCompliancesResponse` dataclasses. 
+ * Added `databricks.sdk.service.jobs.EnforcePolicyComplianceForJobResponseJobClusterSettingsChange`, `databricks.sdk.service.jobs.EnforcePolicyComplianceRequest`, `databricks.sdk.service.jobs.EnforcePolicyComplianceResponse`, `databricks.sdk.service.jobs.GetPolicyComplianceRequest`, `databricks.sdk.service.jobs.GetPolicyComplianceResponse`, `databricks.sdk.service.jobs.JobCompliance`, `databricks.sdk.service.jobs.ListJobComplianceForPolicyResponse` and `databricks.sdk.service.jobs.ListJobComplianceRequest` dataclasses. + * Added `fallback` field for `databricks.sdk.service.catalog.CreateExternalLocation`. + * Added `fallback` field for `databricks.sdk.service.catalog.ExternalLocationInfo`. + * Added `fallback` field for `databricks.sdk.service.catalog.UpdateExternalLocation`. + * Added `job_run_id` field for `databricks.sdk.service.jobs.BaseRun`. + * Added `job_run_id` field for `databricks.sdk.service.jobs.Run`. + * Added `include_metrics` field for `databricks.sdk.service.sql.ListQueryHistoryRequest`. + * Added `statement_ids` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `databricks.sdk.service.sql.ContextFilter` dataclass. + * Removed `context_filter` field for `databricks.sdk.service.sql.QueryFilter`. + * Removed `pipeline_id` and `pipeline_update_id` fields for `databricks.sdk.service.sql.QuerySource`. + +OpenAPI SHA: 3eae49b444cac5a0118a3503e5b7ecef7f96527a, Date: 2024-08-21 + ## [Release] Release v0.30.0 ### New Features and Improvements diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 48fe1beb6..50069c315 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -17,7 +17,8 @@ GrantsAPI, MetastoresAPI, ModelVersionsAPI, OnlineTablesAPI, QualityMonitorsAPI, - RegisteredModelsAPI, SchemasAPI, + RegisteredModelsAPI, + ResourceQuotasAPI, SchemasAPI, StorageCredentialsAPI, SystemSchemasAPI, TableConstraintsAPI, TablesAPI, @@ -27,6 +28,7 @@ GlobalInitScriptsAPI, InstancePoolsAPI, InstanceProfilesAPI, LibrariesAPI, + PolicyComplianceForClustersAPI, PolicyFamiliesAPI) from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI from databricks.sdk.service.files import DbfsAPI, FilesAPI @@ -38,7 +40,7 @@ GroupsAPI, PermissionMigrationAPI, PermissionsAPI, ServicePrincipalsAPI, UsersAPI, WorkspaceAssignmentAPI) -from databricks.sdk.service.jobs import JobsAPI +from databricks.sdk.service.jobs import JobsAPI, PolicyComplianceForJobsAPI from databricks.sdk.service.marketplace import ( ConsumerFulfillmentsAPI, ConsumerInstallationsAPI, ConsumerListingsAPI, ConsumerPersonalizationRequestsAPI, ConsumerProvidersAPI, @@ -214,6 +216,8 @@ def __init__(self, self._permission_migration = PermissionMigrationAPI(self._api_client) self._permissions = PermissionsAPI(self._api_client) self._pipelines = PipelinesAPI(self._api_client) + self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client) + self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client) self._policy_families = PolicyFamiliesAPI(self._api_client) self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client) self._provider_exchanges = ProviderExchangesAPI(self._api_client) @@ -234,6 +238,7 @@ def __init__(self, self._recipients = RecipientsAPI(self._api_client) self._registered_models = RegisteredModelsAPI(self._api_client) self._repos = ReposAPI(self._api_client) + self._resource_quotas = ResourceQuotasAPI(self._api_client) self._schemas = SchemasAPI(self._api_client) self._secrets = 
SecretsAPI(self._api_client) self._service_principals = ServicePrincipalsAPI(self._api_client) @@ -499,6 +504,16 @@ def pipelines(self) -> PipelinesAPI: """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.""" return self._pipelines + @property + def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI: + """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace.""" + return self._policy_compliance_for_clusters + + @property + def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI: + """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.""" + return self._policy_compliance_for_jobs + @property def policy_families(self) -> PolicyFamiliesAPI: """View available policy families.""" @@ -561,7 +576,7 @@ def queries_legacy(self) -> QueriesLegacyAPI: @property def query_history(self) -> QueryHistoryAPI: - """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless compute, and DLT.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute.""" return self._query_history @property @@ -594,6 +609,11 @@ def repos(self) -> ReposAPI: """The Repos API allows users to manage their git repos.""" return self._repos + @property + def resource_quotas(self) -> ResourceQuotasAPI: + """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created.""" + return self._resource_quotas + @property def schemas(self) -> SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace.""" diff --git a/databricks/sdk/errors/overrides.py b/databricks/sdk/errors/overrides.py index 492b2caad..840bdcfcb 100644 --- a/databricks/sdk/errors/overrides.py +++ b/databricks/sdk/errors/overrides.py @@ -22,4 +22,12 @@ message_matcher=re.compile(r'Job .* does not exist'), custom_error=ResourceDoesNotExist, ), + _ErrorOverride(debug_name="Job Runs InvalidParameterValue=>ResourceDoesNotExist", + path_regex=re.compile(r'^/api/2\.\d/jobs/runs/get'), + verb="GET", + status_code_matcher=re.compile(r'^400$'), + error_code_matcher=re.compile(r'INVALID_PARAMETER_VALUE'), + message_matcher=re.compile(r'(Run .* does not exist|Run: .* in job: .* doesn\'t exist)'), + custom_error=ResourceDoesNotExist, + ), ] diff --git a/databricks/sdk/errors/platform.py b/databricks/sdk/errors/platform.py index df25fad4b..0d923a75c 100755 --- a/databricks/sdk/errors/platform.py +++ b/databricks/sdk/errors/platform.py @@ -47,6 +47,10 @@ class DeadlineExceeded(DatabricksError): """the deadline expired before the operation could complete""" +class InvalidState(BadRequest): + """unexpected state""" + + class InvalidParameterValue(BadRequest): """supplied value for a parameter was invalid""" @@ -99,6 +103,7 @@ class DataLoss(InternalError): } ERROR_CODE_MAPPING = { + 'INVALID_STATE': InvalidState, 'INVALID_PARAMETER_VALUE': InvalidParameterValue, 'RESOURCE_DOES_NOT_EXIST': ResourceDoesNotExist, 'ABORTED': Aborted, diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py index 0e81d239f..5c3702daf 100755 --- a/databricks/sdk/service/catalog.py +++ b/databricks/sdk/service/catalog.py @@ -849,7 +849,10 @@ class ConnectionInfoSecurableKind(Enum): """Kind of connection securable.""" CONNECTION_BIGQUERY = 'CONNECTION_BIGQUERY' + 
CONNECTION_BUILTIN_HIVE_METASTORE = 'CONNECTION_BUILTIN_HIVE_METASTORE' CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS' + CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE' + CONNECTION_GLUE = 'CONNECTION_GLUE' CONNECTION_MYSQL = 'CONNECTION_MYSQL' CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG' CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL' @@ -864,6 +867,8 @@ class ConnectionType(Enum): BIGQUERY = 'BIGQUERY' DATABRICKS = 'DATABRICKS' + GLUE = 'GLUE' + HIVE_METASTORE = 'HIVE_METASTORE' MYSQL = 'MYSQL' POSTGRESQL = 'POSTGRESQL' REDSHIFT = 'REDSHIFT' @@ -1023,6 +1028,11 @@ class CreateExternalLocation: encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" + fallback: Optional[bool] = None + """Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient.""" + read_only: Optional[bool] = None """Indicates whether the external location is read-only.""" @@ -1036,6 +1046,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.credential_name is not None: body['credential_name'] = self.credential_name if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.fallback is not None: body['fallback'] = self.fallback if self.name is not None: body['name'] = self.name if self.read_only is not None: body['read_only'] = self.read_only if self.skip_validation is not None: body['skip_validation'] = self.skip_validation @@ -1049,6 +1060,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateExternalLocation: comment=d.get('comment', None), credential_name=d.get('credential_name', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), + fallback=d.get('fallback', None), name=d.get('name', None), read_only=d.get('read_only', None), skip_validation=d.get('skip_validation', None), @@ -1974,6 +1986,11 @@ class ExternalLocationInfo: encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" + fallback: Optional[bool] = None + """Indicates whether fallback mode is enabled for this external location. 
When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient.""" + isolation_mode: Optional[IsolationMode] = None """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" @@ -2009,6 +2026,7 @@ def as_dict(self) -> dict: if self.credential_id is not None: body['credential_id'] = self.credential_id if self.credential_name is not None: body['credential_name'] = self.credential_name if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.fallback is not None: body['fallback'] = self.fallback if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.metastore_id is not None: body['metastore_id'] = self.metastore_id if self.name is not None: body['name'] = self.name @@ -2030,6 +2048,7 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo: credential_id=d.get('credential_id', None), credential_name=d.get('credential_name', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), + fallback=d.get('fallback', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), metastore_id=d.get('metastore_id', None), name=d.get('name', None), @@ -2544,6 +2563,23 @@ class GetMetastoreSummaryResponseDeltaSharingScope(Enum): INTERNAL_AND_EXTERNAL = 'INTERNAL_AND_EXTERNAL' +@dataclass +class GetQuotaResponse: + quota_info: Optional[QuotaInfo] = None + """The returned QuotaInfo.""" + + def as_dict(self) -> dict: + """Serializes the GetQuotaResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.quota_info: body['quota_info'] = self.quota_info.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse: + """Deserializes the GetQuotaResponse from a dictionary.""" + return cls(quota_info=_from_dict(d, 'quota_info', QuotaInfo)) + + class IsolationMode(Enum): """Whether the current securable is accessible from all workspaces or a specific set of workspaces.""" @@ -2719,6 +2755,29 @@ def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse: next_page_token=d.get('next_page_token', None)) +@dataclass +class ListQuotasResponse: + next_page_token: Optional[str] = None + """Opaque token to retrieve the next page of results. Absent if there are no more pages. 
+ __page_token__ should be set to this value for the next request.""" + + quotas: Optional[List[QuotaInfo]] = None + """An array of returned QuotaInfos.""" + + def as_dict(self) -> dict: + """Serializes the ListQuotasResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas] + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse: + """Deserializes the ListQuotasResponse from a dictionary.""" + return cls(next_page_token=d.get('next_page_token', None), + quotas=_repeated_dict(d, 'quotas', QuotaInfo)) + + @dataclass class ListRegisteredModelsResponse: next_page_token: Optional[str] = None @@ -4048,6 +4107,49 @@ def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus: initial_pipeline_sync_progress=_from_dict(d, 'initial_pipeline_sync_progress', PipelineProgress)) +@dataclass +class QuotaInfo: + last_refreshed_at: Optional[int] = None + """The timestamp that indicates when the quota count was last updated.""" + + parent_full_name: Optional[str] = None + """Name of the parent resource. Returns metastore ID if the parent is a metastore.""" + + parent_securable_type: Optional[SecurableType] = None + """The quota parent securable type.""" + + quota_count: Optional[int] = None + """The current usage of the resource quota.""" + + quota_limit: Optional[int] = None + """The current limit of the resource quota.""" + + quota_name: Optional[str] = None + """The name of the quota.""" + + def as_dict(self) -> dict: + """Serializes the QuotaInfo into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at + if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name + if self.parent_securable_type is not None: + body['parent_securable_type'] = self.parent_securable_type.value + if self.quota_count is not None: body['quota_count'] = self.quota_count + if self.quota_limit is not None: body['quota_limit'] = self.quota_limit + if self.quota_name is not None: body['quota_name'] = self.quota_name + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> QuotaInfo: + """Deserializes the QuotaInfo from a dictionary.""" + return cls(last_refreshed_at=d.get('last_refreshed_at', None), + parent_full_name=d.get('parent_full_name', None), + parent_securable_type=_enum(d, 'parent_securable_type', SecurableType), + quota_count=d.get('quota_count', None), + quota_limit=d.get('quota_limit', None), + quota_name=d.get('quota_name', None)) + + @dataclass class RegisteredModelAlias: """Registered model alias.""" @@ -4969,6 +5071,11 @@ class UpdateExternalLocation: encryption_details: Optional[EncryptionDetails] = None """Encryption options that apply to clients connecting to cloud storage.""" + fallback: Optional[bool] = None + """Indicates whether fallback mode is enabled for this external location. 
When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient.""" + force: Optional[bool] = None """Force update even if changing url invalidates dependent external tables or mounts.""" @@ -5000,6 +5107,7 @@ def as_dict(self) -> dict: if self.comment is not None: body['comment'] = self.comment if self.credential_name is not None: body['credential_name'] = self.credential_name if self.encryption_details: body['encryption_details'] = self.encryption_details.as_dict() + if self.fallback is not None: body['fallback'] = self.fallback if self.force is not None: body['force'] = self.force if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value if self.name is not None: body['name'] = self.name @@ -5017,6 +5125,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation: comment=d.get('comment', None), credential_name=d.get('credential_name', None), encryption_details=_from_dict(d, 'encryption_details', EncryptionDetails), + fallback=d.get('fallback', None), force=d.get('force', None), isolation_mode=_enum(d, 'isolation_mode', IsolationMode), name=d.get('name', None), @@ -6597,6 +6706,7 @@ def create(self, access_point: Optional[str] = None, comment: Optional[str] = None, encryption_details: Optional[EncryptionDetails] = None, + fallback: Optional[bool] = None, read_only: Optional[bool] = None, skip_validation: Optional[bool] = None) -> ExternalLocationInfo: """Create an external location. @@ -6617,6 +6727,10 @@ def create(self, User-provided free-form text description. :param encryption_details: :class:`EncryptionDetails` (optional) Encryption options that apply to clients connecting to cloud storage. + :param fallback: bool (optional) + Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -6629,6 +6743,7 @@ def create(self, if comment is not None: body['comment'] = comment if credential_name is not None: body['credential_name'] = credential_name if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() + if fallback is not None: body['fallback'] = fallback if name is not None: body['name'] = name if read_only is not None: body['read_only'] = read_only if skip_validation is not None: body['skip_validation'] = skip_validation @@ -6736,6 +6851,7 @@ def update(self, comment: Optional[str] = None, credential_name: Optional[str] = None, encryption_details: Optional[EncryptionDetails] = None, + fallback: Optional[bool] = None, force: Optional[bool] = None, isolation_mode: Optional[IsolationMode] = None, new_name: Optional[str] = None, @@ -6759,6 +6875,10 @@ def update(self, Name of the storage credential used with this location. :param encryption_details: :class:`EncryptionDetails` (optional) Encryption options that apply to clients connecting to cloud storage. + :param fallback: bool (optional) + Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. 
:param isolation_mode: :class:`IsolationMode` (optional) @@ -6781,6 +6901,7 @@ def update(self, if comment is not None: body['comment'] = comment if credential_name is not None: body['credential_name'] = credential_name if encryption_details is not None: body['encryption_details'] = encryption_details.as_dict() + if fallback is not None: body['fallback'] = fallback if force is not None: body['force'] = force if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value if new_name is not None: body['new_name'] = new_name @@ -8178,6 +8299,78 @@ def update(self, return RegisteredModelInfo.from_dict(res) +class ResourceQuotasAPI: + """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that + can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per + metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and + limits. For more information on resource quotas see the [Unity Catalog documentation]. + + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas""" + + def __init__(self, api_client): + self._api = api_client + + def get_quota(self, parent_securable_type: str, parent_full_name: str, + quota_name: str) -> GetQuotaResponse: + """Get information for a single resource quota. + + The GetQuota API returns usage information for a single resource quota, defined as a child-parent + pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered + asynchronously. The updated count might not be returned in the first call. + + :param parent_securable_type: str + Securable type of the quota parent. + :param parent_full_name: str + Full name of the parent resource. Provide the metastore ID if the parent is a metastore. + :param quota_name: str + Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. + + :returns: :class:`GetQuotaResponse` + """ + + headers = {'Accept': 'application/json', } + + res = self._api.do( + 'GET', + f'/api/2.1/unity-catalog/resource-quotas/{parent_securable_type}/{parent_full_name}/{quota_name}', + headers=headers) + return GetQuotaResponse.from_dict(res) + + def list_quotas(self, + *, + max_results: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[QuotaInfo]: + """List all resource quotas under a metastore. + + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the + counts returned. This API does not trigger a refresh of quota counts. + + :param max_results: int (optional) + The number of quotas to return. + :param page_token: str (optional) + Opaque token for the next page of results. + + :returns: Iterator over :class:`QuotaInfo` + """ + + query = {} + if max_results is not None: query['max_results'] = max_results + if page_token is not None: query['page_token'] = page_token + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + '/api/2.1/unity-catalog/resource-quotas/all-resource-quotas', + query=query, + headers=headers) + if 'quotas' in json: + for v in json['quotas']: + yield QuotaInfo.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + class SchemasAPI: """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema organizes tables, views and functions. 
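
As a quick illustration of how the new `ResourceQuotasAPI` defined above might be exercised once this patch lands — a minimal sketch, assuming a configured workspace client; the securable type, catalog name, and quota name are illustrative placeholders, not values taken from this diff:

```python
from databricks.sdk import WorkspaceClient

# Assumes authentication is already configured (env vars or ~/.databrickscfg).
w = WorkspaceClient()

# Look up a single quota as a (parent, quota) pair. Per the get_quota docstring,
# quota names follow the quota type with a "-quota" suffix; "schema-quota" and
# the catalog "main" are placeholders here.
resp = w.resource_quotas.get_quota(parent_securable_type="catalog",
                                   parent_full_name="main",
                                   quota_name="schema-quota")
info = resp.quota_info
if info is not None:
    print(f"{info.quota_name}: {info.quota_count}/{info.quota_limit}")

# Walk every quota under the metastore; list_quotas pages through
# next_page_token internally, as the generator loop above shows.
for q in w.resource_quotas.list_quotas():
    print(q.parent_full_name, q.quota_name, q.quota_count, q.quota_limit)
```

Note that, per the docstring, get_quota refreshes out-of-date counts asynchronously, so the first call may still return a stale `quota_count`.
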
To access (or list) a table or view in a schema, users must have diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py index 148ce44e8..567518222 100755 --- a/databricks/sdk/service/compute.py +++ b/databricks/sdk/service/compute.py @@ -690,6 +690,35 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes: workload_type=_from_dict(d, 'workload_type', WorkloadType)) +@dataclass +class ClusterCompliance: + cluster_id: str + """Canonical unique identifier for a cluster.""" + + is_compliant: Optional[bool] = None + """Whether this cluster is in compliance with the latest version of its policy.""" + + violations: Optional[Dict[str, str]] = None + """An object containing key-value mappings representing the first 200 policy validation errors. The + keys indicate the path where the policy validation error is occurring. The values indicate an + error message describing the policy validation error.""" + + def as_dict(self) -> dict: + """Serializes the ClusterCompliance into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance: + """Deserializes the ClusterCompliance from a dictionary.""" + return cls(cluster_id=d.get('cluster_id', None), + is_compliant=d.get('is_compliant', None), + violations=d.get('violations', None)) + + @dataclass class ClusterDetails: autoscale: Optional[AutoScale] = None @@ -1377,6 +1406,40 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest: cluster_policy_id=d.get('cluster_policy_id', None)) +@dataclass +class ClusterSettingsChange: + """Represents a change to the cluster settings required for the cluster to become compliant with + its policy.""" + + field: Optional[str] = None + """The field where this change would be made.""" + + new_value: Optional[str] = None + """The new value of this field after enforcing policy compliance (either a number, a boolean, or a + string) converted to a string. This is intended to be read by a human. The typed new value of + this field can be retrieved by reading the settings field in the API response.""" + + previous_value: Optional[str] = None + """The previous value of this field before enforcing policy compliance (either a number, a boolean, + or a string) converted to a string. This is intended to be read by a human. 
The type of the + field can be retrieved by reading the settings field in the API response.""" + + def as_dict(self) -> dict: + """Serializes the ClusterSettingsChange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.field is not None: body['field'] = self.field + if self.new_value is not None: body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange: + """Deserializes the ClusterSettingsChange from a dictionary.""" + return cls(field=d.get('field', None), + new_value=d.get('new_value', None), + previous_value=d.get('previous_value', None)) + + @dataclass class ClusterSize: autoscale: Optional[AutoScale] = None @@ -2982,6 +3045,52 @@ def from_dict(cls, d: Dict[str, any]) -> EditResponse: return cls() +@dataclass +class EnforceClusterComplianceRequest: + cluster_id: str + """The ID of the cluster you want to enforce policy compliance on.""" + + validate_only: Optional[bool] = None + """If set, previews the changes that would be made to a cluster to enforce compliance but does not + update the cluster.""" + + def as_dict(self) -> dict: + """Serializes the EnforceClusterComplianceRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.cluster_id is not None: body['cluster_id'] = self.cluster_id + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest: + """Deserializes the EnforceClusterComplianceRequest from a dictionary.""" + return cls(cluster_id=d.get('cluster_id', None), validate_only=d.get('validate_only', None)) + + +@dataclass +class EnforceClusterComplianceResponse: + changes: Optional[List[ClusterSettingsChange]] = None + """A list of changes that have been made to the cluster settings for the cluster to become + compliant with its policy.""" + + has_changes: Optional[bool] = None + """Whether any changes have been made to the cluster settings for the cluster to become compliant + with its policy.""" + + def as_dict(self) -> dict: + """Serializes the EnforceClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.changes: body['changes'] = [v.as_dict() for v in self.changes] + if self.has_changes is not None: body['has_changes'] = self.has_changes + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse: + """Deserializes the EnforceClusterComplianceResponse from a dictionary.""" + return cls(changes=_repeated_dict(d, 'changes', ClusterSettingsChange), + has_changes=d.get('has_changes', None)) + + @dataclass class Environment: """The environment entity used to preserve serverless environment side panel and jobs' environment @@ -3251,6 +3360,30 @@ def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo: return cls(destination=d.get('destination', None)) +@dataclass +class GetClusterComplianceResponse: + is_compliant: Optional[bool] = None + """Whether the cluster is compliant with its policy or not. Clusters could be out of compliance if + the policy was updated after the cluster was last edited.""" + + violations: Optional[Dict[str, str]] = None + """An object containing key-value mappings representing the first 200 policy validation errors. The + keys indicate the path where the policy validation error is occurring. 
The values indicate an + error message describing the policy validation error.""" + + def as_dict(self) -> dict: + """Serializes the GetClusterComplianceResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse: + """Deserializes the GetClusterComplianceResponse from a dictionary.""" + return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) + + @dataclass class GetClusterPermissionLevelsResponse: permission_levels: Optional[List[ClusterPermissionsDescription]] = None @@ -4600,6 +4733,35 @@ def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse: return cls(default_zone=d.get('default_zone', None), zones=d.get('zones', None)) +@dataclass +class ListClusterCompliancesResponse: + clusters: Optional[List[ClusterCompliance]] = None + """A list of clusters and their policy compliance statuses.""" + + next_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the next page of results. If the value is + "", it means no further results for the request.""" + + prev_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the previous page of results. If the + value is "", it means no further results for the request.""" + + def as_dict(self) -> dict: + """Serializes the ListClusterCompliancesResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse: + """Deserializes the ListClusterCompliancesResponse from a dictionary.""" + return cls(clusters=_repeated_dict(d, 'clusters', ClusterCompliance), + next_page_token=d.get('next_page_token', None), + prev_page_token=d.get('prev_page_token', None)) + + @dataclass class ListClustersFilterBy: cluster_sources: Optional[List[ClusterSource]] = None @@ -8584,6 +8746,116 @@ def uninstall(self, cluster_id: str, libraries: List[Library]): self._api.do('POST', '/api/2.0/libraries/uninstall', body=body, headers=headers) +class PolicyComplianceForClustersAPI: + """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your + workspace. + + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could + be out of compliance if their policy was updated after the cluster was last edited. + + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce + compliance API allows you to update a cluster to be compliant with the current version of its policy.""" + + def __init__(self, api_client): + self._api = api_client + + def enforce_compliance(self, + cluster_id: str, + *, + validate_only: Optional[bool] = None) -> EnforceClusterComplianceResponse: + """Enforce cluster policy compliance. + + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if + it is in a `RUNNING` or `TERMINATED` state. 
+ + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes + can take effect. + + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the + cluster is started, the new attributes will take effect. + + Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API. + Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. + + :param cluster_id: str + The ID of the cluster you want to enforce policy compliance on. + :param validate_only: bool (optional) + If set, previews the changes that would be made to a cluster to enforce compliance but does not + update the cluster. + + :returns: :class:`EnforceClusterComplianceResponse` + """ + body = {} + if cluster_id is not None: body['cluster_id'] = cluster_id + if validate_only is not None: body['validate_only'] = validate_only + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', + '/api/2.0/policies/clusters/enforce-compliance', + body=body, + headers=headers) + return EnforceClusterComplianceResponse.from_dict(res) + + def get_compliance(self, cluster_id: str) -> GetClusterComplianceResponse: + """Get cluster policy compliance. + + Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy + was updated after the cluster was last edited. + + :param cluster_id: str + The ID of the cluster to get the compliance status + + :returns: :class:`GetClusterComplianceResponse` + """ + + query = {} + if cluster_id is not None: query['cluster_id'] = cluster_id + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', '/api/2.0/policies/clusters/get-compliance', query=query, headers=headers) + return GetClusterComplianceResponse.from_dict(res) + + def list_compliance(self, + policy_id: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[ClusterCompliance]: + """List cluster policy compliance. + + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of + compliance if their policy was updated after the cluster was last edited. + + :param policy_id: str + Canonical unique identifier for the cluster policy. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + A page token that can be used to navigate to the next page or previous page as returned by + `next_page_token` or `prev_page_token`. + + :returns: Iterator over :class:`ClusterCompliance` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', + '/api/2.0/policies/clusters/list-compliance', + query=query, + headers=headers) + if 'clusters' in json: + for v in json['clusters']: + yield ClusterCompliance.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] + + class PolicyFamiliesAPI: """View available policy families. A policy family contains a policy definition providing best practices for configuring clusters for a particular use case. 
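
Before moving on to the dashboards changes, here is a rough sketch of driving the new `PolicyComplianceForClustersAPI` end to end — again assuming a configured workspace client; the cluster and policy IDs below are illustrative placeholders:

```python
from databricks.sdk import WorkspaceClient

# Assumes authentication is already configured (env vars or ~/.databrickscfg).
w = WorkspaceClient()

CLUSTER_ID = "0123-456789-abcdefgh"  # placeholder cluster ID
POLICY_ID = "ABC123DEF456"           # placeholder cluster policy ID

# Dry run: validate_only=True previews the settings changes needed for
# compliance without updating the cluster.
preview = w.policy_compliance_for_clusters.enforce_compliance(
    cluster_id=CLUSTER_ID, validate_only=True)
if preview.has_changes:
    for change in preview.changes or []:
        print(f"{change.field}: {change.previous_value!r} -> {change.new_value!r}")
    # Apply for real; note that a RUNNING cluster is restarted by this call.
    w.policy_compliance_for_clusters.enforce_compliance(cluster_id=CLUSTER_ID)

# Audit every cluster governed by one policy; the iterator follows
# next_page_token for you.
for c in w.policy_compliance_for_clusters.list_compliance(policy_id=POLICY_ID):
    if not c.is_compliant:
        print(c.cluster_id, c.violations)
```

The `PolicyComplianceForJobsAPI` added later in this patch mirrors the same enforce/get/list shape, keyed by `job_id` and `policy_id` instead of `cluster_id`.
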
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py index 28ddca569..7169531e5 100755 --- a/databricks/sdk/service/dashboards.py +++ b/databricks/sdk/service/dashboards.py @@ -27,10 +27,11 @@ class CreateDashboardRequest: parent_path: Optional[str] = None """The workspace path of the folder containing the dashboard. Includes leading slash and no - trailing slash.""" + trailing slash. This field is excluded in List Dashboards responses.""" serialized_dashboard: Optional[str] = None - """The contents of the dashboard in serialized string form.""" + """The contents of the dashboard in serialized string form. This field is excluded in List + Dashboards responses.""" warehouse_id: Optional[str] = None """The warehouse ID used to run the dashboard.""" @@ -154,23 +155,26 @@ class Dashboard: etag: Optional[str] = None """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard - has not been modified since the last read.""" + has not been modified since the last read. This field is excluded in List Dashboards responses.""" lifecycle_state: Optional[LifecycleState] = None """The state of the dashboard resource. Used for tracking trashed status.""" parent_path: Optional[str] = None """The workspace path of the folder containing the dashboard. Includes leading slash and no - trailing slash.""" + trailing slash. This field is excluded in List Dashboards responses.""" path: Optional[str] = None - """The workspace path of the dashboard asset, including the file name.""" + """The workspace path of the dashboard asset, including the file name. This field is excluded in + List Dashboards responses.""" serialized_dashboard: Optional[str] = None - """The contents of the dashboard in serialized string form.""" + """The contents of the dashboard in serialized string form. This field is excluded in List + Dashboards responses.""" update_time: Optional[str] = None - """The timestamp of when the dashboard was last updated by the user.""" + """The timestamp of when the dashboard was last updated by the user. This field is excluded in List + Dashboards responses.""" warehouse_id: Optional[str] = None """The warehouse ID used to run the dashboard.""" @@ -1020,10 +1024,11 @@ class UpdateDashboardRequest: etag: Optional[str] = None """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard - has not been modified since the last read.""" + has not been modified since the last read. This field is excluded in List Dashboards responses.""" serialized_dashboard: Optional[str] = None - """The contents of the dashboard in serialized string form.""" + """The contents of the dashboard in serialized string form. This field is excluded in List + Dashboards responses.""" warehouse_id: Optional[str] = None """The warehouse ID used to run the dashboard.""" @@ -1300,9 +1305,10 @@ def create(self, The display name of the dashboard. :param parent_path: str (optional) The workspace path of the folder containing the dashboard. Includes leading slash and no trailing - slash. + slash. This field is excluded in List Dashboards responses. :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. + The contents of the dashboard in serialized string form. This field is excluded in List Dashboards + responses. :param warehouse_id: str (optional) The warehouse ID used to run the dashboard. @@ -1714,9 +1720,10 @@ def update(self, The display name of the dashboard. 
:param etag: str (optional) The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has - not been modified since the last read. + not been modified since the last read. This field is excluded in List Dashboards responses. :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. + The contents of the dashboard in serialized string form. This field is excluded in List Dashboards + responses. :param warehouse_id: str (optional) The warehouse ID used to run the dashboard. diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py index 6e5b34ad1..ea1bfd880 100755 --- a/databricks/sdk/service/jobs.py +++ b/databricks/sdk/service/jobs.py @@ -58,8 +58,8 @@ def from_dict(cls, d: Dict[str, any]) -> BaseJob: class BaseRun: attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run - has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy - (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the + has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy + (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" @@ -115,6 +115,11 @@ class BaseRun: job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" + job_run_id: Optional[int] = None + """ID of the job run that this run belongs to. For legacy and single-task job runs the field is + populated with the job run ID. For task runs, the field is populated with the ID of the job run + that the task run belongs to.""" + number_in_job: Optional[int] = None """A unique identifier for this job run. 
This is set to the same value as `run_id`.""" @@ -201,6 +206,7 @@ def as_dict(self) -> dict: if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id if self.number_in_job is not None: body['number_in_job'] = self.number_in_job if self.original_attempt_run_id is not None: body['original_attempt_run_id'] = self.original_attempt_run_id @@ -236,6 +242,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun: job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), + job_run_id=d.get('job_run_id', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters), @@ -827,6 +834,96 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse: return cls() +@dataclass +class EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: + """Represents a change to the job cluster's settings that would be required for the job clusters to + become compliant with their policies.""" + + field: Optional[str] = None + """The field where this change would be made, prepended with the job cluster key.""" + + new_value: Optional[str] = None + """The new value of this field after enforcing policy compliance (either a number, a boolean, or a + string) converted to a string. This is intended to be read by a human. The typed new value of + this field can be retrieved by reading the settings field in the API response.""" + + previous_value: Optional[str] = None + """The previous value of this field before enforcing policy compliance (either a number, a boolean, + or a string) converted to a string. This is intended to be read by a human. 
The type of the + field can be retrieved by reading the settings field in the API response.""" + + def as_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.field is not None: body['field'] = self.field + if self.new_value is not None: body['new_value'] = self.new_value + if self.previous_value is not None: body['previous_value'] = self.previous_value + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange: + """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary.""" + return cls(field=d.get('field', None), + new_value=d.get('new_value', None), + previous_value=d.get('previous_value', None)) + + +@dataclass +class EnforcePolicyComplianceRequest: + job_id: int + """The ID of the job you want to enforce policy compliance on.""" + + validate_only: Optional[bool] = None + """If set, previews changes made to the job to comply with its policy, but does not update the job.""" + + def as_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceRequest into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.job_id is not None: body['job_id'] = self.job_id + if self.validate_only is not None: body['validate_only'] = self.validate_only + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest: + """Deserializes the EnforcePolicyComplianceRequest from a dictionary.""" + return cls(job_id=d.get('job_id', None), validate_only=d.get('validate_only', None)) + + +@dataclass +class EnforcePolicyComplianceResponse: + has_changes: Optional[bool] = None + """Whether any changes have been made to the job cluster settings for the job to become compliant + with its policies.""" + + job_cluster_changes: Optional[List[EnforcePolicyComplianceForJobResponseJobClusterSettingsChange]] = None + """A list of job cluster changes that have been made to the job’s cluster settings in order for + all job clusters to become compliant with their policies.""" + + settings: Optional[JobSettings] = None + """Updated job settings after policy enforcement. Policy enforcement only applies to job clusters + that are created when running the job (which are specified in new_cluster) and does not apply to + existing all-purpose clusters. 
Updated job settings are derived by applying policy default + values to the existing job clusters in order to satisfy policy requirements.""" + + def as_dict(self) -> dict: + """Serializes the EnforcePolicyComplianceResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.has_changes is not None: body['has_changes'] = self.has_changes + if self.job_cluster_changes: + body['job_cluster_changes'] = [v.as_dict() for v in self.job_cluster_changes] + if self.settings: body['settings'] = self.settings.as_dict() + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse: + """Deserializes the EnforcePolicyComplianceResponse from a dictionary.""" + return cls(has_changes=d.get('has_changes', None), + job_cluster_changes=_repeated_dict( + d, 'job_cluster_changes', + EnforcePolicyComplianceForJobResponseJobClusterSettingsChange), + settings=_from_dict(d, 'settings', JobSettings)) + + @dataclass class ExportRunOutput: """Run was exported successfully.""" @@ -914,7 +1011,8 @@ class ForEachTask: """Configuration for the task that will be run for each element in the array""" concurrency: Optional[int] = None - """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100.""" + """An optional maximum allowed number of concurrent runs of the task. Set this value if you want to + be able to execute multiple runs of the task concurrently.""" def as_dict(self) -> dict: """Serializes the ForEachTask into a dictionary suitable for use as a JSON request body.""" @@ -1024,6 +1122,32 @@ def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse: return cls(permission_levels=_repeated_dict(d, 'permission_levels', JobPermissionsDescription)) +@dataclass +class GetPolicyComplianceResponse: + is_compliant: Optional[bool] = None + """Whether the job is compliant with its policies or not. Jobs could be out of compliance if a + policy they are using was updated after the job was last edited and some of its job clusters no + longer comply with their updated policies.""" + + violations: Optional[Dict[str, str]] = None + """An object containing key-value mappings representing the first 200 policy validation errors. The + keys indicate the path where the policy validation error is occurring. An identifier for the job + cluster is prepended to the path. 
The values indicate an error message describing the policy + validation error.""" + + def as_dict(self) -> dict: + """Serializes the GetPolicyComplianceResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.violations: body['violations'] = self.violations + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse: + """Deserializes the GetPolicyComplianceResponse from a dictionary.""" + return cls(is_compliant=d.get('is_compliant', None), violations=d.get('violations', None)) + + class GitProvider(Enum): AWS_CODE_COMMIT = 'awsCodeCommit' @@ -1260,6 +1384,36 @@ def from_dict(cls, d: Dict[str, any]) -> JobCluster: new_cluster=_from_dict(d, 'new_cluster', compute.ClusterSpec)) +@dataclass +class JobCompliance: + job_id: int + """Canonical unique identifier for a job.""" + + is_compliant: Optional[bool] = None + """Whether this job is in compliance with the latest version of its policy.""" + + violations: Optional[Dict[str, str]] = None + """An object containing key-value mappings representing the first 200 policy validation errors. The + keys indicate the path where the policy validation error is occurring. An identifier for the job + cluster is prepended to the path. The values indicate an error message describing the policy + validation error.""" + + def as_dict(self) -> dict: + """Serializes the JobCompliance into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.is_compliant is not None: body['is_compliant'] = self.is_compliant + if self.job_id is not None: body['job_id'] = self.job_id + if self.violations: body['violations'] = self.violations + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> JobCompliance: + """Deserializes the JobCompliance from a dictionary.""" + return cls(is_compliant=d.get('is_compliant', None), + job_id=d.get('job_id', None), + violations=d.get('violations', None)) + + @dataclass class JobDeployment: kind: JobDeploymentKind @@ -1874,6 +2028,35 @@ def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules: return cls(rules=_repeated_dict(d, 'rules', JobsHealthRule)) +@dataclass +class ListJobComplianceForPolicyResponse: + jobs: Optional[List[JobCompliance]] = None + """A list of jobs and their policy compliance statuses.""" + + next_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the next page of results. If this field + is not in the response, it means no further results for the request.""" + + prev_page_token: Optional[str] = None + """This field represents the pagination token to retrieve the previous page of results. 
If this + field is not in the response, it means no further results for the request.""" + + def as_dict(self) -> dict: + """Serializes the ListJobComplianceForPolicyResponse into a dictionary suitable for use as a JSON request body.""" + body = {} + if self.jobs: body['jobs'] = [v.as_dict() for v in self.jobs] + if self.next_page_token is not None: body['next_page_token'] = self.next_page_token + if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token + return body + + @classmethod + def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse: + """Deserializes the ListJobComplianceForPolicyResponse from a dictionary.""" + return cls(jobs=_repeated_dict(d, 'jobs', JobCompliance), + next_page_token=d.get('next_page_token', None), + prev_page_token=d.get('prev_page_token', None)) + + @dataclass class ListJobsResponse: """List of jobs was retrieved successfully.""" @@ -2568,8 +2751,8 @@ class Run: attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run - has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy - (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the + has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy + (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" @@ -2628,6 +2811,11 @@ class Run: job_parameters: Optional[List[JobParameter]] = None """Job-level parameters used in the run""" + job_run_id: Optional[int] = None + """ID of the job run that this run belongs to. For legacy and single-task job runs the field is + populated with the job run ID. For task runs, the field is populated with the ID of the job run + that the task run belongs to.""" + next_page_token: Optional[str] = None """A token that can be used to list the next page of sub-resources.""" @@ -2721,6 +2909,7 @@ def as_dict(self) -> dict: if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters] if self.job_id is not None: body['job_id'] = self.job_id if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters] + if self.job_run_id is not None: body['job_run_id'] = self.job_run_id if self.next_page_token is not None: body['next_page_token'] = self.next_page_token if self.number_in_job is not None: body['number_in_job'] = self.number_in_job if self.original_attempt_run_id is not None: @@ -2759,6 +2948,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run: job_clusters=_repeated_dict(d, 'job_clusters', JobCluster), job_id=d.get('job_id', None), job_parameters=_repeated_dict(d, 'job_parameters', JobParameter), + job_run_id=d.get('job_run_id', None), next_page_token=d.get('next_page_token', None), number_in_job=d.get('number_in_job', None), original_attempt_run_id=d.get('original_attempt_run_id', None), @@ -2832,7 +3022,8 @@ class RunForEachTask: """Configuration for the task that will be run for each element in the array""" concurrency: Optional[int] = None - """Controls the number of active iterations task runs. Default is 20, maximum allowed is 100.""" + """An optional maximum allowed number of concurrent runs of the task. 
Set this value if you want to + be able to execute multiple runs of the task concurrently.""" stats: Optional[ForEachStats] = None """Read only field. Populated for GetRun and ListRuns RPC calls and stores the execution stats of @@ -3429,8 +3620,8 @@ class RunTask: attempt_number: Optional[int] = None """The sequence number of this run attempt for a triggered job run. The initial attempt of a run - has an attempt_number of 0\. If the initial run attempt fails, and the job has a retry policy - (`max_retries` \> 0), subsequent runs are created with an `original_attempt_run_id` of the + has an attempt_number of 0. If the initial run attempt fails, and the job has a retry policy + (`max_retries` > 0), subsequent runs are created with an `original_attempt_run_id` of the original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job.""" @@ -6127,3 +6318,102 @@ def update_permissions( res = self._api.do('PATCH', f'/api/2.0/permissions/jobs/{job_id}', body=body, headers=headers) return JobPermissions.from_dict(res) + + +class PolicyComplianceForJobsAPI: + """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. + This API currently only supports compliance controls for cluster policies. + + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster + policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last + edited. The job is considered out of compliance if any of its clusters no longer comply with their updated + policies. + + The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce + compliance API allows you to update a job so that it becomes compliant with all of its policies.""" + + def __init__(self, api_client): + self._api = api_client + + def enforce_compliance(self, + job_id: int, + *, + validate_only: Optional[bool] = None) -> EnforcePolicyComplianceResponse: + """Enforce job policy compliance. + + Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) + are compliant with the current versions of their respective cluster policies. All-purpose clusters + used in the job will not be updated. + + :param job_id: int + The ID of the job you want to enforce policy compliance on. + :param validate_only: bool (optional) + If set, previews changes made to the job to comply with its policy, but does not update the job. + + :returns: :class:`EnforcePolicyComplianceResponse` + """ + body = {} + if job_id is not None: body['job_id'] = job_id + if validate_only is not None: body['validate_only'] = validate_only + headers = {'Accept': 'application/json', 'Content-Type': 'application/json', } + + res = self._api.do('POST', '/api/2.0/policies/jobs/enforce-compliance', body=body, headers=headers) + return EnforcePolicyComplianceResponse.from_dict(res) + + def get_compliance(self, job_id: int) -> GetPolicyComplianceResponse: + """Get job policy compliance. + + Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy + they use was updated after the job was last edited and some of its job clusters no longer comply with + their updated policies. + + :param job_id: int + The ID of the job whose compliance status you are requesting. 
+ + :returns: :class:`GetPolicyComplianceResponse` + """ + + query = {} + if job_id is not None: query['job_id'] = job_id + headers = {'Accept': 'application/json', } + + res = self._api.do('GET', '/api/2.0/policies/jobs/get-compliance', query=query, headers=headers) + return GetPolicyComplianceResponse.from_dict(res) + + def list_compliance(self, + policy_id: str, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None) -> Iterator[JobCompliance]: + """List job policy compliance. + + Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of + compliance if a cluster policy they use was updated after the job was last edited and its job clusters + no longer comply with the updated policy. + + :param policy_id: str + Canonical unique identifier for the cluster policy. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + A page token that can be used to navigate to the next page or previous page as returned by + `next_page_token` or `prev_page_token`. + + :returns: Iterator over :class:`JobCompliance` + """ + + query = {} + if page_size is not None: query['page_size'] = page_size + if page_token is not None: query['page_token'] = page_token + if policy_id is not None: query['policy_id'] = policy_id + headers = {'Accept': 'application/json', } + + while True: + json = self._api.do('GET', '/api/2.0/policies/jobs/list-compliance', query=query, headers=headers) + if 'jobs' in json: + for v in json['jobs']: + yield JobCompliance.from_dict(v) + if 'next_page_token' not in json or not json['next_page_token']: + return + query['page_token'] = json['next_page_token'] diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py index f2526909f..b77e5b5e6 100755 --- a/databricks/sdk/service/sql.py +++ b/databricks/sdk/service/sql.py @@ -600,68 +600,6 @@ class ColumnInfoTypeName(Enum): USER_DEFINED_TYPE = 'USER_DEFINED_TYPE' -@dataclass -class ContextFilter: - dbsql_alert_id: Optional[str] = None - """Databricks SQL Alert id""" - - dbsql_dashboard_id: Optional[str] = None - """Databricks SQL Dashboard id""" - - dbsql_query_id: Optional[str] = None - """Databricks SQL Query id""" - - dbsql_session_id: Optional[str] = None - """Databricks SQL Query session id""" - - job_id: Optional[str] = None - """Databricks Workflows id""" - - job_run_id: Optional[str] = None - """Databricks Workflows task run id""" - - lakeview_dashboard_id: Optional[str] = None - """Databricks Lakeview Dashboard id""" - - notebook_cell_run_id: Optional[str] = None - """Databricks Notebook runnableCommandId""" - - notebook_id: Optional[str] = None - """Databricks Notebook id""" - - statement_ids: Optional[List[str]] = None - """Databricks Query History statement ids.""" - - def as_dict(self) -> dict: - """Serializes the ContextFilter into a dictionary suitable for use as a JSON request body.""" - body = {} - if self.dbsql_alert_id is not None: body['dbsql_alert_id'] = self.dbsql_alert_id - if self.dbsql_dashboard_id is not None: body['dbsql_dashboard_id'] = self.dbsql_dashboard_id - if self.dbsql_query_id is not None: body['dbsql_query_id'] = self.dbsql_query_id - if self.dbsql_session_id is not None: body['dbsql_session_id'] = self.dbsql_session_id - if self.job_id is not None: body['job_id'] = self.job_id - if self.job_run_id is not None: body['job_run_id'] = 
self.job_run_id - if self.lakeview_dashboard_id is not None: body['lakeview_dashboard_id'] = self.lakeview_dashboard_id - if self.notebook_cell_run_id is not None: body['notebook_cell_run_id'] = self.notebook_cell_run_id - if self.notebook_id is not None: body['notebook_id'] = self.notebook_id - if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] - return body - - @classmethod - def from_dict(cls, d: Dict[str, any]) -> ContextFilter: - """Deserializes the ContextFilter from a dictionary.""" - return cls(dbsql_alert_id=d.get('dbsql_alert_id', None), - dbsql_dashboard_id=d.get('dbsql_dashboard_id', None), - dbsql_query_id=d.get('dbsql_query_id', None), - dbsql_session_id=d.get('dbsql_session_id', None), - job_id=d.get('job_id', None), - job_run_id=d.get('job_run_id', None), - lakeview_dashboard_id=d.get('lakeview_dashboard_id', None), - notebook_cell_run_id=d.get('notebook_cell_run_id', None), - notebook_id=d.get('notebook_id', None), - statement_ids=d.get('statement_ids', None)) - - @dataclass class CreateAlert: name: str @@ -3434,12 +3372,12 @@ def from_dict(cls, d: Dict[str, any]) -> QueryEditContent: @dataclass class QueryFilter: - context_filter: Optional[ContextFilter] = None - """Filter by one or more property describing where the query was generated""" - query_start_time_range: Optional[TimeRange] = None """A range filter for query submitted time. The time range must be <= 30 days.""" + statement_ids: Optional[List[str]] = None + """A list of statement IDs.""" + statuses: Optional[List[QueryStatus]] = None user_ids: Optional[List[int]] = None @@ -3451,8 +3389,8 @@ class QueryFilter: def as_dict(self) -> dict: """Serializes the QueryFilter into a dictionary suitable for use as a JSON request body.""" body = {} - if self.context_filter: body['context_filter'] = self.context_filter.as_dict() if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range.as_dict() + if self.statement_ids: body['statement_ids'] = [v for v in self.statement_ids] if self.statuses: body['statuses'] = [v.value for v in self.statuses] if self.user_ids: body['user_ids'] = [v for v in self.user_ids] if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids] @@ -3461,8 +3399,8 @@ def as_dict(self) -> dict: @classmethod def from_dict(cls, d: Dict[str, any]) -> QueryFilter: """Deserializes the QueryFilter from a dictionary.""" - return cls(context_filter=_from_dict(d, 'context_filter', ContextFilter), - query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), + return cls(query_start_time_range=_from_dict(d, 'query_start_time_range', TimeRange), + statement_ids=d.get('statement_ids', None), statuses=_repeated_enum(d, 'statuses', QueryStatus), user_ids=d.get('user_ids', None), warehouse_ids=d.get('warehouse_ids', None)) @@ -3944,12 +3882,6 @@ class QuerySource: notebook_id: Optional[str] = None - pipeline_id: Optional[str] = None - """Id associated with a DLT pipeline""" - - pipeline_update_id: Optional[str] = None - """Id associated with a DLT update""" - query_tags: Optional[str] = None """String provided by a customer that'll help them identify the query""" @@ -3984,8 +3916,6 @@ def as_dict(self) -> dict: if self.job_id is not None: body['job_id'] = self.job_id if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value if self.notebook_id is not None: body['notebook_id'] = self.notebook_id - if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id - if self.pipeline_update_id 
is not None: body['pipeline_update_id'] = self.pipeline_update_id if self.query_tags is not None: body['query_tags'] = self.query_tags if self.run_id is not None: body['run_id'] = self.run_id if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id @@ -4012,8 +3942,6 @@ def from_dict(cls, d: Dict[str, any]) -> QuerySource: job_id=d.get('job_id', None), job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager), notebook_id=d.get('notebook_id', None), - pipeline_id=d.get('pipeline_id', None), - pipeline_update_id=d.get('pipeline_update_id', None), query_tags=d.get('query_tags', None), run_id=d.get('run_id', None), runnable_command_id=d.get('runnable_command_id', None), @@ -6558,8 +6486,8 @@ def update(self, class QueryHistoryAPI: - """A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless - compute, and DLT.""" + """A service responsible for storing and retrieving the list of queries run against SQL endpoints and + serverless compute.""" def __init__(self, api_client): self._api = api_client @@ -6567,11 +6495,12 @@ def __init__(self, api_client): def list(self, *, filter_by: Optional[QueryFilter] = None, + include_metrics: Optional[bool] = None, max_results: Optional[int] = None, page_token: Optional[str] = None) -> ListQueriesResponse: """List Queries. - List the history of queries through SQL warehouses, serverless compute, and DLT. + List the history of queries through SQL warehouses, and serverless compute. You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used @@ -6579,6 +6508,9 @@ def list(self, :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. + :param include_metrics: bool (optional) + Whether to include the query metrics with each query. Only use this for a small subset of queries + (max_results). Defaults to false. :param max_results: int (optional) Limit the number of results returned in one page. Must be less than 1000 and the default is 100. :param page_token: str (optional) @@ -6591,6 +6523,7 @@ def list(self, query = {} if filter_by is not None: query['filter_by'] = filter_by.as_dict() + if include_metrics is not None: query['include_metrics'] = include_metrics if max_results is not None: query['max_results'] = max_results if page_token is not None: query['page_token'] = page_token headers = {'Accept': 'application/json', } diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index e187e0aa6..c3d10d7c4 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.30.0' +__version__ = '0.31.0' diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index d1195dd44..d15edc813 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -249,9 +249,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: CONNECTION_BIGQUERY :value: "CONNECTION_BIGQUERY" + .. py:attribute:: CONNECTION_BUILTIN_HIVE_METASTORE + :value: "CONNECTION_BUILTIN_HIVE_METASTORE" + .. py:attribute:: CONNECTION_DATABRICKS :value: "CONNECTION_DATABRICKS" + .. py:attribute:: CONNECTION_EXTERNAL_HIVE_METASTORE + :value: "CONNECTION_EXTERNAL_HIVE_METASTORE" + + .. py:attribute:: CONNECTION_GLUE + :value: "CONNECTION_GLUE" + .. 
py:attribute:: CONNECTION_MYSQL :value: "CONNECTION_MYSQL" @@ -283,6 +292,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: DATABRICKS :value: "DATABRICKS" + .. py:attribute:: GLUE + :value: "GLUE" + + .. py:attribute:: HIVE_METASTORE + :value: "HIVE_METASTORE" + .. py:attribute:: MYSQL :value: "MYSQL" @@ -672,6 +687,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: INTERNAL_AND_EXTERNAL :value: "INTERNAL_AND_EXTERNAL" +.. autoclass:: GetQuotaResponse + :members: + :undoc-members: + .. py:class:: IsolationMode Whether the current securable is accessible from all workspaces or a specific set of workspaces. @@ -714,6 +733,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListQuotasResponse + :members: + :undoc-members: + .. autoclass:: ListRegisteredModelsResponse :members: :undoc-members: @@ -1149,6 +1172,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QuotaInfo + :members: + :undoc-members: + .. autoclass:: RegisteredModelAlias :members: :undoc-members: diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst index 7b280c519..f4e175920 100644 --- a/docs/dbdataclasses/compute.rst +++ b/docs/dbdataclasses/compute.rst @@ -103,6 +103,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ClusterCompliance + :members: + :undoc-members: + .. autoclass:: ClusterDetails :members: :undoc-members: @@ -179,6 +183,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ClusterSettingsChange + :members: + :undoc-members: + .. autoclass:: ClusterSize :members: :undoc-members: @@ -443,6 +451,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnforceClusterComplianceRequest + :members: + :undoc-members: + +.. autoclass:: EnforceClusterComplianceResponse + :members: + :undoc-members: + .. autoclass:: Environment :members: :undoc-members: @@ -565,6 +581,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetClusterComplianceResponse + :members: + :undoc-members: + .. autoclass:: GetClusterPermissionLevelsResponse :members: :undoc-members: @@ -817,6 +837,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListClusterCompliancesResponse + :members: + :undoc-members: + .. autoclass:: ListClustersFilterBy :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 0f501f77a..0140be948 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -111,6 +111,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: EnforcePolicyComplianceForJobResponseJobClusterSettingsChange + :members: + :undoc-members: + +.. autoclass:: EnforcePolicyComplianceRequest + :members: + :undoc-members: + +.. autoclass:: EnforcePolicyComplianceResponse + :members: + :undoc-members: + .. 
autoclass:: ExportRunOutput :members: :undoc-members: @@ -147,6 +159,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: GetPolicyComplianceResponse + :members: + :undoc-members: + .. py:class:: GitProvider .. py:attribute:: AWS_CODE_COMMIT @@ -197,6 +213,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: JobCompliance + :members: + :undoc-members: + .. autoclass:: JobDeployment :members: :undoc-members: @@ -329,6 +349,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: ListJobComplianceForPolicyResponse + :members: + :undoc-members: + .. autoclass:: ListJobsResponse :members: :undoc-members: diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst index b39ea9edf..255123067 100644 --- a/docs/dbdataclasses/sql.rst +++ b/docs/dbdataclasses/sql.rst @@ -189,10 +189,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: USER_DEFINED_TYPE :value: "USER_DEFINED_TYPE" -.. autoclass:: ContextFilter - :members: - :undoc-members: - .. autoclass:: CreateAlert :members: :undoc-members: diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst index 3f6114f18..365007b09 100644 --- a/docs/workspace/catalog/external_locations.rst +++ b/docs/workspace/catalog/external_locations.rst @@ -15,7 +15,7 @@ To create external locations, you must be a metastore admin or a user with the **CREATE_EXTERNAL_LOCATION** privilege. - .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo + .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo Usage: @@ -63,6 +63,10 @@ User-provided free-form text description. :param encryption_details: :class:`EncryptionDetails` (optional) Encryption options that apply to clients connecting to cloud storage. + :param fallback: bool (optional) + Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient. :param read_only: bool (optional) Indicates whether the external location is read-only. :param skip_validation: bool (optional) @@ -163,7 +167,7 @@ :returns: Iterator over :class:`ExternalLocationInfo` - .. py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo + .. 
py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo Usage: @@ -210,6 +214,10 @@ Name of the storage credential used with this location. :param encryption_details: :class:`EncryptionDetails` (optional) Encryption options that apply to clients connecting to cloud storage. + :param fallback: bool (optional) + Indicates whether fallback mode is enabled for this external location. When fallback mode is + enabled, the access to the location falls back to cluster credentials if UC credentials are not + sufficient. :param force: bool (optional) Force update even if changing url invalidates dependent external tables or mounts. :param isolation_mode: :class:`IsolationMode` (optional) diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst index 935804016..3bf2522d8 100644 --- a/docs/workspace/catalog/index.rst +++ b/docs/workspace/catalog/index.rst @@ -18,6 +18,7 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas, online_tables quality_monitors registered_models + resource_quotas schemas storage_credentials system_schemas diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst new file mode 100644 index 000000000..3396011f0 --- /dev/null +++ b/docs/workspace/catalog/resource_quotas.rst @@ -0,0 +1,45 @@ +``w.resource_quotas``: Resource Quotas +====================================== +.. currentmodule:: databricks.sdk.service.catalog + +.. py:class:: ResourceQuotasAPI + + Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that + can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per + metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and + limits. For more information on resource quotas see the [Unity Catalog documentation]. + + [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas + + .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse + + Get information for a single resource quota. + + The GetQuota API returns usage information for a single resource quota, defined as a child-parent + pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered + asynchronously. The updated count might not be returned in the first call. + + :param parent_securable_type: str + Securable type of the quota parent. + :param parent_full_name: str + Full name of the parent resource. Provide the metastore ID if the parent is a metastore. + :param quota_name: str + Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix. + + :returns: :class:`GetQuotaResponse` + + + .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo] + + List all resource quotas under a metastore. + + ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the + counts returned. This API does not trigger a refresh of quota counts. 
+ + :param max_results: int (optional) + The number of quotas to return. + :param page_token: str (optional) + Opaque token for the next page of results. + + :returns: Iterator over :class:`QuotaInfo` + \ No newline at end of file diff --git a/docs/workspace/compute/index.rst b/docs/workspace/compute/index.rst index b13a21610..858cf70ff 100644 --- a/docs/workspace/compute/index.rst +++ b/docs/workspace/compute/index.rst @@ -14,4 +14,5 @@ Use and configure compute for Databricks instance_pools instance_profiles libraries + policy_compliance_for_clusters policy_families \ No newline at end of file diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst new file mode 100644 index 000000000..90c3aeb98 --- /dev/null +++ b/docs/workspace/compute/policy_compliance_for_clusters.rst @@ -0,0 +1,71 @@ +``w.policy_compliance_for_clusters``: Policy compliance for clusters +==================================================================== +.. currentmodule:: databricks.sdk.service.compute + +.. py:class:: PolicyComplianceForClustersAPI + + The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your + workspace. + + A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could + be out of compliance if their policy was updated after the cluster was last edited. + + The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce + compliance API allows you to update a cluster to be compliant with the current version of its policy. + + .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse + + Enforce cluster policy compliance. + + Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if + it is in a `RUNNING` or `TERMINATED` state. + + If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes + can take effect. + + If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the + cluster is started, the new attributes will take effect. + + Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API. + Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs. + + :param cluster_id: str + The ID of the cluster you want to enforce policy compliance on. + :param validate_only: bool (optional) + If set, previews the changes that would be made to a cluster to enforce compliance but does not + update the cluster. + + :returns: :class:`EnforceClusterComplianceResponse` + + + .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse + + Get cluster policy compliance. + + Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy + was updated after the cluster was last edited. + + :param cluster_id: str + The ID of the cluster to get the compliance status + + :returns: :class:`GetClusterComplianceResponse` + + + .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance] + + List cluster policy compliance. + + Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of + compliance if their policy was updated after the cluster was last edited. 
+ + :param policy_id: str + Canonical unique identifier for the cluster policy. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + A page token that can be used to navigate to the next page or previous page as returned by + `next_page_token` or `prev_page_token`. + + :returns: Iterator over :class:`ClusterCompliance` + \ No newline at end of file diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst index d3257b79e..92aa8c0e3 100644 --- a/docs/workspace/dashboards/lakeview.rst +++ b/docs/workspace/dashboards/lakeview.rst @@ -17,9 +17,10 @@ The display name of the dashboard. :param parent_path: str (optional) The workspace path of the folder containing the dashboard. Includes leading slash and no trailing - slash. + slash. This field is excluded in List Dashboards responses. :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. + The contents of the dashboard in serialized string form. This field is excluded in List Dashboards + responses. :param warehouse_id: str (optional) The warehouse ID used to run the dashboard. @@ -257,9 +258,10 @@ The display name of the dashboard. :param etag: str (optional) The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has - not been modified since the last read. + not been modified since the last read. This field is excluded in List Dashboards responses. :param serialized_dashboard: str (optional) - The contents of the dashboard in serialized string form. + The contents of the dashboard in serialized string form. This field is excluded in List Dashboards + responses. :param warehouse_id: str (optional) The warehouse ID used to run the dashboard. diff --git a/docs/workspace/jobs/index.rst b/docs/workspace/jobs/index.rst index a8f242ea2..0729f8dce 100644 --- a/docs/workspace/jobs/index.rst +++ b/docs/workspace/jobs/index.rst @@ -7,4 +7,5 @@ Schedule automated jobs on Databricks Workspaces .. toctree:: :maxdepth: 1 - jobs \ No newline at end of file + jobs + policy_compliance_for_jobs \ No newline at end of file diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst new file mode 100644 index 000000000..69f211552 --- /dev/null +++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst @@ -0,0 +1,66 @@ +``w.policy_compliance_for_jobs``: Policy compliance for jobs +============================================================ +.. currentmodule:: databricks.sdk.service.jobs + +.. py:class:: PolicyComplianceForJobsAPI + + The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace. + This API currently only supports compliance controls for cluster policies. + + A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster + policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last + edited. The job is considered out of compliance if any of its clusters no longer comply with their updated + policies. + + The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce + compliance API allows you to update a job so that it becomes compliant with all of its policies. + + .. 
py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse + + Enforce job policy compliance. + + Updates a job so the job clusters that are created when running the job (specified in `new_cluster`) + are compliant with the current versions of their respective cluster policies. All-purpose clusters + used in the job will not be updated. + + :param job_id: int + The ID of the job you want to enforce policy compliance on. + :param validate_only: bool (optional) + If set, previews changes made to the job to comply with its policy, but does not update the job. + + :returns: :class:`EnforcePolicyComplianceResponse` + + + .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse + + Get job policy compliance. + + Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy + they use was updated after the job was last edited and some of its job clusters no longer comply with + their updated policies. + + :param job_id: int + The ID of the job whose compliance status you are requesting. + + :returns: :class:`GetPolicyComplianceResponse` + + + .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance] + + List job policy compliance. + + Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of + compliance if a cluster policy they use was updated after the job was last edited and its job clusters + no longer comply with the updated policy. + + :param policy_id: str + Canonical unique identifier for the cluster policy. + :param page_size: int (optional) + Use this field to specify the maximum number of results to be returned by the server. The server may + further constrain the maximum number of results returned in a single page. + :param page_token: str (optional) + A page token that can be used to navigate to the next page or previous page as returned by + `next_page_token` or `prev_page_token`. + + :returns: Iterator over :class:`JobCompliance` + \ No newline at end of file diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst index 5fa003c0e..2f5520cdf 100644 --- a/docs/workspace/sql/query_history.rst +++ b/docs/workspace/sql/query_history.rst @@ -4,10 +4,10 @@ .. py:class:: QueryHistoryAPI - A service responsible for storing and retrieving the list of queries run against SQL endpoints, serverless - compute, and DLT. + A service responsible for storing and retrieving the list of queries run against SQL endpoints and + serverless compute. - .. py:method:: list( [, filter_by: Optional[QueryFilter], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse + .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse Usage: @@ -24,7 +24,7 @@ List Queries. - List the history of queries through SQL warehouses, serverless compute, and DLT. + List the history of queries through SQL warehouses, and serverless compute. You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are returned first (up to max_results in request). The pagination token returned in response can be used @@ -32,6 +32,9 @@ :param filter_by: :class:`QueryFilter` (optional) A filter to limit query history results. This field is optional. 
+    :param include_metrics: bool (optional)
+      Whether to include the query metrics with each query. Only use this for a small subset of queries
+      (max_results). Defaults to false.
     :param max_results: int (optional)
       Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
     :param page_token: str (optional)

From 5893d4ddd3981445937c997710a1ff4b3c972d3f Mon Sep 17 00:00:00 2001
From: Parth Bansal
Date: Tue, 27 Aug 2024 17:21:33 +0200
Subject: [PATCH 026/136] [Fix] Fix `DatabricksConfig.copy` when authenticated
 with OAuth (#723)

## Changes
`DatabricksCliTokenSource().token()` itself can't be copied, so a deep
copy of `Config` could not be performed. This change adds a wrapper
function that can be copied, so `Config.deep_copy()` works again.

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 databricks/sdk/credentials_provider.py |  5 +++-
 tests/test_config.py                   | 39 ++++++++++++++++++++++++++
 2 files changed, 43 insertions(+), 1 deletion(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index cfdf80e0d..860a06ce4 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -607,7 +607,10 @@ def inner() -> Dict[str, str]:
         token = token_source.token()
         return {'Authorization': f'{token.token_type} {token.access_token}'}

-    return OAuthCredentialsProvider(inner, token_source.token)
+    def token() -> Token:
+        return token_source.token()
+
+    return OAuthCredentialsProvider(inner, token)


 class MetadataServiceTokenSource(Refreshable):
diff --git a/tests/test_config.py b/tests/test_config.py
index 4bab85cf1..2eac6d2f8 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,10 +1,15 @@
 import os
+import pathlib
 import platform
+import random
+import string
+from datetime import datetime

 import pytest

 from databricks.sdk import useragent
 from databricks.sdk.config import Config, with_product, with_user_agent_extra
+from databricks.sdk.credentials_provider import Token
 from databricks.sdk.version import __version__

 from .conftest import noop_credentials, set_az_path
@@ -79,6 +84,40 @@ def test_config_copy_deep_copies_user_agent_other_info(config):
     useragent._reset_extra(original_extra)


+def test_config_deep_copy(monkeypatch, mocker, tmp_path):
+    mocker.patch('databricks.sdk.credentials_provider.CliTokenSource.refresh',
+                 return_value=Token(access_token='token',
+                                    token_type='Bearer',
+                                    expiry=datetime(2023, 5, 22, 0, 0, 0)))
+
+    write_large_dummy_executable(tmp_path)
+    monkeypatch.setenv('PATH', tmp_path.as_posix())
+
+    config = Config(host="https://abc123.azuredatabricks.net", auth_type="databricks-cli")
+    config_copy = config.deep_copy()
+    assert config_copy.host == config.host
+
+
+def write_large_dummy_executable(path: pathlib.Path):
+    cli = path.joinpath('databricks')
+
+    # Generate a long random string to inflate the file size.
+    random_string = ''.join(random.choice(string.ascii_letters) for i in range(1024 * 1024))
+    cli.write_text("""#!/bin/sh
+cat <<EOF
+{
+"access_token": "token",
+"expiry": "2023-05-22T00:00:00.000000+00:00",
+"token_type": "Bearer"
+}
+EOF
+exit 0
+""" + random_string)
+    cli.chmod(0o755)
+    assert cli.stat().st_size >= (1024 * 1024)
+    return cli
+
+
 def test_load_azure_tenant_id_404(requests_mock, monkeypatch):
     set_az_path(monkeypatch)
     mock = requests_mock.get('https://abc123.azuredatabricks.net/aad/auth', status_code=404)

From dd0707969a78a00f09a86d88a4cbce5abf1e9007 Mon Sep 17 00:00:00 2001
From: Serge Smertin <259697+nfx@users.noreply.github.com>
Date: Tue, 27 Aug 2024 17:22:18 +0200
Subject: [PATCH 027/136] [Internal] Verify that `WorkspaceClient` created
 from `AccountClient` does actually work through integration tests (#736)

Signed-off-by: Serge Smertin <259697+nfx@users.noreply.github.com>
---
 tests/integration/test_client.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py
index fd46abb47..5a0a98d53 100644
--- a/tests/integration/test_client.py
+++ b/tests/integration/test_client.py
@@ -18,7 +18,6 @@ def test_creating_ws_client_from_ac_client_does_not_override_config(a):
     wss = list(a.workspaces.list())
     if len(wss) == 0:
         pytest.skip("no workspaces")
-    a.get_workspace_client(wss[0])
-
-    # assert doesn't throw
-    wss = list(a.workspaces.list())
+    w = a.get_workspace_client(wss[0])
+    me = w.current_user.me()
+    assert me.user_name is not None

From 4597c364e27a19fe98219677433b3faa1921e394 Mon Sep 17 00:00:00 2001
From: Miles Yucht
Date: Wed, 28 Aug 2024 05:12:07 -0400
Subject: [PATCH 028/136] [Internal] Fix get_workspace_client test to match Go
 SDK behavior (#738)

## Changes
The current get_workspace_client test fails because the SP used by the
test does not have access to the first workspace listed. In the
[Go](https://github.com/databricks/databricks-sdk-go/blob/main/internal/account_client_test.go#L12)
&
[Java](https://github.com/databricks/databricks-sdk-java/blob/1b90e2318f8221ac0a6e4b56c9b0e4c286e38c9f/databricks-sdk-java/src/test/java/com/databricks/sdk/integration/AccountClientIT.java#L17)
SDKs, the corresponding test respects the `TEST_WORKSPACE_ID`
environment variable to know which workspace to attempt to login to.
This PR changes the test to use that environment variable as well.
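
For reference, `env_or_skip` is the fixture these integration tests already
use to read such variables; a minimal sketch of the behavior it provides
(names and the skip message below are illustrative, not the exact fixture
code):

```python
import os

import pytest


def env_or_skip(name: str) -> str:
    # Skip the test instead of failing it when the variable is not configured.
    value = os.environ.get(name)
    if value is None:
        pytest.skip(f'environment variable {name} is not set')
    return value
```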
## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- tests/integration/test_client.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/tests/integration/test_client.py b/tests/integration/test_client.py index 5a0a98d53..eab9c4713 100644 --- a/tests/integration/test_client.py +++ b/tests/integration/test_client.py @@ -1,6 +1,3 @@ -import pytest - - def test_get_workspace_client(ucacct, env_or_skip): # Need to switch to ucacct workspace_id = env_or_skip("TEST_WORKSPACE_ID") @@ -14,10 +11,9 @@ def test_get_workspace_id(ucws, env_or_skip): assert ucws.get_workspace_id() == ws_id -def test_creating_ws_client_from_ac_client_does_not_override_config(a): - wss = list(a.workspaces.list()) - if len(wss) == 0: - pytest.skip("no workspaces") - w = a.get_workspace_client(wss[0]) +def test_creating_ws_client_from_ac_client_does_not_override_config(ucacct, env_or_skip): + ws_id = env_or_skip('TEST_WORKSPACE_ID') + ws = ucacct.workspaces.get(ws_id) + w = ucacct.get_workspace_client(ws) me = w.current_user.me() assert me.user_name is not None From b36a7b9fc220b217db1572159db806881818a134 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Wed, 28 Aug 2024 07:11:51 -0400 Subject: [PATCH 029/136] [Release] Release v0.31.1 (#739) ### Bug Fixes * Fix `DatabricksConfig.copy` when authenticated with OAuth ([#723](https://github.com/databricks/databricks-sdk-py/pull/723)). ### Internal Changes * Fix get_workspace_client test to match Go SDK behavior ([#738](https://github.com/databricks/databricks-sdk-py/pull/738)). * Verify that `WorkspaceClient` created from `AccountClient` does actually work through integration tests ([#736](https://github.com/databricks/databricks-sdk-py/pull/736)). --- CHANGELOG.md | 14 ++++++++++++++ databricks/sdk/version.py | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ee73d57f9..3442369c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Version changelog +## [Release] Release v0.31.1 + +### Bug Fixes + + * Fix `DatabricksConfig.copy` when authenticated with OAuth ([#723](https://github.com/databricks/databricks-sdk-py/pull/723)). + + +### Internal Changes + + * Fix get_workspace_client test to match Go SDK behavior ([#738](https://github.com/databricks/databricks-sdk-py/pull/738)). + * Verify that `WorkspaceClient` created from `AccountClient` does actually work through integration tests ([#736](https://github.com/databricks/databricks-sdk-py/pull/736)). 
+
+
+
 ## [Release] Release v0.31.0
 
 ### Bug Fixes
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index c3d10d7c4..74f9490de 100755
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.31.0'
+__version__ = '0.31.1'

From 1ccbcd218e32b3041f61fa5be83d6d7b26a5bde6 Mon Sep 17 00:00:00 2001
From: hectorcast-db
Date: Fri, 30 Aug 2024 09:12:20 +0200
Subject: [PATCH 030/136] [Doc] Add Data Plane access documentation (#732)

## Changes
Add Data Plane access documentation

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 docs/dataplane.md | 27 +++++++++++++++++++++++++++
 1 file changed, 27 insertions(+)
 create mode 100644 docs/dataplane.md

diff --git a/docs/dataplane.md b/docs/dataplane.md
new file mode 100644
index 000000000..51e3d0225
--- /dev/null
+++ b/docs/dataplane.md
@@ -0,0 +1,27 @@
+# Data Plane APIs
+
+Some APIs such as Model Serving support direct Data Plane access for higher throughput and lower latency requests.
+To access the Data Plane, a dedicated short-lived OAuth token must be used. The SDK is able to generate and refresh
+such tokens transparently for the user.
+
+## Prerequisites
+The Databricks SDK must be configured using a supported OAuth token. For more information, see
+[Supported Databricks authentication types](https://docs.databricks.com/en/dev-tools/auth/index.html).
+
+The desired service or endpoint must have direct Data Plane access enabled.
+
+## Usage
+The Databricks SDK provides a separate service for Data Plane access, whose name carries a `_data_plane` suffix.
+This service contains the subset of the methods of the original service that are supported in the Data Plane.
+
+Example:
+
+```python
+from databricks.sdk import WorkspaceClient
+# Control Plane
+w = WorkspaceClient()
+w.serving_endpoints.query(...)
+# Data Plane
+w.serving_endpoints_data_plane.query(...)
+```
+

From 3dab4576a31c5bc3947733d64b8e58c474c55cb9 Mon Sep 17 00:00:00 2001
From: Miles Yucht
Date: Fri, 30 Aug 2024 08:40:43 -0400
Subject: [PATCH 031/136] [Fix] Handle non-JSON errors gracefully (#741)

## Changes
Some errors returned by the platform are not serialized using JSON (see
https://github.com/databricks/databricks-sdk-go/issues/998 for an
example). They are instead serialized in the form
"<ERROR_CODE>: <MESSAGE>". Today, the SDK cannot parse these error
messages well, resulting in a poor user experience.

This PR adds support for parsing these error messages from the platform
to the SDK. This should reduce bug reports for the SDK with respect to
unexpected response parsing. This PR also refactors the error
deserialization logic somewhat to make it more extensible in the future
for other potential error formats that are not currently handled.

As a side-effect of this change, I've refactored the structure of the
error handling in the Python SDK to more closely reflect how errors are
handled in the Go SDK. This should make maintenance more straightforward
in the future. It also introduces a new error message to the Python SDK
to refer users to our issue tracker when the SDK receives an error
response that it cannot parse, like what we do in the Go SDK.

Ports https://github.com/databricks/databricks-sdk-go/pull/1031 to the
Python SDK.

## Deprecations
This PR deprecates several fields in the constructor for
DatabricksError. Going forward, SCIM-specific and API 1.2-specific
parameters should not be specified in the constructor; instead, they
will be handled in error parsers.
## Breaking Changes
The introduction of a different message for non-JSON responses may be a
breaking change if users matched on the message structure used before.

## Tests
Existing tests still pass, adding tests before merging this.

- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 databricks/sdk/core.py                     | 147 ++---------------
 databricks/sdk/errors/__init__.py          |   3 +-
 databricks/sdk/errors/base.py              |  34 +++-
 databricks/sdk/errors/mapper.py            |   6 +-
 databricks/sdk/errors/parser.py            | 146 +++++++++++++++++
 databricks/sdk/errors/private_link.py      |   2 +-
 databricks/sdk/logger/__init__.py          |   1 +
 databricks/sdk/logger/round_trip_logger.py | 118 ++++++++++++++
 tests/test_errors.py                       | 178 ++++++++++++---------
 9 files changed, 417 insertions(+), 218 deletions(-)
 create mode 100644 databricks/sdk/errors/parser.py
 create mode 100644 databricks/sdk/logger/__init__.py
 create mode 100644 databricks/sdk/logger/round_trip_logger.py

diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py
index b686bd7fd..e028e4b15 100644
--- a/databricks/sdk/core.py
+++ b/databricks/sdk/core.py
@@ -1,7 +1,5 @@
 import re
-import urllib.parse
 from datetime import timedelta
-from json import JSONDecodeError
 from types import TracebackType
 from typing import Any, BinaryIO, Iterator, Type
 from urllib.parse import urlencode
@@ -12,8 +10,8 @@
 from .config import *
 # To preserve backwards compatibility (as these definitions were previously in this module)
 from .credentials_provider import *
-from .errors import DatabricksError, error_mapper
-from .errors.private_link import _is_private_link_redirect
+from .errors import DatabricksError, get_api_error
+from .logger import RoundTrip
 from .oauth import retrieve_token
 from .retries import retried
@@ -262,134 +260,23 @@ def _perform(self,
                                            auth=auth,
                                            stream=raw,
                                            timeout=self._http_timeout_seconds)
-        try:
-            self._record_request_log(response, raw=raw or data is not None or files is not None)
-            if not response.ok:  # internally calls response.raise_for_status()
-                # TODO: experiment with traceback pruning for better readability
-                # See https://stackoverflow.com/a/58821552/277035
-                payload = response.json()
-                raise self._make_nicer_error(response=response, **payload) from None
-            # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
-            # is successful, but the response is not what we expect. We need to handle this case separately.
-            if _is_private_link_redirect(response):
-                raise self._make_nicer_error(response=response) from None
-            return response
-        except requests.exceptions.JSONDecodeError:
-            message = self._make_sense_from_html(response.text)
-            if not message:
-                message = response.reason
-            raise self._make_nicer_error(response=response, message=message) from None
-
-    @staticmethod
-    def _make_sense_from_html(txt: str) -> str:
-        matchers = [r'<pre>(.*)</pre>', r'<title>(.*)</title>']
-        for attempt in matchers:
-            expr = re.compile(attempt, re.MULTILINE)
-            match = expr.search(txt)
-            if not match:
-                continue
-            return match.group(1).strip()
-        return txt
-
-    def _make_nicer_error(self, *, response: requests.Response, **kwargs) -> DatabricksError:
-        status_code = response.status_code
-        message = kwargs.get('message', 'request failed')
-        is_http_unauthorized_or_forbidden = status_code in (401, 403)
-        is_too_many_requests_or_unavailable = status_code in (429, 503)
-        if is_http_unauthorized_or_forbidden:
-            message = self._cfg.wrap_debug_info(message)
-        if is_too_many_requests_or_unavailable:
-            kwargs['retry_after_secs'] = self._parse_retry_after(response)
-        kwargs['message'] = message
-        return error_mapper(response, kwargs)
-
-    def _record_request_log(self, response: requests.Response, raw=False):
+        self._record_request_log(response, raw=raw or data is not None or files is not None)
+        error = get_api_error(response)
+        if error is not None:
+            status_code = response.status_code
+            is_http_unauthorized_or_forbidden = status_code in (401, 403)
+            is_too_many_requests_or_unavailable = status_code in (429, 503)
+            if is_http_unauthorized_or_forbidden:
+                error.message = self._cfg.wrap_debug_info(error.message)
+            if is_too_many_requests_or_unavailable:
+                error.retry_after_secs = self._parse_retry_after(response)
+            raise error from None
+        return response
+
+    def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
         if not logger.isEnabledFor(logging.DEBUG):
             return
-        request = response.request
-        url = urllib.parse.urlparse(request.url)
-        query = ''
-        if url.query:
-            query = f'?{urllib.parse.unquote(url.query)}'
-        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
-        if self._cfg.debug_headers:
-            if self._cfg.host:
-                sb.append(f'> * Host: {self._cfg.host}')
-            for k, v in request.headers.items():
-                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
-        if request.body:
-            sb.append("> [raw stream]" if raw else self._redacted_dump("> ", request.body))
-        sb.append(f'< {response.status_code} {response.reason}')
-        if raw and response.headers.get('Content-Type', None) != 'application/json':
-            # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
-            sb.append("< [raw stream]")
-        elif response.content:
-            sb.append(self._redacted_dump("< ", response.content))
-        logger.debug("\n".join(sb))
-
-    @staticmethod
-    def _mask(m: Dict[str, any]):
-        for k in m:
-            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
-                m[k] = "**REDACTED**"
-
-    @staticmethod
-    def _map_keys(m: Dict[str, any]) -> List[str]:
-        keys = list(m.keys())
-        keys.sort()
-        return keys
-
-    @staticmethod
-    def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
-        diff = len(j.encode('utf-8')) - num_bytes
-        if diff > 0:
-            return f"{j[:num_bytes]}... ({diff} more bytes)"
-        return j
-
-    def _recursive_marshal_dict(self, m, budget) -> dict:
-        out = {}
-        self._mask(m)
-        for k in sorted(m.keys()):
-            raw = self._recursive_marshal(m[k], budget)
-            out[k] = raw
-            budget -= len(str(raw))
-        return out
-
-    def _recursive_marshal_list(self, s, budget) -> list:
-        out = []
-        for i in range(len(s)):
-            if i > 0 >= budget:
-                out.append("... (%d additional elements)" % (len(s) - len(out)))
-                break
-            raw = self._recursive_marshal(s[i], budget)
-            out.append(raw)
-            budget -= len(str(raw))
-        return out
-
-    def _recursive_marshal(self, v: any, budget: int) -> any:
-        if isinstance(v, dict):
-            return self._recursive_marshal_dict(v, budget)
-        elif isinstance(v, list):
-            return self._recursive_marshal_list(v, budget)
-        elif isinstance(v, str):
-            return self._only_n_bytes(v, self._debug_truncate_bytes)
-        else:
-            return v
-
-    def _redacted_dump(self, prefix: str, body: str) -> str:
-        if len(body) == 0:
-            return ""
-        try:
-            # Unmarshal body into primitive types.
-            tmp = json.loads(body)
-            max_bytes = 96
-            if self._debug_truncate_bytes > max_bytes:
-                max_bytes = self._debug_truncate_bytes
-            # Re-marshal body taking redaction and character limit into account.
-            raw = self._recursive_marshal(tmp, max_bytes)
-            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
-        except JSONDecodeError:
-            return f'{prefix}[non-JSON document of {len(body)} bytes]'
+        logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate())


 class StreamingResponse(BinaryIO):
diff --git a/databricks/sdk/errors/__init__.py b/databricks/sdk/errors/__init__.py
index 749c95116..578406803 100644
--- a/databricks/sdk/errors/__init__.py
+++ b/databricks/sdk/errors/__init__.py
@@ -1,5 +1,6 @@
 from .base import DatabricksError, ErrorDetail
-from .mapper import error_mapper
+from .mapper import _error_mapper
+from .parser import get_api_error
 from .platform import *
 from .private_link import PrivateLinkValidationError
 from .sdk import *
diff --git a/databricks/sdk/errors/base.py b/databricks/sdk/errors/base.py
index 89be376b6..973c3644e 100644
--- a/databricks/sdk/errors/base.py
+++ b/databricks/sdk/errors/base.py
@@ -1,4 +1,5 @@
 import re
+import warnings
 from dataclasses import dataclass
 from typing import Dict, List, Optional
@@ -41,9 +42,38 @@ def __init__(self,
                  retry_after_secs: int = None,
                  details: List[Dict[str, any]] = None,
                  **kwargs):
+        """
+
+        :param message:
+        :param error_code:
+        :param detail: [Deprecated]
+        :param status: [Deprecated]
+        :param scimType: [Deprecated]
+        :param error: [Deprecated]
+        :param retry_after_secs:
+        :param details:
+        :param kwargs:
+        """
+        # SCIM-specific parameters are deprecated
+        if detail:
+            warnings.warn(
+                "The 'detail' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+        if scimType:
+            warnings.warn(
+                "The 'scimType' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+        if status:
+            warnings.warn(
+                "The 'status' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+
+        # API 1.2-specific parameters are deprecated
         if error:
-            # API 1.2 has different response format, let's adapt
-            message = error
+            warnings.warn(
+                "The 'error' parameter of DatabricksError is deprecated and will be removed in a future version."
+            )
+
         if detail:
             # Handle SCIM error message details
             #   @see https://tools.ietf.org/html/rfc7644#section-3.7.3
diff --git a/databricks/sdk/errors/mapper.py b/databricks/sdk/errors/mapper.py
index 0b809eb7e..282b09c76 100644
--- a/databricks/sdk/errors/mapper.py
+++ b/databricks/sdk/errors/mapper.py
@@ -4,11 +4,9 @@
 from databricks.sdk.errors.base import DatabricksError

 from .overrides import _ALL_OVERRIDES
-from .private_link import (_get_private_link_validation_error,
-                           _is_private_link_redirect)


-def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
+def _error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
     for override in _ALL_OVERRIDES:
         if override.matches(response, raw):
             return override.custom_error(**raw)
@@ -23,8 +21,6 @@ def error_mapper(response: requests.Response, raw: dict) -> DatabricksError:
     # where there's a default exception class per HTTP status code, and we do
     # rely on Databricks platform exception mapper to do the right thing.
         return platform.STATUS_CODE_MAPPING[status_code](**raw)
-    if _is_private_link_redirect(response):
-        return _get_private_link_validation_error(response.url)

     # backwards-compatible error creation for cases like using older versions of
     # the SDK on way newer releases of the platform.
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
new file mode 100644
index 000000000..e2feb99d6
--- /dev/null
+++ b/databricks/sdk/errors/parser.py
@@ -0,0 +1,146 @@
+import abc
+import json
+import logging
+import re
+from typing import Optional
+
+import requests
+
+from ..logger import RoundTrip
+from .base import DatabricksError
+from .mapper import _error_mapper
+from .private_link import (_get_private_link_validation_error,
+                           _is_private_link_redirect)
+
+
+class _ErrorParser(abc.ABC):
+    """A parser for errors from the Databricks REST API."""
+
+    @abc.abstractmethod
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
+
+
+class _EmptyParser(_ErrorParser):
+    """A parser that handles empty responses."""
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        if len(response_body) == 0:
+            return {'message': response.reason}
+        return None
+
+
+class _StandardErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API using the standard error format.
+    """
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        try:
+            payload_str = response_body.decode('utf-8')
+            resp: dict = json.loads(payload_str)
+        except json.JSONDecodeError as e:
+            logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
+            return None
+
+        error_args = {
+            'message': resp.get('message', 'request failed'),
+            'error_code': resp.get('error_code'),
+            'details': resp.get('details'),
+        }
+
+        # Handle API 1.2-style errors
+        if 'error' in resp:
+            error_args['message'] = resp['error']
+
+        # Handle SCIM Errors
+        detail = resp.get('detail')
+        status = resp.get('status')
+        scim_type = resp.get('scimType')
+        if detail:
+            # Handle SCIM error message details
+            # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
+            error_args[
+                'message'] = f"{scim_type} {error_args.get('message', 'SCIM API Internal Error')}".strip(" ")
+            error_args['error_code'] = f"SCIM_{status}"
+        return error_args
+
+
+class _StringErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
+    """
+
+    __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        match = self.__STRING_ERROR_REGEX.match(payload_str)
+        if not match:
+            logging.debug('_StringErrorParser: unable to parse response as string')
+            return None
+        error_code, message = match.groups()
+        return {'error_code': error_code, 'message': message, 'status': response.status_code, }
+
+
+class _HtmlErrorParser(_ErrorParser):
+    """
+    Parses errors from the Databricks REST API in HTML format.
+    """
+
+    __HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
+
+    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
+        payload_str = response_body.decode('utf-8')
+        for regex in self.__HTML_ERROR_REGEXES:
+            match = regex.search(payload_str)
+            if match:
+                message = match.group(1) if match.group(1) else response.reason
+                return {
+                    'status': response.status_code,
+                    'message': message,
+                    'error_code': response.reason.upper().replace(' ', '_')
+                }
+        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
+        return None
+
+
+# A list of ErrorParsers that are tried in order to parse an API error from a response body. Most errors should be
+# parsable by the _StandardErrorParser, but additional parsers can be added here for specific error formats. The order
+# of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
+_error_parsers = [_EmptyParser(), _StandardErrorParser(), _StringErrorParser(), _HtmlErrorParser(), ]
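+# For example: an empty body is handled by _EmptyParser; a JSON body such as
+# '{"error_code": "TEMPORARILY_UNAVAILABLE", "message": "..."}' is handled by
+# _StandardErrorParser; a plain-text body such as 'MALFORMED_REQUEST: ...' is
+# handled by _StringErrorParser; and an HTML error page with a <pre> or <title>
+# tag is handled by _HtmlErrorParser.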
+
+
+def _unknown_error(response: requests.Response) -> str:
+    """A standard error message that can be shown when an API response cannot be parsed.
+
+    This error message includes a link to the issue tracker for the SDK for users to report the issue to us.
+    """
+    request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
+    return (
+        'This is likely a bug in the Databricks SDK for Python or the underlying '
+        'API. Please report this issue with the following debugging information to the SDK issue tracker at '
+        f'https://github.com/databricks/databricks-sdk-py/issues. Request log:```{request_log}```')
+
+
+def get_api_error(response: requests.Response) -> Optional[DatabricksError]:
+    """
+    Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
+    :param response: The response from the REST API.
+    :return: A DatabricksError if the response indicates an error, otherwise None.
+    """
+    if not response.ok:
+        content = response.content
+        for parser in _error_parsers:
+            try:
+                error_args = parser.parse_error(response, content)
+                if error_args:
+                    return _error_mapper(response, error_args)
+            except Exception as e:
+                logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
+        return _error_mapper(response, {'message': 'unable to parse response. ' + _unknown_error(response)})
+
+    # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+    # is successful, but the response is not what we expect. We need to handle this case separately.
+    if _is_private_link_redirect(response):
+        return _get_private_link_validation_error(response.url)
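+
+# Illustrative usage, mirroring how `_perform` in core.py consumes this helper:
+#
+#     error = get_api_error(response)
+#     if error is not None:
+#         raise error from None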
diff --git a/databricks/sdk/errors/private_link.py b/databricks/sdk/errors/private_link.py
index e8cc5eadf..946b41b50 100644
--- a/databricks/sdk/errors/private_link.py
+++ b/databricks/sdk/errors/private_link.py
@@ -51,7 +51,7 @@ def _is_private_link_redirect(resp: requests.Response) -> bool:
     return parsed.path == '/login.html' and 'error=private-link-validation-error' in parsed.query
 
 
-def _get_private_link_validation_error(url: str) -> _PrivateLinkInfo:
+def _get_private_link_validation_error(url: str) -> PrivateLinkValidationError:
     parsed = parse.urlparse(url)
     env = get_environment_for_hostname(parsed.hostname)
     return PrivateLinkValidationError(message=_private_link_info_map[env.cloud].error_message(),
diff --git a/databricks/sdk/logger/__init__.py b/databricks/sdk/logger/__init__.py
new file mode 100644
index 000000000..f843f05f6
--- /dev/null
+++ b/databricks/sdk/logger/__init__.py
@@ -0,0 +1 @@
+from .round_trip_logger import RoundTrip
diff --git a/databricks/sdk/logger/round_trip_logger.py b/databricks/sdk/logger/round_trip_logger.py
new file mode 100644
index 000000000..f1d177aaa
--- /dev/null
+++ b/databricks/sdk/logger/round_trip_logger.py
@@ -0,0 +1,118 @@
+import json
+import urllib.parse
+from typing import Dict, List
+
+import requests
+
+
+class RoundTrip:
+    """
+    A utility class for converting HTTP requests and responses to strings.
+
+    :param response: The response object to stringify.
+    :param debug_headers: Whether to include headers in the generated string.
+    :param debug_truncate_bytes: The maximum number of bytes to include in the generated string.
+    :param raw: Whether the response is a stream or not. If True, the response will not be logged directly.
+    """
+
+    def __init__(self,
+                 response: requests.Response,
+                 debug_headers: bool,
+                 debug_truncate_bytes: int,
+                 raw=False):
+        self._debug_headers = debug_headers
+        self._debug_truncate_bytes = max(debug_truncate_bytes, 96)
+        self._raw = raw
+        self._response = response
+
+    def generate(self) -> str:
+        """
+        Generate a string representation of the request and response. The string will include the request method, URL,
+        headers, and body, as well as the response status code, reason, headers, and body. Outgoing information
+        will be prefixed with `>`, and incoming information will be prefixed with `<`.
+        :return: A string representation of the request and response.
+        """
+        request = self._response.request
+        url = urllib.parse.urlparse(request.url)
+        query = ''
+        if url.query:
+            query = f'?{urllib.parse.unquote(url.query)}'
+        sb = [f'{request.method} {urllib.parse.unquote(url.path)}{query}']
+        if self._debug_headers:
+            for k, v in request.headers.items():
+                sb.append(f'> * {k}: {self._only_n_bytes(v, self._debug_truncate_bytes)}')
+        if request.body:
+            sb.append("> [raw stream]" if self._raw else self._redacted_dump("> ", request.body))
+        sb.append(f'< {self._response.status_code} {self._response.reason}')
+        if self._raw and self._response.headers.get('Content-Type', None) != 'application/json':
+            # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
+            sb.append("< [raw stream]")
+        elif self._response.content:
+            sb.append(self._redacted_dump("< ", self._response.content.decode('utf-8')))
+        return '\n'.join(sb)
+
+    @staticmethod
+    def _mask(m: Dict[str, any]):
+        for k in m:
+            if k in {'bytes_value', 'string_value', 'token_value', 'value', 'content'}:
+                m[k] = "**REDACTED**"
+
+    @staticmethod
+    def _map_keys(m: Dict[str, any]) -> List[str]:
+        keys = list(m.keys())
+        keys.sort()
+        return keys
+
+    @staticmethod
+    def _only_n_bytes(j: str, num_bytes: int = 96) -> str:
+        diff = len(j.encode('utf-8')) - num_bytes
+        if diff > 0:
+            return f"{j[:num_bytes]}... ({diff} more bytes)"
+        return j
+
+    def _recursive_marshal_dict(self, m, budget) -> dict:
+        out = {}
+        self._mask(m)
+        for k in sorted(m.keys()):
+            raw = self._recursive_marshal(m[k], budget)
+            out[k] = raw
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal_list(self, s, budget) -> list:
+        out = []
+        for i in range(len(s)):
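+            # Note: chained comparison, equivalent to `i > 0 and 0 >= budget`;
+            # truncate once the character budget is exhausted, but always
+            # keep the first element.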
+            if i > 0 >= budget:
+                out.append("... (%d additional elements)" % (len(s) - len(out)))
+                break
+            raw = self._recursive_marshal(s[i], budget)
+            out.append(raw)
+            budget -= len(str(raw))
+        return out
+
+    def _recursive_marshal(self, v: any, budget: int) -> any:
+        if isinstance(v, dict):
+            return self._recursive_marshal_dict(v, budget)
+        elif isinstance(v, list):
+            return self._recursive_marshal_list(v, budget)
+        elif isinstance(v, str):
+            return self._only_n_bytes(v, self._debug_truncate_bytes)
+        else:
+            return v
+
+    def _redacted_dump(self, prefix: str, body: str) -> str:
+        if len(body) == 0:
+            return ""
+        try:
+            # Unmarshal body into primitive types.
+            tmp = json.loads(body)
+            max_bytes = 96
+            if self._debug_truncate_bytes > max_bytes:
+                max_bytes = self._debug_truncate_bytes
+            # Re-marshal body taking redaction and character limit into account.
+            raw = self._recursive_marshal(tmp, max_bytes)
+            return "\n".join([f'{prefix}{line}' for line in json.dumps(raw, indent=2).split("\n")])
+        except json.JSONDecodeError:
+            to_log = self._only_n_bytes(body, self._debug_truncate_bytes)
+            log_lines = [prefix + x.strip('\r') for x in to_log.split("\n")]
+            return '\n'.join(log_lines)
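As a usage sketch (the endpoint URL is a placeholder), the new `RoundTrip` helper turns any completed `requests` round trip into a log-friendly string:

```python
import logging

import requests

from databricks.sdk.logger import RoundTrip

response = requests.get('https://databricks.com/api/2.0/service')
# Include headers and allow up to 1 KiB per value before truncation;
# known-sensitive fields in JSON bodies are redacted automatically.
logging.getLogger('databricks.sdk').debug(
    RoundTrip(response, debug_headers=True, debug_truncate_bytes=1024).generate())
```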
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 87111c4b4..1dfcfaf26 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -1,97 +1,117 @@
+import http.client
+import json
+from typing import List, Optional, Tuple
+
 import pytest
 import requests
 
 from databricks.sdk import errors
 
 
-def fake_response(status_code: int) -> requests.Response:
+def fake_response(method: str,
+                  status_code: int,
+                  response_body: str,
+                  path: Optional[str] = None) -> requests.Response:
     resp = requests.Response()
     resp.status_code = status_code
-    resp.request = requests.Request('GET', 'https://databricks.com/api/2.0/service').prepare()
+    resp.reason = http.client.responses.get(status_code, '')
+    if path is None:
+        path = '/api/2.0/service'
+    resp.request = requests.Request(method, f"https://databricks.com{path}").prepare()
+    resp._content = response_body.encode('utf-8')
     return resp
 
 
-def test_error_code_has_precedence_over_http_status():
-    err = errors.error_mapper(fake_response(400), {
-        'error_code': 'INVALID_PARAMETER_VALUE',
-        'message': 'nope'
-    })
-    assert errors.InvalidParameterValue == type(err)
-
-
-def test_http_status_code_maps_fine():
-    err = errors.error_mapper(fake_response(400), {'error_code': 'MALFORMED_REQUEST', 'message': 'nope'})
-    assert errors.BadRequest == type(err)
-
-
-def test_other_errors_also_map_fine():
-    err = errors.error_mapper(fake_response(417), {'error_code': 'WHOOPS', 'message': 'nope'})
-    assert errors.DatabricksError == type(err)
-
+def fake_valid_response(method: str,
+                        status_code: int,
+                        error_code: str,
+                        message: str,
+                        path: Optional[str] = None) -> requests.Response:
+    body = {'message': message}
+    if error_code:
+        body['error_code'] = error_code
+    return fake_response(method, status_code, json.dumps(body), path)
 
-def test_missing_error_code():
-    err = errors.error_mapper(fake_response(522), {'message': 'nope'})
-    assert errors.DatabricksError == type(err)
 
-
-def test_private_link_error():
+def make_private_link_response() -> requests.Response:
     resp = requests.Response()
     resp.url = 'https://databricks.com/login.html?error=private-link-validation-error'
     resp.request = requests.Request('GET', 'https://databricks.com/api/2.0/service').prepare()
-    err = errors.error_mapper(resp, {})
-    assert errors.PrivateLinkValidationError == type(err)
-
-
-@pytest.mark.parametrize('status_code, error_code, klass',
-                         [(400, ..., errors.BadRequest), (400, 'INVALID_PARAMETER_VALUE', errors.BadRequest),
-                          (400, 'INVALID_PARAMETER_VALUE', errors.InvalidParameterValue),
-                          (400, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests), (400, ..., IOError),
-                          (401, ..., errors.Unauthenticated), (401, ..., IOError),
-                          (403, ..., errors.PermissionDenied),
-                          (403, ..., IOError), (404, ..., errors.NotFound), (404, ..., IOError),
-                          (409, ..., errors.ResourceConflict), (409, 'ABORTED', errors.Aborted),
-                          (409, 'ABORTED', errors.ResourceConflict),
-                          (409, 'ALREADY_EXISTS', errors.AlreadyExists),
-                          (409, 'ALREADY_EXISTS', errors.ResourceConflict), (409, ..., IOError),
-                          (429, ..., errors.TooManyRequests),
-                          (429, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
-                          (429, 'REQUEST_LIMIT_EXCEEDED', errors.RequestLimitExceeded),
-                          (429, 'RESOURCE_EXHAUSTED', errors.TooManyRequests),
-                          (429, 'RESOURCE_EXHAUSTED', errors.ResourceExhausted), (429, ..., IOError),
-                          (499, ..., errors.Cancelled), (499, ..., IOError), (500, ..., errors.InternalError),
-                          (500, 'UNKNOWN', errors.InternalError), (500, 'UNKNOWN', errors.Unknown),
-                          (500, 'DATA_LOSS', errors.InternalError), (500, 'DATA_LOSS', errors.DataLoss),
-                          (500, ..., IOError), (501, ..., errors.NotImplemented), (501, ..., IOError),
-                          (503, ..., errors.TemporarilyUnavailable), (503, ..., IOError),
-                          (504, ..., errors.DeadlineExceeded), (504, ..., IOError),
-                          (444, ..., errors.DatabricksError), (444, ..., IOError), ])
-def test_subclasses(status_code, error_code, klass):
-    try:
-        raise errors.error_mapper(fake_response(status_code), {'error_code': error_code, 'message': 'nope'})
-    except klass:
-        return
+    resp._content = b'{}'
+    resp.status_code = 200
+    return resp
 
 
-@pytest.mark.parametrize('verb, path, status_code, error_code, message, expected_error',
-                         [[
-                             'GET', '/api/2.0/clusters/get', 400, 'INVALID_PARAMETER_VALUE',
-                             'Cluster abcde does not exist', errors.ResourceDoesNotExist
-                         ],
-                          [
-                              'GET', '/api/2.0/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Job abcde does not exist', errors.ResourceDoesNotExist
-                          ],
-                          [
-                              'GET', '/api/2.1/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Job abcde does not exist', errors.ResourceDoesNotExist
-                          ],
-                          [
-                              'GET', '/api/2.1/jobs/get', 400, 'INVALID_PARAMETER_VALUE',
-                              'Invalid spark version', errors.InvalidParameterValue
-                          ], ])
-def test_error_overrides(verb, path, status_code, error_code, message, expected_error):
-    resp = requests.Response()
-    resp.status_code = status_code
-    resp.request = requests.Request(verb, f'https://databricks.com{path}').prepare()
-    with pytest.raises(expected_error):
-        raise errors.error_mapper(resp, {'error_code': error_code, 'message': message})
+# This should be `(int, str, type)` but doesn't work in Python 3.7-3.8.
+base_subclass_test_cases: List[Tuple[int, str,
+                                     type]] = [(400, '', errors.BadRequest),
+                                               (400, 'INVALID_PARAMETER_VALUE', errors.BadRequest),
+                                               (400, 'INVALID_PARAMETER_VALUE', errors.InvalidParameterValue),
+                                               (400, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
+                                               (400, '', IOError), (401, '', errors.Unauthenticated),
+                                               (401, '', IOError), (403, '', errors.PermissionDenied),
+                                               (403, '', IOError), (404, '', errors.NotFound),
+                                               (404, '', IOError), (409, '', errors.ResourceConflict),
+                                               (409, 'ABORTED', errors.Aborted),
+                                               (409, 'ABORTED', errors.ResourceConflict),
+                                               (409, 'ALREADY_EXISTS', errors.AlreadyExists),
+                                               (409, 'ALREADY_EXISTS', errors.ResourceConflict),
+                                               (409, '', IOError), (429, '', errors.TooManyRequests),
+                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.TooManyRequests),
+                                               (429, 'REQUEST_LIMIT_EXCEEDED', errors.RequestLimitExceeded),
+                                               (429, 'RESOURCE_EXHAUSTED', errors.TooManyRequests),
+                                               (429, 'RESOURCE_EXHAUSTED', errors.ResourceExhausted),
+                                               (429, '', IOError), (499, '', errors.Cancelled),
+                                               (499, '', IOError), (500, '', errors.InternalError),
+                                               (500, 'UNKNOWN', errors.InternalError),
+                                               (500, 'UNKNOWN', errors.Unknown),
+                                               (500, 'DATA_LOSS', errors.InternalError),
+                                               (500, 'DATA_LOSS', errors.DataLoss), (500, '', IOError),
+                                               (501, '', errors.NotImplemented), (501, '', IOError),
+                                               (503, '', errors.TemporarilyUnavailable), (503, '', IOError),
+                                               (504, '', errors.DeadlineExceeded), (504, '', IOError),
+                                               (444, '', errors.DatabricksError), (444, '', IOError), ]
+
+subclass_test_cases = [(fake_valid_response('GET', x[0], x[1], 'nope'), x[2], 'nope')
+                       for x in base_subclass_test_cases]
+
+
+@pytest.mark.parametrize(
+    'response, expected_error, expected_message', subclass_test_cases +
+    [(fake_response('GET', 400, ''), errors.BadRequest, 'Bad Request'),
+     (fake_valid_response('GET', 417, 'WHOOPS', 'nope'), errors.DatabricksError, 'nope'),
+     (fake_valid_response('GET', 522, '', 'nope'), errors.DatabricksError, 'nope'),
+     (make_private_link_response(), errors.PrivateLinkValidationError,
+      ('The requested workspace has AWS PrivateLink enabled and is not accessible from the current network. '
+       'Ensure that AWS PrivateLink is properly configured and that your device has access to the AWS VPC '
+       'endpoint. For more information, see '
+       'https://docs.databricks.com/en/security/network/classic/privatelink.html.'),
+      ),
+     (fake_valid_response(
+         'GET', 400, 'INVALID_PARAMETER_VALUE', 'Cluster abcde does not exist',
+         '/api/2.0/clusters/get'), errors.ResourceDoesNotExist, 'Cluster abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                          '/api/2.0/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Job abcde does not exist',
+                          '/api/2.1/jobs/get'), errors.ResourceDoesNotExist, 'Job abcde does not exist'),
+     (fake_valid_response('GET', 400, 'INVALID_PARAMETER_VALUE', 'Invalid spark version',
+                          '/api/2.1/jobs/get'), errors.InvalidParameterValue, 'Invalid spark version'),
+     (fake_response(
+         'GET', 400,
+         'MALFORMED_REQUEST: vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
+     ), errors.BadRequest,
+      'vpc_endpoints malformed parameters: VPC Endpoint ... with use_case ... cannot be attached in ... list'
+      ),
+     (fake_response('GET', 400, '<pre>Worker environment not ready</pre>'), errors.BadRequest,
+      'Worker environment not ready'),
+     (fake_response('GET', 400, 'this is not a real response'), errors.BadRequest,
+      ('unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. '
+       'Please report this issue with the following debugging information to the SDK issue tracker at '
+       'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
+       '< 400 Bad Request\n'
+       '< this is not a real response```')), ])
+def test_get_api_error(response, expected_error, expected_message):
+    with pytest.raises(errors.DatabricksError) as e:
+        raise errors.get_api_error(response)
+    assert isinstance(e.value, expected_error)
+    assert str(e.value) == expected_message

From de939a8cf7eca09be15ca235b2f3239c34e8be81 Mon Sep 17 00:00:00 2001
From: Miles Yucht
Date: Mon, 2 Sep 2024 05:17:56 -0400
Subject: [PATCH 032/136] [Internal] Fix test_iam::test_scim_error_unmarshall
 integration test (#743)

## Changes
The error message returned by the SCIM API when using an invalid filter
changed, breaking our integration test. This PR updates the test to check
for the type of error returned, which should be robust to error message
changes. The new error message, `InvalidFilter request failed`, is worse in
my opinion, so I will raise this with the identity team.

## Tests
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied

---
 tests/integration/test_iam.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tests/integration/test_iam.py b/tests/integration/test_iam.py
index f5120f546..f8d7c3b1f 100644
--- a/tests/integration/test_iam.py
+++ b/tests/integration/test_iam.py
@@ -1,5 +1,6 @@
 import pytest
 
+from databricks.sdk import errors
 from databricks.sdk.core import DatabricksError
 
 
@@ -13,7 +14,7 @@ def test_scim_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
         groups = w.groups.list(filter=random(12))
         next(groups)
-    assert 'Given filter operator is not supported' in str(exc_info.value)
+    assert isinstance(exc_info.value, errors.BadRequest)
 
 
 def test_scim_get_user_as_dict(w):

From c532de65b9ab5dd64e9229e4969c211ee19d506b Mon Sep 17 00:00:00 2001
From: Miles Yucht
Date: Wed, 4 Sep 2024 09:31:11 -0400
Subject: [PATCH 033/136] [Release] Release v0.32.0 (#747)

### Bug Fixes

* Handle non-JSON errors gracefully ([#741](https://github.com/databricks/databricks-sdk-py/pull/741)).

### Documentation

* Add Data Plane access documentation ([#732](https://github.com/databricks/databricks-sdk-py/pull/732)).

### Internal Changes

* Fix test_iam::test_scim_error_unmarshall integration test ([#743](https://github.com/databricks/databricks-sdk-py/pull/743)).

### API Changes:

* Added `regenerate_dashboard()` method for [w.quality_monitors](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/quality_monitors.html) workspace-level service.
* Added `databricks.sdk.service.catalog.RegenerateDashboardRequest` and `databricks.sdk.service.catalog.RegenerateDashboardResponse` dataclasses.
* Added `databricks.sdk.service.jobs.QueueDetails`, `databricks.sdk.service.jobs.QueueDetailsCodeCode`, `databricks.sdk.service.jobs.RunLifecycleStateV2State`, `databricks.sdk.service.jobs.RunStatus`, `databricks.sdk.service.jobs.TerminationCodeCode`, `databricks.sdk.service.jobs.TerminationDetails` and `databricks.sdk.service.jobs.TerminationTypeType` dataclasses.
* Added `status` field for `databricks.sdk.service.jobs.BaseRun`.
* Added `status` field for `databricks.sdk.service.jobs.RepairHistoryItem`.
* Added `status` field for `databricks.sdk.service.jobs.Run`.
* Added `status` field for `databricks.sdk.service.jobs.RunTask`.
* Added `max_provisioned_throughput` and `min_provisioned_throughput` fields for `databricks.sdk.service.serving.ServedModelInput`.
* Added `columns_to_sync` field for `databricks.sdk.service.vectorsearch.DeltaSyncVectorIndexSpecRequest`.
* Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to no longer be required.

OpenAPI SHA: d05898328669a3f8ab0c2ecee37db2673d3ea3f7, Date: 2024-09-04
---
 .codegen/_openapi_sha                                      |   2 +-
 .gitattributes                                             |   1 +
 CHANGELOG.md                                               |  32 +++
 databricks/sdk/service/catalog.py                          |  89 ++++++-
 databricks/sdk/service/iam.py                              |  16 +-
 databricks/sdk/service/jobs.py                             | 250 +++++++++++++++++-
 databricks/sdk/service/ml.py                               |  10 +-
 databricks/sdk/service/serving.py                          |  28 +-
 databricks/sdk/service/vectorsearch.py                     |   9 +-
 databricks/sdk/version.py                                  |   2 +-
 docs/account/billing/billable_usage.rst                    |   2 +-
 docs/account/billing/budgets.rst                           |   8 +-
 docs/account/provisioning/workspaces.rst                   |  50 +---
 docs/dbdataclasses/catalog.rst                             |   8 +
 docs/dbdataclasses/jobs.rst                                | 132 +++++++++
 docs/workspace/catalog/metastores.rst                      |   6 +-
 docs/workspace/catalog/quality_monitors.rst                |  23 ++
 docs/workspace/iam/permissions.rst                         |  12 +-
 docs/workspace/ml/experiments.rst                          |  10 +-
 examples/account/billable_usage/download_usage_download.py |   2 +-
 examples/account/budgets/update_budgets.py                 |   8 +-
 examples/account/io/read_usage_download.py                 |   2 +-
 examples/account/waiter/get_workspaces.py                  |   5 +
 examples/account/workspaces/create_workspaces.py           |  10 +-
 examples/account/workspaces/get_workspaces.py              |  23 +-
 examples/account/workspaces/update_workspaces.py           |  17 +-
 26 files changed, 619 insertions(+), 138 deletions(-)
 create mode 100755 examples/account/waiter/get_workspaces.py

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 8b01a2422..4ceeab3d3 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-3eae49b444cac5a0118a3503e5b7ecef7f96527a
\ No newline at end of file
+d05898328669a3f8ab0c2ecee37db2673d3ea3f7
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index 22e000b1b..c862f312c 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -63,6 +63,7 @@ examples/account/users/patch_account_users.py linguist-generated=true
 examples/account/vpc_endpoints/create_vpc_endpoints.py linguist-generated=true
 examples/account/vpc_endpoints/get_vpc_endpoints.py linguist-generated=true
 examples/account/vpc_endpoints/list_vpc_endpoints.py linguist-generated=true
+examples/account/waiter/get_workspaces.py linguist-generated=true
 examples/account/workspace_assignment/list_workspace_assignment_on_aws.py linguist-generated=true
 examples/account/workspace_assignment/update_workspace_assignment_on_aws.py linguist-generated=true
 examples/account/workspaces/create_workspaces.py linguist-generated=true
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3442369c7..62b0985d6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,37 @@
 # Version changelog
 
+## [Release] Release v0.32.0
+
+### Bug Fixes
+
+ * Handle non-JSON errors gracefully ([#741](https://github.com/databricks/databricks-sdk-py/pull/741)).
+
+
+### Documentation
+
+ * Add Data Plane access documentation ([#732](https://github.com/databricks/databricks-sdk-py/pull/732)).
+
+
+### Internal Changes
+
+ * Fix test_iam::test_scim_error_unmarshall integration test ([#743](https://github.com/databricks/databricks-sdk-py/pull/743)).
+
+
+### API Changes:
+
+ * Added `regenerate_dashboard()` method for [w.quality_monitors](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/quality_monitors.html) workspace-level service.
+ * Added `databricks.sdk.service.catalog.RegenerateDashboardRequest` and `databricks.sdk.service.catalog.RegenerateDashboardResponse` dataclasses.
+ * Added `databricks.sdk.service.jobs.QueueDetails`, `databricks.sdk.service.jobs.QueueDetailsCodeCode`, `databricks.sdk.service.jobs.RunLifecycleStateV2State`, `databricks.sdk.service.jobs.RunStatus`, `databricks.sdk.service.jobs.TerminationCodeCode`, `databricks.sdk.service.jobs.TerminationDetails` and `databricks.sdk.service.jobs.TerminationTypeType` dataclasses.
+ * Added `status` field for `databricks.sdk.service.jobs.BaseRun`.
+ * Added `status` field for `databricks.sdk.service.jobs.RepairHistoryItem`.
+ * Added `status` field for `databricks.sdk.service.jobs.Run`.
+ * Added `status` field for `databricks.sdk.service.jobs.RunTask`.
+ * Added `max_provisioned_throughput` and `min_provisioned_throughput` fields for `databricks.sdk.service.serving.ServedModelInput`.
+ * Added `columns_to_sync` field for `databricks.sdk.service.vectorsearch.DeltaSyncVectorIndexSpecRequest`.
+ * Changed `workload_size` field for `databricks.sdk.service.serving.ServedModelInput` to no longer be required.
+
+OpenAPI SHA: d05898328669a3f8ab0c2ecee37db2673d3ea3f7, Date: 2024-09-04
+
 ## [Release] Release v0.31.1
 
 ### Bug Fixes
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 5c3702daf..b372bc9b2 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -1268,7 +1268,8 @@ class CreateMetastoreAssignment:
     """The unique ID of the metastore."""
 
     default_catalog_name: str
-    """The name of the default catalog in the metastore."""
+    """The name of the default catalog in the metastore. This field is depracted. Please use "Default
+    Namespace API" to configure the default catalog for a Databricks workspace."""
 
     workspace_id: Optional[int] = None
     """A workspace ID."""
@@ -4150,6 +4151,49 @@ def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
                    quota_name=d.get('quota_name', None))
 
 
+@dataclass
+class RegenerateDashboardRequest:
+    table_name: Optional[str] = None
+    """Full name of the table."""
+
+    warehouse_id: Optional[str] = None
+    """Optional argument to specify the warehouse for dashboard regeneration. If not specified, the
+    first running warehouse will be used."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RegenerateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
+        """Deserializes the RegenerateDashboardRequest from a dictionary."""
+        return cls(table_name=d.get('table_name', None), warehouse_id=d.get('warehouse_id', None))
+
+
+@dataclass
+class RegenerateDashboardResponse:
+    dashboard_id: Optional[str] = None
+    """Id of the regenerated monitoring dashboard."""
+
+    parent_folder: Optional[str] = None
+    """The directory where the regenerated dashboard is stored."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RegenerateDashboardResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
+        """Deserializes the RegenerateDashboardResponse from a dictionary."""
+        return cls(dashboard_id=d.get('dashboard_id', None), parent_folder=d.get('parent_folder', None))
+
+
 @dataclass
 class RegisteredModelAlias:
     """Registered model alias."""
@@ -5220,7 +5264,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore:
 @dataclass
 class UpdateMetastoreAssignment:
     default_catalog_name: Optional[str] = None
-    """The name of the default catalog for the metastore."""
+    """The name of the default catalog in the metastore. This field is depracted. Please use "Default
+    Namespace API" to configure the default catalog for a Databricks workspace."""
 
     metastore_id: Optional[str] = None
     """The unique ID of the metastore."""
@@ -7208,7 +7253,8 @@ def assign(self, workspace_id: int, metastore_id: str, default_catalog_name: str
         :param metastore_id: str
           The unique ID of the metastore.
         :param default_catalog_name: str
-          The name of the default catalog in the metastore.
+          The name of the default catalog in the metastore. This field is depracted. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
 
 
         """
@@ -7421,7 +7467,8 @@ def update_assignment(self,
         :param workspace_id: int
           A workspace ID.
         :param default_catalog_name: str (optional)
-          The name of the default catalog for the metastore.
+          The name of the default catalog in the metastore. This field is depracted. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         :param metastore_id: str (optional)
           The unique ID of the metastore.
@@ -7916,6 +7963,40 @@ def list_refreshes(self, table_name: str) -> MonitorRefreshListResponse:
                            headers=headers)
         return MonitorRefreshListResponse.from_dict(res)
 
+    def regenerate_dashboard(self,
+                             table_name: str,
+                             *,
+                             warehouse_id: Optional[str] = None) -> RegenerateDashboardResponse:
+        """Regenerate a monitoring dashboard.
+
+        Regenerates the monitoring dashboard for the specified table.
+
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+
+        The call must be made from the workspace where the monitor was created. The dashboard will be
+        regenerated in the assets directory that was specified when the monitor was created.
+
+        :param table_name: str
+          Full name of the table.
+        :param warehouse_id: str (optional)
+          Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
+          running warehouse will be used.
+
+        :returns: :class:`RegenerateDashboardResponse`
+        """
+        body = {}
+        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.1/quality-monitoring/tables/{table_name}/monitor/dashboard',
+                           body=body,
+                           headers=headers)
+        return RegenerateDashboardResponse.from_dict(res)
+
     def run_refresh(self, table_name: str) -> MonitorRefreshInfo:
         """Queue a metric refresh for a monitor.
 
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 2b027fe6e..f1c56a1a9 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -892,8 +892,8 @@ class PermissionsRequest:
 
     request_object_type: Optional[str] = None
     """The type of the request object. Can be one of the following: alerts, authorization, clusters,
-    cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs,
-    notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses."""
+    cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+    jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses."""
 
     def as_dict(self) -> dict:
         """Serializes the PermissionsRequest into a dictionary suitable for use as a JSON request body."""
@@ -2600,8 +2600,8 @@ def get(self, request_object_type: str, request_object_id: str) -> ObjectPermiss
 
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs,
-          notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
@@ -2648,8 +2648,8 @@ def set(self,
 
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs,
-          notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
@@ -2679,8 +2679,8 @@ def update(self,
 
         :param request_object_type: str
           The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs,
-          notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
         :param request_object_id: str
           The id of the request object.
         :param access_control_list: List[:class:`AccessControlRequest`] (optional)
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index ea1bfd880..da6cd586c 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -171,7 +171,10 @@ class BaseRun:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
@@ -222,6 +225,7 @@ def as_dict(self) -> dict:
         if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.trigger is not None: body['trigger'] = self.trigger.value
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
@@ -257,6 +261,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    setup_duration=d.get('setup_duration', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    tasks=_repeated_dict(d, 'tasks', RunTask),
                    trigger=_enum(d, 'trigger', TriggerType),
                    trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
@@ -2314,6 +2319,44 @@ def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask:
                    parameters=d.get('parameters', None))
 
 
+@dataclass
+class QueueDetails:
+    code: Optional[QueueDetailsCodeCode] = None
+    """The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to
+    reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was
+    queued due to reaching the per-job limit of concurrent job runs. *
+    `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
+    active run job tasks."""
+
+    message: Optional[str] = None
+    """A descriptive message with the queuing details. This field is unstructured, and its exact format
+    is subject to change."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QueueDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.code is not None: body['code'] = self.code.value
+        if self.message is not None: body['message'] = self.message
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueueDetails:
+        """Deserializes the QueueDetails from a dictionary."""
+        return cls(code=_enum(d, 'code', QueueDetailsCodeCode), message=d.get('message', None))
+
+
+class QueueDetailsCodeCode(Enum):
+    """The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to
+    reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was
+    queued due to reaching the per-job limit of concurrent job runs. *
+    `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of
+    active run job tasks."""
+
+    ACTIVE_RUNS_LIMIT_REACHED = 'ACTIVE_RUNS_LIMIT_REACHED'
+    ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED = 'ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED'
+    MAX_CONCURRENT_RUNS_REACHED = 'MAX_CONCURRENT_RUNS_REACHED'
+
+
 @dataclass
 class QueueSettings:
     enabled: bool
@@ -2343,7 +2386,10 @@ class RepairHistoryItem:
     """The start time of the (repaired) run."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     task_run_ids: Optional[List[int]] = None
     """The run IDs of the task runs that ran as part of this repair history item."""
@@ -2358,6 +2404,7 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.task_run_ids: body['task_run_ids'] = [v for v in self.task_run_ids]
         if self.type is not None: body['type'] = self.type.value
         return body
@@ -2369,6 +2416,7 @@ def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem:
                    id=d.get('id', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    task_run_ids=d.get('task_run_ids', None),
                    type=_enum(d, 'type', RepairHistoryItemType))
 
@@ -2873,7 +2921,10 @@ class Run:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
@@ -2927,6 +2978,7 @@ def as_dict(self) -> dict:
         if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.tasks: body['tasks'] = [v.as_dict() for v in self.tasks]
         if self.trigger is not None: body['trigger'] = self.trigger.value
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
@@ -2965,6 +3017,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    setup_duration=d.get('setup_duration', None),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    tasks=_repeated_dict(d, 'tasks', RunTask),
                    trigger=_enum(d, 'trigger', TriggerType),
                    trigger_info=_from_dict(d, 'trigger_info', TriggerInfo))
@@ -3216,6 +3269,17 @@ class RunLifeCycleState(Enum):
     WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
 
 
+class RunLifecycleStateV2State(Enum):
+    """The current state of the run."""
+
+    BLOCKED = 'BLOCKED'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    RUNNING = 'RUNNING'
+    TERMINATED = 'TERMINATED'
+    TERMINATING = 'TERMINATING'
+
+
 @dataclass
 class RunNow:
     job_id: int
@@ -3609,6 +3673,36 @@ def from_dict(cls, d: Dict[str, any]) -> RunState:
                    user_cancelled_or_timedout=d.get('user_cancelled_or_timedout', None))
 
 
+@dataclass
+class RunStatus:
+    """The current status of the run"""
+
+    queue_details: Optional[QueueDetails] = None
+    """If the run was queued, details about the reason for queuing the run."""
+
+    state: Optional[RunLifecycleStateV2State] = None
+    """The current state of the run."""
+
+    termination_details: Optional[TerminationDetails] = None
+    """If the run is in a TERMINATING or TERMINATED state, details about the reason for terminating the
+    run."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RunStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.queue_details: body['queue_details'] = self.queue_details.as_dict()
+        if self.state is not None: body['state'] = self.state.value
+        if self.termination_details: body['termination_details'] = self.termination_details.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RunStatus:
+        """Deserializes the RunStatus from a dictionary."""
+        return cls(queue_details=_from_dict(d, 'queue_details', QueueDetails),
+                   state=_enum(d, 'state', RunLifecycleStateV2State),
+                   termination_details=_from_dict(d, 'termination_details', TerminationDetails))
+
+
 @dataclass
 class RunTask:
     """Used when outputting a child run, in GetRun or ListRuns."""
@@ -3773,7 +3867,10 @@ class RunTask:
     scheduled to run on a new cluster, this is the time the cluster creation call is issued."""
 
     state: Optional[RunState] = None
-    """The current state of the run."""
+    """Deprecated. Please use the `status` field instead."""
+
+    status: Optional[RunStatus] = None
+    """The current status of the run"""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -3821,6 +3918,7 @@ def as_dict(self) -> dict:
         if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         if self.start_time is not None: body['start_time'] = self.start_time
         if self.state: body['state'] = self.state.as_dict()
+        if self.status: body['status'] = self.status.as_dict()
         if self.task_key is not None: body['task_key'] = self.task_key
         if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
@@ -3864,6 +3962,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunTask:
                    sql_task=_from_dict(d, 'sql_task', SqlTask),
                    start_time=d.get('start_time', None),
                    state=_from_dict(d, 'state', RunState),
+                   status=_from_dict(d, 'status', RunStatus),
                    task_key=d.get('task_key', None),
                    timeout_seconds=d.get('timeout_seconds', None),
                    webhook_notifications=_from_dict(d, 'webhook_notifications', WebhookNotifications))
@@ -5027,6 +5126,149 @@ def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
                    no_alert_for_skipped_runs=d.get('no_alert_for_skipped_runs', None))
 
 
+class TerminationCodeCode(Enum):
+    """The code indicates why the run was terminated. Additional codes might be introduced in future
+    releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled
+    during execution by the Databricks platform; for example, if the maximum run duration was
+    exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the
+    dependency type condition was not met, or there were no material tasks to execute. *
+    `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for
+    further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the
+    Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state
+    message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due
+    to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The
+    run failed because it issued an invalid request to start the cluster. *
+    `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of
+    concurrent active runs. Consider scheduling the runs over a larger time frame. *
+    `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the
+    workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize
+    requests have exceeded the allotted rate limit. Consider spreading the run execution over a
+    larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the
+    customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`:
+    The run was completed with task failures. For more details, refer to the state message or run
+    output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a
+    resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The
+    run failed while installing the user-requested library. Refer to the state message for further
+    details. The causes might include, but are not limited to: The provided library is invalid,
+    there are insufficient permissions to install the library, and so forth. *
+    `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs
+    set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has
+    already reached the maximum number of contexts it is configured to create. See: [Link]. *
+    `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state
+    message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid
+    configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed
+    due to a cloud provider issue. Refer to the state message for further details. *
+    `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
+    limit.
+
+    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
+
+    CANCELED = 'CANCELED'
+    CLOUD_FAILURE = 'CLOUD_FAILURE'
+    CLUSTER_ERROR = 'CLUSTER_ERROR'
+    CLUSTER_REQUEST_LIMIT_EXCEEDED = 'CLUSTER_REQUEST_LIMIT_EXCEEDED'
+    DRIVER_ERROR = 'DRIVER_ERROR'
+    FEATURE_DISABLED = 'FEATURE_DISABLED'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    INVALID_CLUSTER_REQUEST = 'INVALID_CLUSTER_REQUEST'
+    INVALID_RUN_CONFIGURATION = 'INVALID_RUN_CONFIGURATION'
+    LIBRARY_INSTALLATION_ERROR = 'LIBRARY_INSTALLATION_ERROR'
+    MAX_CONCURRENT_RUNS_EXCEEDED = 'MAX_CONCURRENT_RUNS_EXCEEDED'
+    MAX_JOB_QUEUE_SIZE_EXCEEDED = 'MAX_JOB_QUEUE_SIZE_EXCEEDED'
+    MAX_SPARK_CONTEXTS_EXCEEDED = 'MAX_SPARK_CONTEXTS_EXCEEDED'
+    REPOSITORY_CHECKOUT_FAILED = 'REPOSITORY_CHECKOUT_FAILED'
+    RESOURCE_NOT_FOUND = 'RESOURCE_NOT_FOUND'
+    RUN_EXECUTION_ERROR = 'RUN_EXECUTION_ERROR'
+    SKIPPED = 'SKIPPED'
+    STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR'
+    SUCCESS = 'SUCCESS'
+    UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR'
+    WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED'
+
+
+@dataclass
+class TerminationDetails:
+    code: Optional[TerminationCodeCode] = None
+    """The code indicates why the run was terminated. Additional codes might be introduced in future
+    releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled
+    during execution by the Databricks platform; for example, if the maximum run duration was
+    exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the
+    dependency type condition was not met, or there were no material tasks to execute. *
+    `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for
+    further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the
+    Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state
+    message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due
+    to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The
+    run failed because it issued an invalid request to start the cluster. *
+    `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of
+    concurrent active runs. Consider scheduling the runs over a larger time frame. *
+    `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the
+    workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize
+    requests have exceeded the allotted rate limit. Consider spreading the run execution over a
+    larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the
+    customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`:
+    The run was completed with task failures. For more details, refer to the state message or run
+    output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a
+    resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The
+    run failed while installing the user-requested library. Refer to the state message for further
+    details. The causes might include, but are not limited to: The provided library is invalid,
+    there are insufficient permissions to install the library, and so forth. *
+    `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs
+    set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has
+    already reached the maximum number of contexts it is configured to create. See: [Link]. *
+    `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state
+    message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid
+    configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed
+    due to a cloud provider issue. Refer to the state message for further details. *
+    `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
+    limit.
+
+    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
+
+    message: Optional[str] = None
+    """A descriptive message with the termination details. This field is unstructured and the format
+    might change."""
+
+    type: Optional[TerminationTypeType] = None
+    """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the
+    Databricks platform. Please look at the [status page] or contact support if the issue persists.
+    * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
+    configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
+    provider.
+
+    [status page]: https://status.databricks.com/"""
+
+    def as_dict(self) -> dict:
+        """Serializes the TerminationDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.code is not None: body['code'] = self.code.value
+        if self.message is not None: body['message'] = self.message
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TerminationDetails:
+        """Deserializes the TerminationDetails from a dictionary."""
+        return cls(code=_enum(d, 'code', TerminationCodeCode),
+                   message=d.get('message', None),
+                   type=_enum(d, 'type', TerminationTypeType))
+
+
+class TerminationTypeType(Enum):
+    """* `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the
+    Databricks platform. Please look at the [status page] or contact support if the issue persists.
+    * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job
+    configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud
+    provider.
+
+    [status page]: https://status.databricks.com/"""
+
+    CLIENT_ERROR = 'CLIENT_ERROR'
+    CLOUD_FAILURE = 'CLOUD_FAILURE'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    SUCCESS = 'SUCCESS'
+
+
 @dataclass
 class TriggerInfo:
     """Additional details about what triggered the run"""
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index 4d79ef72c..b2cec8126 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -4143,10 +4143,16 @@ def list_artifacts(self,
         """Get all artifacts.
 
         List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
-        contains only artifacts with the specified prefix.",
+        contains only artifacts with the specified prefix. This API does not support pagination when listing
+        artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+        `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+        pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
 
         :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch
+          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
+          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
         :param path: str (optional)
           Filter artifacts matching this path (a relative path from the root artifact directory).
         :param run_id: str (optional)
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 97306b075..e41f34a63 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -1629,14 +1629,6 @@ class ServedModelInput:
     model_version: str
     """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
 
-    workload_size: ServedModelInputWorkloadSize
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
-
     scale_to_zero_enabled: bool
     """Whether the compute resources for the served model should scale down to zero."""
 
@@ -1649,11 +1641,25 @@ class ServedModelInput:
     instance_profile_arn: Optional[str] = None
     """ARN of the instance profile that the served model will use to access AWS resources."""
 
+    max_provisioned_throughput: Optional[int] = None
+    """The maximum tokens per second that the endpoint can scale up to."""
+
+    min_provisioned_throughput: Optional[int] = None
+    """The minimum tokens per second that the endpoint can scale down to."""
+
     name: Optional[str] = None
     """The name of a served model. It must be unique across an endpoint. If not specified, this field
     will default to <model-name>-<model-version>. A served model name can consist of alphanumeric
     characters, dashes, and underscores."""
 
+    workload_size: Optional[ServedModelInputWorkloadSize] = None
+    """The workload size of the served model. The workload size corresponds to a range of provisioned
+    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
+    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
+    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
+    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
+    each workload size will be 0."""
+
     workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served model. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
@@ -1667,6 +1673,10 @@ def as_dict(self) -> dict:
         body = {}
         if self.environment_vars: body['environment_vars'] = self.environment_vars
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
         if self.model_name is not None: body['model_name'] = self.model_name
         if self.model_version is not None: body['model_version'] = self.model_version
         if self.name is not None: body['name'] = self.name
@@ -1680,6 +1690,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
         """Deserializes the ServedModelInput from a dictionary."""
         return cls(environment_vars=d.get('environment_vars', None),
                    instance_profile_arn=d.get('instance_profile_arn', None),
+                   max_provisioned_throughput=d.get('max_provisioned_throughput', None),
+                   min_provisioned_throughput=d.get('min_provisioned_throughput', None),
                    model_name=d.get('model_name', None),
                    model_version=d.get('model_version', None),
                    name=d.get('name', None),
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py
index 2f0ceaab8..d6c28b840 100755
--- a/databricks/sdk/service/vectorsearch.py
+++ b/databricks/sdk/service/vectorsearch.py
@@ -231,6 +231,11 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteIndexResponse:
 
 @dataclass
 class DeltaSyncVectorIndexSpecRequest:
+    columns_to_sync: Optional[List[str]] = None
+    """[Optional] Select the columns to sync with the vector index. If you leave this field blank, all
+    columns from the source table are synced with the index. The primary key column and embedding
+    source column or embedding vector column are always synced."""
+
     embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None
     """The columns that contain the embedding source."""
 
@@ -256,6 +261,7 @@ class DeltaSyncVectorIndexSpecRequest:
     def as_dict(self) -> dict:
         """Serializes the DeltaSyncVectorIndexSpecRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.columns_to_sync: body['columns_to_sync'] = [v for v in self.columns_to_sync]
         if self.embedding_source_columns:
             body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
         if self.embedding_vector_columns:
@@ -269,7 +275,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest:
         """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary."""
-        return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
+        return cls(columns_to_sync=d.get('columns_to_sync', None),
+                   embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
                                                            EmbeddingSourceColumn),
                    embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
                                                            EmbeddingVectorColumn),
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 74f9490de..2ef0c52eb 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.31.1'
+__version__ = '0.32.0'
diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst
index 51c2eb2fa..181b91cc3 100644
--- a/docs/account/billing/billable_usage.rst
+++ b/docs/account/billing/billable_usage.rst
@@ -18,7 +18,7 @@
 
             a = AccountClient()
 
-            resp = a.billable_usage.download(start_month="2023-01", end_month="2023-02")
+            resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
 
         Return billable usage logs.
 
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index bb625b49b..edba0a733 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -183,6 +183,7 @@
             _ = a.budgets.update(
                 budget_id=created.budget.budget_configuration_id,
                 budget=billing.UpdateBudgetConfigurationBudget(
+                    budget_configuration_id=created.budget.budget_configuration_id,
                     display_name=f'sdk-{time.time_ns()}',
                     filter=billing.BudgetConfigurationFilter(tags=[
                         billing.BudgetConfigurationFilterTagClause(
@@ -192,15 +193,12 @@
                     ]),
                     alert_configurations=[
                         billing.AlertConfiguration(
+                            alert_configuration_id=created.budget.alert_configurations[0].alert_configuration_id,
                             time_period=billing.AlertConfigurationTimePeriod.MONTH,
                             quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD,
                             trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED,
                             quantity_threshold="50",
-                            action_configurations=[
-                                billing.ActionConfiguration(
-                                    action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION,
-                                    target="admin@example.com")
-                            ])
+                            action_configurations=created.budget.alert_configurations[0].action_configurations)
                     ]))
 
             # cleanup
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index abbed0f37..98c47cc9b 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -35,15 +35,15 @@
                 aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
                     role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
 
-            created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                                          aws_region=os.environ["AWS_REGION"],
-                                          credentials_id=role.credentials_id,
-                                          storage_configuration_id=storage.storage_configuration_id).result()
+            waiter = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
+                                         aws_region=os.environ["AWS_REGION"],
+                                         credentials_id=role.credentials_id,
+                                         storage_configuration_id=storage.storage_configuration_id)
 
             # cleanup
             a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
             a.credentials.delete(credentials_id=role.credentials_id)
-            a.workspaces.delete(workspace_id=created.workspace_id)
+            a.workspaces.delete(workspace_id=waiter.workspace_id)
 
         Create a new workspace.
 
@@ -175,34 +175,13 @@
 
         .. code-block::
 
-            import os
-            import time
-
             from databricks.sdk import AccountClient
-            from databricks.sdk.service import provisioning
 
             a = AccountClient()
 
-            storage = a.storage.create(
-                storage_configuration_name=f'sdk-{time.time_ns()}',
-                root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"]))
-
-            role = a.credentials.create(
-                credentials_name=f'sdk-{time.time_ns()}',
-                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
-                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
-
-            created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}',
-                                          aws_region=os.environ["AWS_REGION"],
-                                          credentials_id=role.credentials_id,
-                                          storage_configuration_id=storage.storage_configuration_id).result()
+            created = a.waiter.get()
 
             by_id = a.workspaces.get(workspace_id=created.workspace_id)
-
-            # cleanup
-            a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
-            a.credentials.delete(credentials_id=role.credentials_id)
-            a.workspaces.delete(workspace_id=created.workspace_id)
 
         Get a workspace.
@@ -263,32 +242,17 @@ a = AccountClient() - storage = a.storage.create( - storage_configuration_name=f'sdk-{time.time_ns()}', - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"])) - - role = a.credentials.create( - credentials_name=f'sdk-{time.time_ns()}', - aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( - role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) - update_role = a.credentials.create( credentials_name=f'sdk-{time.time_ns()}', aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) - created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}', - aws_region=os.environ["AWS_REGION"], - credentials_id=role.credentials_id, - storage_configuration_id=storage.storage_configuration_id).result() + created = a.waiter.get() _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result() # cleanup - a.storage.delete(storage_configuration_id=storage.storage_configuration_id) - a.credentials.delete(credentials_id=role.credentials_id) a.credentials.delete(credentials_id=update_role.credentials_id) - a.workspaces.delete(workspace_id=created.workspace_id) Update workspace configuration. diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst index d15edc813..4f9c651d2 100644 --- a/docs/dbdataclasses/catalog.rst +++ b/docs/dbdataclasses/catalog.rst @@ -1176,6 +1176,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RegenerateDashboardRequest + :members: + :undoc-members: + +.. autoclass:: RegenerateDashboardResponse + :members: + :undoc-members: + .. autoclass:: RegisteredModelAlias :members: :undoc-members: diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst index 0140be948..b1b05ec18 100644 --- a/docs/dbdataclasses/jobs.rst +++ b/docs/dbdataclasses/jobs.rst @@ -404,6 +404,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: QueueDetails + :members: + :undoc-members: + +.. py:class:: QueueDetailsCodeCode + + The reason for queuing the run. * `ACTIVE_RUNS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active task runs. * `MAX_CONCURRENT_RUNS_REACHED`: The run was queued due to reaching the per-job limit of concurrent job runs. * `ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED`: The run was queued due to reaching the workspace limit of active run job tasks. + + .. py:attribute:: ACTIVE_RUNS_LIMIT_REACHED + :value: "ACTIVE_RUNS_LIMIT_REACHED" + + .. py:attribute:: ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED + :value: "ACTIVE_RUN_JOB_TASKS_LIMIT_REACHED" + + .. py:attribute:: MAX_CONCURRENT_RUNS_REACHED + :value: "MAX_CONCURRENT_RUNS_REACHED" + .. autoclass:: QueueSettings :members: :undoc-members: @@ -544,6 +561,28 @@ These dataclasses are used in the SDK to represent API requests and responses fo .. py:attribute:: WAITING_FOR_RETRY :value: "WAITING_FOR_RETRY" +.. py:class:: RunLifecycleStateV2State + + The current state of the run. + + .. py:attribute:: BLOCKED + :value: "BLOCKED" + + .. py:attribute:: PENDING + :value: "PENDING" + + .. py:attribute:: QUEUED + :value: "QUEUED" + + .. py:attribute:: RUNNING + :value: "RUNNING" + + .. py:attribute:: TERMINATED + :value: "TERMINATED" + + .. py:attribute:: TERMINATING + :value: "TERMINATING" + .. 
autoclass:: RunNow :members: :undoc-members: @@ -595,6 +634,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. autoclass:: RunStatus + :members: + :undoc-members: + .. autoclass:: RunTask :members: :undoc-members: @@ -751,6 +794,95 @@ These dataclasses are used in the SDK to represent API requests and responses fo :members: :undoc-members: +.. py:class:: TerminationCodeCode + + The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit. + [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now + + .. py:attribute:: CANCELED + :value: "CANCELED" + + .. 
py:attribute:: CLOUD_FAILURE
+      :value: "CLOUD_FAILURE"
+
+   .. py:attribute:: CLUSTER_ERROR
+      :value: "CLUSTER_ERROR"
+
+   .. py:attribute:: CLUSTER_REQUEST_LIMIT_EXCEEDED
+      :value: "CLUSTER_REQUEST_LIMIT_EXCEEDED"
+
+   .. py:attribute:: DRIVER_ERROR
+      :value: "DRIVER_ERROR"
+
+   .. py:attribute:: FEATURE_DISABLED
+      :value: "FEATURE_DISABLED"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: INVALID_CLUSTER_REQUEST
+      :value: "INVALID_CLUSTER_REQUEST"
+
+   .. py:attribute:: INVALID_RUN_CONFIGURATION
+      :value: "INVALID_RUN_CONFIGURATION"
+
+   .. py:attribute:: LIBRARY_INSTALLATION_ERROR
+      :value: "LIBRARY_INSTALLATION_ERROR"
+
+   .. py:attribute:: MAX_CONCURRENT_RUNS_EXCEEDED
+      :value: "MAX_CONCURRENT_RUNS_EXCEEDED"
+
+   .. py:attribute:: MAX_JOB_QUEUE_SIZE_EXCEEDED
+      :value: "MAX_JOB_QUEUE_SIZE_EXCEEDED"
+
+   .. py:attribute:: MAX_SPARK_CONTEXTS_EXCEEDED
+      :value: "MAX_SPARK_CONTEXTS_EXCEEDED"
+
+   .. py:attribute:: REPOSITORY_CHECKOUT_FAILED
+      :value: "REPOSITORY_CHECKOUT_FAILED"
+
+   .. py:attribute:: RESOURCE_NOT_FOUND
+      :value: "RESOURCE_NOT_FOUND"
+
+   .. py:attribute:: RUN_EXECUTION_ERROR
+      :value: "RUN_EXECUTION_ERROR"
+
+   .. py:attribute:: SKIPPED
+      :value: "SKIPPED"
+
+   .. py:attribute:: STORAGE_ACCESS_ERROR
+      :value: "STORAGE_ACCESS_ERROR"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
+   .. py:attribute:: UNAUTHORIZED_ERROR
+      :value: "UNAUTHORIZED_ERROR"
+
+   .. py:attribute:: WORKSPACE_RUN_LIMIT_EXCEEDED
+      :value: "WORKSPACE_RUN_LIMIT_EXCEEDED"
+
+.. autoclass:: TerminationDetails
+   :members:
+   :undoc-members:
+
+.. py:class:: TerminationTypeType
+
+   * `SUCCESS`: The run terminated without any issues * `INTERNAL_ERROR`: An error occurred in the Databricks platform. Please look at the [status page] or contact support if the issue persists. * `CLIENT_ERROR`: The run was terminated because of an error caused by user input or the job configuration. * `CLOUD_FAILURE`: The run was terminated because of an issue with your cloud provider.
+
+   [status page]: https://status.databricks.com/
+
+   .. py:attribute:: CLIENT_ERROR
+      :value: "CLIENT_ERROR"
+
+   .. py:attribute:: CLOUD_FAILURE
+      :value: "CLOUD_FAILURE"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
 .. autoclass:: TriggerInfo
    :members:
    :undoc-members:
diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst
index f8a3c2872..01a936e0b 100644
--- a/docs/workspace/catalog/metastores.rst
+++ b/docs/workspace/catalog/metastores.rst
@@ -52,7 +52,8 @@
         :param metastore_id: str
           The unique ID of the metastore.
         :param default_catalog_name: str
-          The name of the default catalog in the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
 
 
 
@@ -305,7 +306,8 @@
         :param workspace_id: int
           A workspace ID.
         :param default_catalog_name: str (optional)
-          The name of the default catalog for the metastore.
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
         :param metastore_id: str (optional)
           The unique ID of the metastore.
 
diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst index 030094049..93f05b69a 100644 --- a/docs/workspace/catalog/quality_monitors.rst +++ b/docs/workspace/catalog/quality_monitors.rst @@ -166,6 +166,29 @@ :returns: :class:`MonitorRefreshListResponse` + .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse + + Regenerate a monitoring dashboard. + + Regenerates the monitoring dashboard for the specified table. + + The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the + table's parent catalog and be an owner of the table's parent schema 3. have the following permissions: + - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an + owner of the table + + The call must be made from the workspace where the monitor was created. The dashboard will be + regenerated in the assets directory that was specified when the monitor was created. + + :param table_name: str + Full name of the table. + :param warehouse_id: str (optional) + Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first + running warehouse will be used. + + :returns: :class:`RegenerateDashboardResponse` + + .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo Queue a metric refresh for a monitor. diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst index 7deb9eafb..1f2fd2851 100644 --- a/docs/workspace/iam/permissions.rst +++ b/docs/workspace/iam/permissions.rst @@ -81,8 +81,8 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, - notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, + jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. @@ -158,8 +158,8 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, - notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, + jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. :param access_control_list: List[:class:`AccessControlRequest`] (optional) @@ -176,8 +176,8 @@ :param request_object_type: str The type of the request object. Can be one of the following: alerts, authorization, clusters, - cluster-policies, dbsql-dashboards, directories, experiments, files, instance-pools, jobs, - notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. + cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools, + jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses. :param request_object_id: str The id of the request object. 
:param access_control_list: List[:class:`AccessControlRequest`] (optional) diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst index 1ada6b1e5..c09cfe353 100644 --- a/docs/workspace/ml/experiments.rst +++ b/docs/workspace/ml/experiments.rst @@ -270,10 +270,16 @@ Get all artifacts. List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response - contains only artifacts with the specified prefix.", + contains only artifacts with the specified prefix. This API does not support pagination when listing + artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call + `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports + pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). :param page_token: str (optional) - Token indicating the page of artifact results to fetch + Token indicating the page of artifact results to fetch. `page_token` is not supported when listing + artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call + `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports + pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents). :param path: str (optional) Filter artifacts matching this path (a relative path from the root artifact directory). :param run_id: str (optional) diff --git a/examples/account/billable_usage/download_usage_download.py b/examples/account/billable_usage/download_usage_download.py index aba474963..9147f87b9 100755 --- a/examples/account/billable_usage/download_usage_download.py +++ b/examples/account/billable_usage/download_usage_download.py @@ -2,4 +2,4 @@ a = AccountClient() -resp = a.billable_usage.download(start_month="2023-01", end_month="2023-02") +resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") diff --git a/examples/account/budgets/update_budgets.py b/examples/account/budgets/update_budgets.py index e19630113..399770058 100755 --- a/examples/account/budgets/update_budgets.py +++ b/examples/account/budgets/update_budgets.py @@ -29,6 +29,7 @@ _ = a.budgets.update( budget_id=created.budget.budget_configuration_id, budget=billing.UpdateBudgetConfigurationBudget( + budget_configuration_id=created.budget.budget_configuration_id, display_name=f'sdk-{time.time_ns()}', filter=billing.BudgetConfigurationFilter(tags=[ billing.BudgetConfigurationFilterTagClause( @@ -38,15 +39,12 @@ ]), alert_configurations=[ billing.AlertConfiguration( + alert_configuration_id=created.budget.alert_configurations[0].alert_configuration_id, time_period=billing.AlertConfigurationTimePeriod.MONTH, quantity_type=billing.AlertConfigurationQuantityType.LIST_PRICE_DOLLARS_USD, trigger_type=billing.AlertConfigurationTriggerType.CUMULATIVE_SPENDING_EXCEEDED, quantity_threshold="50", - action_configurations=[ - billing.ActionConfiguration( - action_type=billing.ActionConfigurationType.EMAIL_NOTIFICATION, - target="admin@example.com") - ]) + action_configurations=created.budget.alert_configurations[0].action_configurations) ])) # cleanup diff --git a/examples/account/io/read_usage_download.py b/examples/account/io/read_usage_download.py index d52b31b0b..544a1d3f2 100755 --- a/examples/account/io/read_usage_download.py +++ b/examples/account/io/read_usage_download.py @@ -2,6 +2,6 @@ a = AccountClient() -resp = a.billable_usage.download(start_month="2023-01", 
end_month="2023-02") +resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09") out = a.io.read(resp.contents) diff --git a/examples/account/waiter/get_workspaces.py b/examples/account/waiter/get_workspaces.py new file mode 100755 index 000000000..c682d25c0 --- /dev/null +++ b/examples/account/waiter/get_workspaces.py @@ -0,0 +1,5 @@ +from databricks.sdk import AccountClient + +a = AccountClient() + +created = a.waiter.get() diff --git a/examples/account/workspaces/create_workspaces.py b/examples/account/workspaces/create_workspaces.py index bf820677c..c2ff96ef2 100755 --- a/examples/account/workspaces/create_workspaces.py +++ b/examples/account/workspaces/create_workspaces.py @@ -15,12 +15,12 @@ aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) -created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}', - aws_region=os.environ["AWS_REGION"], - credentials_id=role.credentials_id, - storage_configuration_id=storage.storage_configuration_id).result() +waiter = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}', + aws_region=os.environ["AWS_REGION"], + credentials_id=role.credentials_id, + storage_configuration_id=storage.storage_configuration_id) # cleanup a.storage.delete(storage_configuration_id=storage.storage_configuration_id) a.credentials.delete(credentials_id=role.credentials_id) -a.workspaces.delete(workspace_id=created.workspace_id) +a.workspaces.delete(workspace_id=waiter.workspace_id) diff --git a/examples/account/workspaces/get_workspaces.py b/examples/account/workspaces/get_workspaces.py index 809a1f374..cd05630c7 100755 --- a/examples/account/workspaces/get_workspaces.py +++ b/examples/account/workspaces/get_workspaces.py @@ -1,28 +1,7 @@ -import os -import time - from databricks.sdk import AccountClient -from databricks.sdk.service import provisioning a = AccountClient() -storage = a.storage.create( - storage_configuration_name=f'sdk-{time.time_ns()}', - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"])) - -role = a.credentials.create( - credentials_name=f'sdk-{time.time_ns()}', - aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( - role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) - -created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}', - aws_region=os.environ["AWS_REGION"], - credentials_id=role.credentials_id, - storage_configuration_id=storage.storage_configuration_id).result() +created = a.waiter.get() by_id = a.workspaces.get(workspace_id=created.workspace_id) - -# cleanup -a.storage.delete(storage_configuration_id=storage.storage_configuration_id) -a.credentials.delete(credentials_id=role.credentials_id) -a.workspaces.delete(workspace_id=created.workspace_id) diff --git a/examples/account/workspaces/update_workspaces.py b/examples/account/workspaces/update_workspaces.py index f766e1b5a..e93450722 100755 --- a/examples/account/workspaces/update_workspaces.py +++ b/examples/account/workspaces/update_workspaces.py @@ -6,29 +6,14 @@ a = AccountClient() -storage = a.storage.create( - storage_configuration_name=f'sdk-{time.time_ns()}', - root_bucket_info=provisioning.RootBucketInfo(bucket_name=os.environ["TEST_ROOT_BUCKET"])) - -role = a.credentials.create( - credentials_name=f'sdk-{time.time_ns()}', - aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( - 
role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) - update_role = a.credentials.create( credentials_name=f'sdk-{time.time_ns()}', aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole( role_arn=os.environ["TEST_CROSSACCOUNT_ARN"]))) -created = a.workspaces.create(workspace_name=f'sdk-{time.time_ns()}', - aws_region=os.environ["AWS_REGION"], - credentials_id=role.credentials_id, - storage_configuration_id=storage.storage_configuration_id).result() +created = a.waiter.get() _ = a.workspaces.update(workspace_id=created.workspace_id, credentials_id=update_role.credentials_id).result() # cleanup -a.storage.delete(storage_configuration_id=storage.storage_configuration_id) -a.credentials.delete(credentials_id=role.credentials_id) a.credentials.delete(credentials_id=update_role.credentials_id) -a.workspaces.delete(workspace_id=created.workspace_id) From dcd06c0c16b011ddf66f8912428688d98de86d9b Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Tue, 10 Sep 2024 07:26:27 -0400 Subject: [PATCH 034/136] [Fix] Properly include message when handing SCIM errors (#753) ## Changes #741 introduced a regression in retrieving error details from SCIM APIs. This PR addresses this and adds a regression test for this case. The implementation should now match the Go SDK's here: https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go#L220-L224. Fixes #749. ## Tests Added a unit test based on the supplied response in the ticket. - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- databricks/sdk/errors/parser.py | 5 +++-- tests/test_errors.py | 11 ++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py index e2feb99d6..3d15f1673 100644 --- a/databricks/sdk/errors/parser.py +++ b/databricks/sdk/errors/parser.py @@ -60,8 +60,9 @@ def parse_error(self, response: requests.Response, response_body: bytes) -> Opti if detail: # Handle SCIM error message details # @see https://tools.ietf.org/html/rfc7644#section-3.7.3 - error_args[ - 'message'] = f"{scim_type} {error_args.get('message', 'SCIM API Internal Error')}".strip(" ") + if detail == "null": + detail = "SCIM API Internal Error" + error_args['message'] = f"{scim_type} {detail}".strip(" ") error_args['error_code'] = f"SCIM_{status}" return error_args diff --git a/tests/test_errors.py b/tests/test_errors.py index 1dfcfaf26..2e19ec897 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -109,7 +109,16 @@ def make_private_link_response() -> requests.Response: 'Please report this issue with the following debugging information to the SDK issue tracker at ' 'https://github.com/databricks/databricks-sdk-go/issues. 
Request log:```GET /api/2.0/service\n' '< 400 Bad Request\n' - '< this is not a real response```')), ]) + '< this is not a real response```')), + [ + fake_response( + 'GET', 404, + json.dumps({ + 'detail': 'Group with id 1234 is not found', + 'status': '404', + 'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error'] + })), errors.NotFound, 'None Group with id 1234 is not found' + ]]) def test_get_api_error(response, expected_error, expected_message): with pytest.raises(errors.DatabricksError) as e: raise errors.get_api_error(response) From b34f502cc48fe6cd37dbdf5e196de1842c9854f2 Mon Sep 17 00:00:00 2001 From: hectorcast-db Date: Tue, 10 Sep 2024 14:31:02 +0200 Subject: [PATCH 035/136] [Release] Release v0.32.1 (#754) ### Bug Fixes * Properly include message when handing SCIM errors ([#753](https://github.com/databricks/databricks-sdk-py/pull/753)). --- CHANGELOG.md | 8 ++++++++ databricks/sdk/version.py | 2 +- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 62b0985d6..cd1ee8136 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Version changelog +## [Release] Release v0.32.1 + +### Bug Fixes + + * Properly include message when handing SCIM errors ([#753](https://github.com/databricks/databricks-sdk-py/pull/753)). + + + ## [Release] Release v0.32.0 ### Bug Fixes diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py index 2ef0c52eb..68c3b1326 100644 --- a/databricks/sdk/version.py +++ b/databricks/sdk/version.py @@ -1 +1 @@ -__version__ = '0.32.0' +__version__ = '0.32.1' From 3162545c476a05e8e8c993b9e46038ddeea953a3 Mon Sep 17 00:00:00 2001 From: Shicheng Zhou <142252423+shichengzhou-db@users.noreply.github.com> Date: Thu, 12 Sep 2024 00:11:59 -0700 Subject: [PATCH 036/136] [Feature] Support Models in `dbutils.fs` operations (#750) ## Changes - Support files operations in WorkspaceClient.Files for Databricks UC Model artifacts so that user can use databricks sdk to download UC model artifacts. - This PR is part of the work to migrate mlflow client towards using databricks sdk for model artifacts download/upload operations for better security. 
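For illustration, a minimal sketch of what this enables through the existing `DbfsExt` mixin, complementing the plain `download` call shown under Tests (the `/Models/...` path is the example artifact from that snippet; a real workspace needs a model version it can access):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# `/Models` paths are now routed to the Files API (like `/Volumes`), so the
# usual dbutils.fs-style operations work on UC model artifacts:
if w.dbfs.exists('/Models/system/ai/dbrx_instruct/3/MLmodel'):
    # copy the artifact down to the local filesystem
    w.dbfs.copy('/Models/system/ai/dbrx_instruct/3/MLmodel', 'file:/tmp/MLmodel')
```
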
## Tests - Existing tests in test_dbfs_mixins.py, similar to how _VolumesPath is tested - The following code works ``` from databricks.sdk import WorkspaceClient w = WorkspaceClient() resp = w.files.download("/Models/system/ai/dbrx_instruct/3/MLmodel") ``` - [x] `make test` run locally - [x] `make fmt` applied - [x] relevant integration tests applied --- databricks/sdk/mixins/files.py | 18 +++++++++--------- tests/test_dbfs_mixins.py | 13 ++++++++----- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py index 1e109a1a7..47c11747d 100644 --- a/databricks/sdk/mixins/files.py +++ b/databricks/sdk/mixins/files.py @@ -167,7 +167,7 @@ def __repr__(self) -> str: return f"<_DbfsIO {self._path} {'read' if self.readable() else 'write'}=True>" -class _VolumesIO(BinaryIO): +class _FilesIO(BinaryIO): def __init__(self, api: files.FilesAPI, path: str, *, read: bool, write: bool, overwrite: bool): self._buffer = [] @@ -262,7 +262,7 @@ def __exit__(self, __t, __value, __traceback): self.close() def __repr__(self) -> str: - return f"<_VolumesIO {self._path} {'read' if self.readable() else 'write'}=True>" + return f"<_FilesIO {self._path} {'read' if self.readable() else 'write'}=True>" class _Path(ABC): @@ -398,7 +398,7 @@ def __repr__(self) -> str: return f'<_LocalPath {self._path}>' -class _VolumesPath(_Path): +class _FilesPath(_Path): def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]): self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', '')) @@ -411,7 +411,7 @@ def _is_dbfs(self) -> bool: return False def child(self, path: str) -> Self: - return _VolumesPath(self._api, str(self._path / path)) + return _FilesPath(self._api, str(self._path / path)) def _is_dir(self) -> bool: try: @@ -431,7 +431,7 @@ def exists(self) -> bool: return self.is_dir def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO: - return _VolumesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite) + return _FilesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite) def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]: if not self.is_dir: @@ -458,13 +458,13 @@ def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]: def delete(self, *, recursive=False): if self.is_dir: for entry in self.list(recursive=False): - _VolumesPath(self._api, entry.path).delete(recursive=True) + _FilesPath(self._api, entry.path).delete(recursive=True) self._api.delete_directory(self.as_string) else: self._api.delete(self.as_string) def __repr__(self) -> str: - return f'<_VolumesPath {self._path}>' + return f'<_FilesPath {self._path}>' class _DbfsPath(_Path): @@ -589,8 +589,8 @@ def _path(self, src): 'UC Volumes paths, not external locations or DBFS mount points.') if src.scheme == 'file': return _LocalPath(src.geturl()) - if src.path.startswith('/Volumes'): - return _VolumesPath(self._files_api, src.geturl()) + if src.path.startswith(('/Volumes', '/Models')): + return _FilesPath(self._files_api, src.geturl()) return _DbfsPath(self._dbfs_api, src.geturl()) def copy(self, src: str, dst: str, *, recursive=False, overwrite=False): diff --git a/tests/test_dbfs_mixins.py b/tests/test_dbfs_mixins.py index 6bbaca7a2..ce86a2a80 100644 --- a/tests/test_dbfs_mixins.py +++ b/tests/test_dbfs_mixins.py @@ -1,8 +1,8 @@ import pytest from databricks.sdk.errors import NotFound -from databricks.sdk.mixins.files import (DbfsExt, _DbfsPath, _LocalPath, - 
_VolumesPath) +from databricks.sdk.mixins.files import (DbfsExt, _DbfsPath, _FilesPath, + _LocalPath) def test_moving_dbfs_file_to_local_dir(config, tmp_path, mocker): @@ -55,11 +55,14 @@ def test_moving_local_dir_to_dbfs(config, tmp_path, mocker): @pytest.mark.parametrize('path,expected_type', [('/path/to/file', _DbfsPath), - ('/Volumes/path/to/file', _VolumesPath), + ('/Volumes/path/to/file', _FilesPath), + ('/Models/path/to/file', _FilesPath), ('dbfs:/path/to/file', _DbfsPath), - ('dbfs:/Volumes/path/to/file', _VolumesPath), + ('dbfs:/Volumes/path/to/file', _FilesPath), + ('dbfs:/Models/path/to/file', _FilesPath), ('file:/path/to/file', _LocalPath), - ('file:/Volumes/path/to/file', _LocalPath), ]) + ('file:/Volumes/path/to/file', _LocalPath), + ('file:/Models/path/to/file', _LocalPath), ]) def test_fs_path(config, path, expected_type): dbfs_ext = DbfsExt(config) assert isinstance(dbfs_ext._path(path), expected_type) From c3aad2876214d617c0209241c67b7263d5d8bf54 Mon Sep 17 00:00:00 2001 From: Serge Smertin <259697+nfx@users.noreply.github.com> Date: Fri, 13 Sep 2024 10:55:38 +0200 Subject: [PATCH 037/136] [Fix] Use correct optional typing in `WorkspaceClient` for `mypy` (#760) Without this change, the SDK is not very `mypy`-compatible. This PR fixes `arg-type` check: ``` cmd [3] | mypy --disable-error-code 'annotation-unchecked' --exclude 'tests/resources/*' --exclude dist . error: Argument "auth_type" to "WorkspaceClient" has incompatible type "str | None"; expected "str" [arg-type] error: Argument "token" to "WorkspaceClient" has incompatible type "str | None"; expected "str" [arg-type] ``` `Optional[X]` is py3.8 and py3.9 way of expressing optional types, but in py3.10+ it gets transformed into a union type of `X | None`, which is not supported by py3.9. py3.9 EOL is 31 Oct 2025, so we have to deal with it somehow until then. --- .codegen/__init__.py.tmpl | 24 +++++----- databricks/sdk/__init__.py | 94 +++++++++++++++++++------------------- databricks/sdk/config.py | 6 +-- 3 files changed, 63 insertions(+), 61 deletions(-) diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl index 572b50490..7ab8e64f6 100644 --- a/.codegen/__init__.py.tmpl +++ b/.codegen/__init__.py.tmpl @@ -41,14 +41,14 @@ class WorkspaceClient: """ The WorkspaceClient is a client for the workspace-level Databricks REST API. """ - def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config({{range $args}}{{.}}={{.}}, {{end}} credentials_strategy=credentials_strategy, @@ -110,14 +110,14 @@ class AccountClient: The AccountClient is a client for the account-level Databricks REST API. 
""" - def __init__(self, *{{range $args}}, {{.}}: str = None{{end}}, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}}, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config({{range $args}}{{.}}={{.}}, {{end}} credentials_strategy=credentials_strategy, diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py index 50069c315..b177d97dc 100755 --- a/databricks/sdk/__init__.py +++ b/databricks/sdk/__init__.py @@ -1,3 +1,5 @@ +from typing import Optional + import databricks.sdk.core as client import databricks.sdk.dbutils as dbutils from databricks.sdk import azure @@ -116,31 +118,31 @@ class WorkspaceClient: def __init__(self, *, - host: str = None, - account_id: str = None, - username: str = None, - password: str = None, - client_id: str = None, - client_secret: str = None, - token: str = None, - profile: str = None, - config_file: str = None, - azure_workspace_resource_id: str = None, - azure_client_secret: str = None, - azure_client_id: str = None, - azure_tenant_id: str = None, - azure_environment: str = None, - auth_type: str = None, - cluster_id: str = None, - google_credentials: str = None, - google_service_account: str = None, - debug_truncate_bytes: int = None, - debug_headers: bool = None, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config(host=host, account_id=account_id, @@ -742,31 +744,31 @@ class AccountClient: def __init__(self, *, - host: str = None, - account_id: str = None, - username: str = None, - password: str = None, - client_id: str = None, - client_secret: str = None, - token: str = None, - profile: str = None, - config_file: str = None, - azure_workspace_resource_id: str = None, - azure_client_secret: str = None, - azure_client_id: str = None, - azure_tenant_id: str = None, - azure_environment: str = None, - auth_type: str = None, - cluster_id: str = None, - google_credentials: str = None, - google_service_account: str = None, - debug_truncate_bytes: int = None, - 
debug_headers: bool = None, + host: Optional[str] = None, + account_id: Optional[str] = None, + username: Optional[str] = None, + password: Optional[str] = None, + client_id: Optional[str] = None, + client_secret: Optional[str] = None, + token: Optional[str] = None, + profile: Optional[str] = None, + config_file: Optional[str] = None, + azure_workspace_resource_id: Optional[str] = None, + azure_client_secret: Optional[str] = None, + azure_client_id: Optional[str] = None, + azure_tenant_id: Optional[str] = None, + azure_environment: Optional[str] = None, + auth_type: Optional[str] = None, + cluster_id: Optional[str] = None, + google_credentials: Optional[str] = None, + google_service_account: Optional[str] = None, + debug_truncate_bytes: Optional[int] = None, + debug_headers: Optional[bool] = None, product="unknown", product_version="0.0.0", - credentials_strategy: CredentialsStrategy = None, - credentials_provider: CredentialsStrategy = None, - config: client.Config = None): + credentials_strategy: Optional[CredentialsStrategy] = None, + credentials_provider: Optional[CredentialsStrategy] = None, + config: Optional[client.Config] = None): if not config: config = client.Config(host=host, account_id=account_id, diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py index 28d57ad42..5cae1b2b4 100644 --- a/databricks/sdk/config.py +++ b/databricks/sdk/config.py @@ -92,11 +92,11 @@ class Config: def __init__(self, *, # Deprecated. Use credentials_strategy instead. - credentials_provider: CredentialsStrategy = None, - credentials_strategy: CredentialsStrategy = None, + credentials_provider: Optional[CredentialsStrategy] = None, + credentials_strategy: Optional[CredentialsStrategy] = None, product=None, product_version=None, - clock: Clock = None, + clock: Optional[Clock] = None, **kwargs): self._header_factory = None self._inner = {} From f06bb271f729e61bb67fa87dd7cc5ef3e323e4d0 Mon Sep 17 00:00:00 2001 From: Miles Yucht Date: Fri, 13 Sep 2024 09:18:29 -0400 Subject: [PATCH 038/136] [Fix] Fix deserialization of 401/403 errors (#758) ## Changes #741 introduced a change to how an error message was modified in `ApiClient._perform`. Previously, arguments to the DatabricksError constructor were modified as a dictionary in `_perform`. After that change, `get_api_error` started to return a `DatabricksError` instance whose attributes were modified. The `message` attribute referred to in that change does not exist in the DatabricksError class: there is a `message` constructor parameter, but it is not set as an attribute. This PR refactors the error handling logic slightly to restore the original behavior. In doing this, we decouple all error-parsing and customizing logic out of ApiClient. This also sets us up to allow for further extension of error parsing and customization in the future, a feature that I have seen present in other SDKs. Fixes #755. 
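To illustrate the decoupling, a hedged sketch of the new extension point (these are private helpers, not public API; the wiring mirrors how `ApiClient` registers `_AddDebugErrorCustomizer` in the diff below, and `_UrlPrefixCustomizer` is a hypothetical example):

```python
import requests

from databricks.sdk.errors import _ErrorCustomizer, _Parser


class _UrlPrefixCustomizer(_ErrorCustomizer):
    """Hypothetical customizer: prefix parsed error messages with the request URL."""

    def customize_error(self, response: requests.Response, kwargs: dict):
        # `kwargs` holds the DatabricksError constructor arguments produced by
        # the deserializers; customizers may mutate them in place.
        kwargs['message'] = f"{response.request.url}: {kwargs.get('message', 'request failed')}"


# Customizers run in order after an error body has been deserialized.
parser = _Parser(extra_error_customizers=[_UrlPrefixCustomizer()])
# For a requests.Response `response` from an API call, `parser.get_api_error(response)`
# returns None on success or a mapped DatabricksError subclass on failure.
```
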
## Tests - [ ] `make test` run locally - [ ] `make fmt` applied - [ ] relevant integration tests applied --- databricks/sdk/core.py | 47 ++---- databricks/sdk/errors/__init__.py | 4 +- databricks/sdk/errors/customizer.py | 50 ++++++ databricks/sdk/errors/deserializer.py | 106 +++++++++++++ databricks/sdk/errors/parser.py | 174 +++++++-------------- databricks/sdk/logger/round_trip_logger.py | 3 +- tests/test_core.py | 90 ++++++++--- tests/test_errors.py | 34 ++-- 8 files changed, 324 insertions(+), 184 deletions(-) create mode 100644 databricks/sdk/errors/customizer.py create mode 100644 databricks/sdk/errors/deserializer.py diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py index e028e4b15..77e8c9aac 100644 --- a/databricks/sdk/core.py +++ b/databricks/sdk/core.py @@ -10,7 +10,7 @@ from .config import * # To preserve backwards compatibility (as these definitions were previously in this module) from .credentials_provider import * -from .errors import DatabricksError, get_api_error +from .errors import DatabricksError, _ErrorCustomizer, _Parser from .logger import RoundTrip from .oauth import retrieve_token from .retries import retried @@ -71,6 +71,8 @@ def __init__(self, cfg: Config = None): # Default to 60 seconds self._http_timeout_seconds = cfg.http_timeout_seconds if cfg.http_timeout_seconds else 60 + self._error_parser = _Parser(extra_error_customizers=[_AddDebugErrorCustomizer(cfg)]) + @property def account_id(self) -> str: return self._cfg.account_id @@ -219,27 +221,6 @@ def _is_retryable(err: BaseException) -> Optional[str]: return f'matched {substring}' return None - @classmethod - def _parse_retry_after(cls, response: requests.Response) -> Optional[int]: - retry_after = response.headers.get("Retry-After") - if retry_after is None: - # 429 requests should include a `Retry-After` header, but if it's missing, - # we default to 1 second. - return cls._RETRY_AFTER_DEFAULT - # If the request is throttled, try parse the `Retry-After` header and sleep - # for the specified number of seconds. Note that this header can contain either - # an integer or a RFC1123 datetime string. - # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After - # - # For simplicity, we only try to parse it as an integer, as this is what Databricks - # platform returns. Otherwise, we fall back and don't sleep. - try: - return int(retry_after) - except ValueError: - logger.debug(f'Invalid Retry-After header received: {retry_after}. 
Defaulting to 1') - # defaulting to 1 sleep second to make self._is_retryable() simpler - return cls._RETRY_AFTER_DEFAULT - def _perform(self, method: str, url: str, @@ -261,15 +242,8 @@ def _perform(self, stream=raw, timeout=self._http_timeout_seconds) self._record_request_log(response, raw=raw or data is not None or files is not None) - error = get_api_error(response) + error = self._error_parser.get_api_error(response) if error is not None: - status_code = response.status_code - is_http_unauthorized_or_forbidden = status_code in (401, 403) - is_too_many_requests_or_unavailable = status_code in (429, 503) - if is_http_unauthorized_or_forbidden: - error.message = self._cfg.wrap_debug_info(error.message) - if is_too_many_requests_or_unavailable: - error.retry_after_secs = self._parse_retry_after(response) raise error from None return response @@ -279,6 +253,19 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) -> logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate()) +class _AddDebugErrorCustomizer(_ErrorCustomizer): + """An error customizer that adds debug information about the configuration to unauthenticated and + unauthorized errors.""" + + def __init__(self, cfg: Config): + self._cfg = cfg + + def customize_error(self, response: requests.Response, kwargs: dict): + if response.status_code in (401, 403): + message = kwargs.get('message', 'request failed') + kwargs['message'] = self._cfg.wrap_debug_info(message) + + class StreamingResponse(BinaryIO): _response: requests.Response _buffer: bytes diff --git a/databricks/sdk/errors/__init__.py b/databricks/sdk/errors/__init__.py index 578406803..8ad5ac708 100644 --- a/databricks/sdk/errors/__init__.py +++ b/databricks/sdk/errors/__init__.py @@ -1,6 +1,6 @@ from .base import DatabricksError, ErrorDetail -from .mapper import _error_mapper -from .parser import get_api_error +from .customizer import _ErrorCustomizer +from .parser import _Parser from .platform import * from .private_link import PrivateLinkValidationError from .sdk import * diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py new file mode 100644 index 000000000..5c895becc --- /dev/null +++ b/databricks/sdk/errors/customizer.py @@ -0,0 +1,50 @@ +import abc +import logging + +import requests + + +class _ErrorCustomizer(abc.ABC): + """A customizer for errors from the Databricks REST API.""" + + @abc.abstractmethod + def customize_error(self, response: requests.Response, kwargs: dict): + """Customize the error constructor parameters.""" + + +class _RetryAfterCustomizer(_ErrorCustomizer): + """An error customizer that sets the retry_after_secs parameter based on the Retry-After header.""" + + _DEFAULT_RETRY_AFTER_SECONDS = 1 + """The default number of seconds to wait before retrying a request if the Retry-After header is missing or is not + a valid integer.""" + + @classmethod + def _parse_retry_after(cls, response: requests.Response) -> int: + retry_after = response.headers.get("Retry-After") + if retry_after is None: + logging.debug( + f'No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + ) + # 429 requests should include a `Retry-After` header, but if it's missing, + # we default to 1 second. + return cls._DEFAULT_RETRY_AFTER_SECONDS + # If the request is throttled, try parse the `Retry-After` header and sleep + # for the specified number of seconds. 
Note that this header can contain either + # an integer or a RFC1123 datetime string. + # See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After + # + # For simplicity, we only try to parse it as an integer, as this is what Databricks + # platform returns. Otherwise, we fall back and don't sleep. + try: + return int(retry_after) + except ValueError: + logging.debug( + f'Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}' + ) + # defaulting to 1 sleep second to make self._is_retryable() simpler + return cls._DEFAULT_RETRY_AFTER_SECONDS + + def customize_error(self, response: requests.Response, kwargs: dict): + if response.status_code in (429, 503): + kwargs['retry_after_secs'] = self._parse_retry_after(response) diff --git a/databricks/sdk/errors/deserializer.py b/databricks/sdk/errors/deserializer.py new file mode 100644 index 000000000..4da01ee68 --- /dev/null +++ b/databricks/sdk/errors/deserializer.py @@ -0,0 +1,106 @@ +import abc +import json +import logging +import re +from typing import Optional + +import requests + + +class _ErrorDeserializer(abc.ABC): + """A parser for errors from the Databricks REST API.""" + + @abc.abstractmethod + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None.""" + + +class _EmptyDeserializer(_ErrorDeserializer): + """A parser that handles empty responses.""" + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + if len(response_body) == 0: + return {'message': response.reason} + return None + + +class _StandardErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API using the standard error format. + """ + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + try: + payload_str = response_body.decode('utf-8') + resp = json.loads(payload_str) + except UnicodeDecodeError as e: + logging.debug('_StandardErrorParser: unable to decode response using utf-8', exc_info=e) + return None + except json.JSONDecodeError as e: + logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e) + return None + if not isinstance(resp, dict): + logging.debug('_StandardErrorParser: response is valid JSON but not a dictionary') + return None + + error_args = { + 'message': resp.get('message', 'request failed'), + 'error_code': resp.get('error_code'), + 'details': resp.get('details'), + } + + # Handle API 1.2-style errors + if 'error' in resp: + error_args['message'] = resp['error'] + + # Handle SCIM Errors + detail = resp.get('detail') + status = resp.get('status') + scim_type = resp.get('scimType') + if detail: + # Handle SCIM error message details + # @see https://tools.ietf.org/html/rfc7644#section-3.7.3 + if detail == "null": + detail = "SCIM API Internal Error" + error_args['message'] = f"{scim_type} {detail}".strip(" ") + error_args['error_code'] = f"SCIM_{status}" + return error_args + + +class _StringErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE". 
+ """ + + __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)') + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + payload_str = response_body.decode('utf-8') + match = self.__STRING_ERROR_REGEX.match(payload_str) + if not match: + logging.debug('_StringErrorParser: unable to parse response as string') + return None + error_code, message = match.groups() + return {'error_code': error_code, 'message': message, 'status': response.status_code, } + + +class _HtmlErrorDeserializer(_ErrorDeserializer): + """ + Parses errors from the Databricks REST API in HTML format. + """ + + __HTML_ERROR_REGEXES = [re.compile(r'
(.*)
'), re.compile(r'(.*)'), ] + + def deserialize_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]: + payload_str = response_body.decode('utf-8') + for regex in self.__HTML_ERROR_REGEXES: + match = regex.search(payload_str) + if match: + message = match.group(1) if match.group(1) else response.reason + return { + 'status': response.status_code, + 'message': message, + 'error_code': response.reason.upper().replace(' ', '_') + } + logging.debug('_HtmlErrorParser: no
 tag found in error response')
+        return None
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
index 3d15f1673..3408964fe 100644
--- a/databricks/sdk/errors/parser.py
+++ b/databricks/sdk/errors/parser.py
@@ -1,115 +1,32 @@
-import abc
-import json
 import logging
-import re
-from typing import Optional
+from typing import List, Optional
 
 import requests
 
 from ..logger import RoundTrip
 from .base import DatabricksError
+from .customizer import _ErrorCustomizer, _RetryAfterCustomizer
+from .deserializer import (_EmptyDeserializer, _ErrorDeserializer,
+                           _HtmlErrorDeserializer, _StandardErrorDeserializer,
+                           _StringErrorDeserializer)
 from .mapper import _error_mapper
 from .private_link import (_get_private_link_validation_error,
                            _is_private_link_redirect)
 
+# A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
+# be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
+# The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
+_error_deserializers = [
+    _EmptyDeserializer(),
+    _StandardErrorDeserializer(),
+    _StringErrorDeserializer(),
+    _HtmlErrorDeserializer(),
+]
 
-class _ErrorParser(abc.ABC):
-    """A parser for errors from the Databricks REST API."""
-
-    @abc.abstractmethod
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        """Parses an error from the Databricks REST API. If the error cannot be parsed, returns None."""
-
-
-class _EmptyParser(_ErrorParser):
-    """A parser that handles empty responses."""
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        if len(response_body) == 0:
-            return {'message': response.reason}
-        return None
-
-
-class _StandardErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API using the standard error format.
-    """
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        try:
-            payload_str = response_body.decode('utf-8')
-            resp: dict = json.loads(payload_str)
-        except json.JSONDecodeError as e:
-            logging.debug('_StandardErrorParser: unable to deserialize response as json', exc_info=e)
-            return None
-
-        error_args = {
-            'message': resp.get('message', 'request failed'),
-            'error_code': resp.get('error_code'),
-            'details': resp.get('details'),
-        }
-
-        # Handle API 1.2-style errors
-        if 'error' in resp:
-            error_args['message'] = resp['error']
-
-        # Handle SCIM Errors
-        detail = resp.get('detail')
-        status = resp.get('status')
-        scim_type = resp.get('scimType')
-        if detail:
-            # Handle SCIM error message details
-            # @see https://tools.ietf.org/html/rfc7644#section-3.7.3
-            if detail == "null":
-                detail = "SCIM API Internal Error"
-            error_args['message'] = f"{scim_type} {detail}".strip(" ")
-            error_args['error_code'] = f"SCIM_{status}"
-        return error_args
-
-
-class _StringErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API in the format "ERROR_CODE: MESSAGE".
-    """
-
-    __STRING_ERROR_REGEX = re.compile(r'([A-Z_]+): (.*)')
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        payload_str = response_body.decode('utf-8')
-        match = self.__STRING_ERROR_REGEX.match(payload_str)
-        if not match:
-            logging.debug('_StringErrorParser: unable to parse response as string')
-            return None
-        error_code, message = match.groups()
-        return {'error_code': error_code, 'message': message, 'status': response.status_code, }
-
-
-class _HtmlErrorParser(_ErrorParser):
-    """
-    Parses errors from the Databricks REST API in HTML format.
-    """
-
-    __HTML_ERROR_REGEXES = [re.compile(r'<pre>(.*)</pre>'), re.compile(r'<title>(.*)</title>'), ]
-
-    def parse_error(self, response: requests.Response, response_body: bytes) -> Optional[dict]:
-        payload_str = response_body.decode('utf-8')
-        for regex in self.__HTML_ERROR_REGEXES:
-            match = regex.search(payload_str)
-            if match:
-                message = match.group(1) if match.group(1) else response.reason
-                return {
-                    'status': response.status_code,
-                    'message': message,
-                    'error_code': response.reason.upper().replace(' ', '_')
-                }
-        logging.debug('_HtmlErrorParser: no <pre> tag found in error response')
-        return None
-
-
-# A list of ErrorParsers that are tried in order to parse an API error from a response body. Most errors should be
-# parsable by the _StandardErrorParser, but additional parsers can be added here for specific error formats. The order
-# of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
-_error_parsers = [_EmptyParser(), _StandardErrorParser(), _StringErrorParser(), _HtmlErrorParser(), ]
+# A list of _ErrorCustomizers that are applied to the error arguments after they are parsed. Customizers can modify the
+# error arguments in any way, including adding or removing fields. Customizers are applied in order, so later
+# customizers can override the changes made by earlier customizers.
+_error_customizers = [_RetryAfterCustomizer(), ]
 
 
 def _unknown_error(response: requests.Response) -> str:
@@ -124,24 +41,43 @@ def _unknown_error(response: requests.Response) -> str:
         f'https://github.com/databricks/databricks-sdk-go/issues. Request log:```{request_log}```')
 
 
-def get_api_error(response: requests.Response) -> Optional[DatabricksError]:
+class _Parser:
     """
-    Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
-    :param response: The response from the REST API.
-    :return: A DatabricksError if the response indicates an error, otherwise None.
+    A parser for errors from the Databricks REST API. It attempts to deserialize an error using a sequence of
+    deserializers, and then customizes the deserialized error using a sequence of customizers. If the error cannot be
+    deserialized, it returns a generic error with debugging information and instructions to report the issue to the SDK
+    issue tracker.
     """
-    if not response.ok:
-        content = response.content
-        for parser in _error_parsers:
-            try:
-                error_args = parser.parse_error(response, content)
-                if error_args:
-                    return _error_mapper(response, error_args)
-            except Exception as e:
-                logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
-        return _error_mapper(response, {'message': 'unable to parse response. ' + _unknown_error(response)})
 
-    # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
-    # is successful, but the response is not what we expect. We need to handle this case separately.
-    if _is_private_link_redirect(response):
-        return _get_private_link_validation_error(response.url)
+    def __init__(self,
+                 extra_error_parsers: List[_ErrorDeserializer] = [],
+                 extra_error_customizers: List[_ErrorCustomizer] = []):
+        self._error_parsers = _error_deserializers + (extra_error_parsers
+                                                      if extra_error_parsers is not None else [])
+        self._error_customizers = _error_customizers + (extra_error_customizers
+                                                        if extra_error_customizers is not None else [])
+
+    def get_api_error(self, response: requests.Response) -> Optional[DatabricksError]:
+        """
+        Handles responses from the REST API and returns a DatabricksError if the response indicates an error.
+        :param response: The response from the REST API.
+        :return: A DatabricksError if the response indicates an error, otherwise None.
+        """
+        if not response.ok:
+            content = response.content
+            for parser in self._error_parsers:
+                try:
+                    error_args = parser.deserialize_error(response, content)
+                    if error_args:
+                        for customizer in self._error_customizers:
+                            customizer.customize_error(response, error_args)
+                        return _error_mapper(response, error_args)
+                except Exception as e:
+                    logging.debug(f'Error parsing response with {parser}, continuing', exc_info=e)
+            return _error_mapper(response,
+                                 {'message': 'unable to parse response. ' + _unknown_error(response)})
+
+        # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+        # is successful, but the response is not what we expect. We need to handle this case separately.
+        if _is_private_link_redirect(response):
+            return _get_private_link_validation_error(response.url)
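
To show how the pieces compose, here is a hedged sketch of wiring an extra customizer into the chain (`_RequestIdCustomizer` and the `x-request-id` header are hypothetical; `errors._Parser` and `_ErrorCustomizer` are the private classes introduced by this patch):

```python
import requests

from databricks.sdk import errors
from databricks.sdk.errors.customizer import _ErrorCustomizer


class _RequestIdCustomizer(_ErrorCustomizer):
    """Hypothetical customizer: append a request ID header to the error message."""

    def customize_error(self, response: requests.Response, kwargs: dict):
        request_id = response.headers.get('x-request-id')
        if request_id:
            kwargs['message'] = f"{kwargs.get('message', 'request failed')} (request-id: {request_id})"


parser = errors._Parser(extra_error_customizers=[_RequestIdCustomizer()])
# parser.get_api_error(response) returns a DatabricksError subclass (or None),
# with the request ID appended whenever the header is present.
```
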
diff --git a/databricks/sdk/logger/round_trip_logger.py b/databricks/sdk/logger/round_trip_logger.py
index f1d177aaa..1c0a47f08 100644
--- a/databricks/sdk/logger/round_trip_logger.py
+++ b/databricks/sdk/logger/round_trip_logger.py
@@ -48,7 +48,8 @@ def generate(self) -> str:
             # Raw streams with `Transfer-Encoding: chunked` do not have `Content-Type` header
             sb.append("< [raw stream]")
         elif self._response.content:
-            sb.append(self._redacted_dump("< ", self._response.content.decode('utf-8')))
+            decoded = self._response.content.decode('utf-8', errors='replace')
+            sb.append(self._redacted_dump("< ", decoded))
         return '\n'.join(sb)
 
     @staticmethod
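
The switch to `errors='replace'` matters because error bodies are not guaranteed to be valid UTF-8; previously the strict decode raised `UnicodeDecodeError` while building the log line. A one-line illustration:

```python
# b'\x80' is not valid UTF-8: strict decoding raises UnicodeDecodeError,
# while errors='replace' substitutes U+FFFD so the round-trip log still renders.
assert b'\x80'.decode('utf-8', errors='replace') == '\ufffd'
```
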
diff --git a/tests/test_core.py b/tests/test_core.py
index cc7926a72..d54563d4e 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -13,7 +13,7 @@
 import pytest
 import requests
 
-from databricks.sdk import WorkspaceClient
+from databricks.sdk import WorkspaceClient, errors
 from databricks.sdk.core import (ApiClient, Config, DatabricksError,
                                  StreamingResponse)
 from databricks.sdk.credentials_provider import (CliTokenSource,
@@ -359,8 +359,8 @@ def test_deletes(config, requests_mock):
     assert res is None
 
 
-def test_error(config, requests_mock):
-    errorJson = {
+@pytest.mark.parametrize('status_code,headers,body,expected_error', [
+    (400, {}, {
         "message":
         "errorMessage",
         "details": [{
@@ -378,26 +378,74 @@ def test_error(config, requests_mock):
                 "etag": "wrong etag"
             }
         }],
-    }
-
+    },
+     errors.BadRequest('errorMessage',
+                       details=[{
+                           'type': DatabricksError._error_info_type,
+                           'reason': 'error reason',
+                           'domain': 'error domain',
+                           'metadata': {
+                               'etag': 'error etag'
+                           },
+                       }])),
+    (401, {}, {
+        'error_code': 'UNAUTHORIZED',
+        'message': 'errorMessage',
+    },
+     errors.Unauthenticated('errorMessage. Config: host=http://localhost, auth_type=noop',
+                            error_code='UNAUTHORIZED')),
+    (403, {}, {
+        'error_code': 'FORBIDDEN',
+        'message': 'errorMessage',
+    },
+     errors.PermissionDenied('errorMessage. Config: host=http://localhost, auth_type=noop',
+                             error_code='FORBIDDEN')),
+    (429, {}, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=1)),
+    (429, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=100)),
+    (503, {}, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    }, errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                     retry_after_secs=1)),
+    (503, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    },
+     errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                   retry_after_secs=100)),
+    (404, {}, {
+        'scimType': 'scim type',
+        'detail': 'detail',
+        'status': 'status',
+    }, errors.NotFound('scim type detail', error_code='SCIM_status')),
+])
+def test_error(config, requests_mock, status_code, headers, body, expected_error):
     client = ApiClient(config)
-    requests_mock.get("/test", json=errorJson, status_code=400, )
+    requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
     with pytest.raises(DatabricksError) as raised:
-        client.do("GET", "/test", headers={"test": "test"})
-
-    error_infos = raised.value.get_error_info()
-    assert len(error_infos) == 1
-    error_info = error_infos[0]
-    assert error_info.reason == "error reason"
-    assert error_info.domain == "error domain"
-    assert error_info.metadata["etag"] == "error etag"
-    assert error_info.type == DatabricksError._error_info_type
-
-
-def test_error_with_scimType():
-    args = {"detail": "detail", "scimType": "scim type"}
-    error = DatabricksError(**args)
-    assert str(error) == f"scim type detail"
+        client._perform("GET", "http://localhost/test", headers={"test": "test"})
+    actual = raised.value
+    assert isinstance(actual, type(expected_error))
+    assert str(actual) == str(expected_error)
+    assert actual.error_code == expected_error.error_code
+    assert actual.retry_after_secs == expected_error.retry_after_secs
+    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    assert len(expected_error_infos) == len(actual_error_infos)
+    for expected, actual in zip(expected_error_infos, actual_error_infos):
+        assert expected.type == actual.type
+        assert expected.reason == actual.reason
+        assert expected.domain == actual.domain
+        assert expected.metadata == actual.metadata
 
 
 @contextlib.contextmanager
diff --git a/tests/test_errors.py b/tests/test_errors.py
index 2e19ec897..881f016f3 100644
--- a/tests/test_errors.py
+++ b/tests/test_errors.py
@@ -12,13 +12,20 @@ def fake_response(method: str,
                   status_code: int,
                   response_body: str,
                   path: Optional[str] = None) -> requests.Response:
+    return fake_raw_response(method, status_code, response_body.encode('utf-8'), path)
+
+
+def fake_raw_response(method: str,
+                      status_code: int,
+                      response_body: bytes,
+                      path: Optional[str] = None) -> requests.Response:
     resp = requests.Response()
     resp.status_code = status_code
     resp.reason = http.client.responses.get(status_code, '')
     if path is None:
         path = '/api/2.0/service'
     resp.request = requests.Request(method, f"https://databricks.com{path}").prepare()
-    resp._content = response_body.encode('utf-8')
+    resp._content = response_body
     return resp
 
 
@@ -110,17 +117,22 @@ def make_private_link_response() -> requests.Response:
        'https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n'
        '< 400 Bad Request\n'
        '< this is not a real response```')),
-     [
-         fake_response(
-             'GET', 404,
-             json.dumps({
-                 'detail': 'Group with id 1234 is not found',
-                 'status': '404',
-                 'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
-             })), errors.NotFound, 'None Group with id 1234 is not found'
-     ]])
+     (fake_response(
+         'GET', 404,
+         json.dumps({
+             'detail': 'Group with id 1234 is not found',
+             'status': '404',
+             'schemas': ['urn:ietf:params:scim:api:messages:2.0:Error']
+         })), errors.NotFound, 'None Group with id 1234 is not found'),
+     (fake_response('GET', 404, json.dumps("This is JSON but not a dictionary")), errors.NotFound,
+      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< "This is JSON but not a dictionary"```'
+      ),
+     (fake_raw_response('GET', 404, b'\x80'), errors.NotFound,
+      'unable to parse response. This is likely a bug in the Databricks SDK for Python or the underlying API. Please report this issue with the following debugging information to the SDK issue tracker at https://github.com/databricks/databricks-sdk-go/issues. Request log:```GET /api/2.0/service\n< 404 Not Found\n< �```'
+      )])
 def test_get_api_error(response, expected_error, expected_message):
+    parser = errors._Parser()
     with pytest.raises(errors.DatabricksError) as e:
-        raise errors.get_api_error(response)
+        raise parser.get_api_error(response)
     assert isinstance(e.value, expected_error)
     assert str(e.value) == expected_message

From 2438abdbf9fdffcea7224e20dffb9fb1d5178cc9 Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Mon, 16 Sep 2024 08:52:51 -0400
Subject: [PATCH 039/136] [Fix] Do not specify --tenant flag when fetching
 managed identity access token from the CLI (#748)

## Changes
Ports https://github.com/databricks/databricks-sdk-go/pull/1021 to the
Python SDK.

The Azure CLI's `az account get-access-token` command does not allow
specifying the `--tenant` flag when the CLI is authenticated using a
managed identity.

Fixes #742.
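
For reference, a standalone sketch of the detection performed before
appending `--tenant` (simplified from the diff below; the narrowed
exception handling is an assumption for brevity):

```python
import json
import subprocess


def is_cli_using_managed_identity() -> bool:
    """Returns True when `az account show` reports a managed identity principal."""
    try:
        out = subprocess.run(["az", "account", "show", "--output", "json"],
                             capture_output=True, check=True)
        user = json.loads(out.stdout.decode()).get("user") or {}
        return (user.get("type") == "servicePrincipal"
                and user.get("name") in ("systemAssignedIdentity", "userAssignedIdentity"))
    except (subprocess.CalledProcessError, FileNotFoundError):
        return False
```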

## Tests
Unit tests ensure that all expected cases are treated as managed
identities.

- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 databricks/sdk/credentials_provider.py | 44 +++++++++++++++++++++++---
 tests/test_auth_manual_tests.py        | 12 +++++++
 tests/testdata/az                      | 32 +++++++++++++++++--
 tests/testdata/windows/az.ps1          | 28 ++++++++++++++++
 4 files changed, 109 insertions(+), 7 deletions(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 860a06ce4..8c1655af1 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -411,10 +411,7 @@ def _parse_expiry(expiry: str) -> datetime:
 
     def refresh(self) -> Token:
         try:
-            is_windows = sys.platform.startswith('win')
-            # windows requires shell=True to be able to execute 'az login' or other commands
-            # cannot use shell=True all the time, as it breaks macOS
-            out = subprocess.run(self._cmd, capture_output=True, check=True, shell=is_windows)
+            out = _run_subprocess(self._cmd, capture_output=True, check=True)
             it = json.loads(out.stdout.decode())
             expires_on = self._parse_expiry(it[self._expiry_field])
             return Token(access_token=it[self._access_token_field],
@@ -429,6 +426,26 @@ def refresh(self) -> Token:
             raise IOError(f'cannot get access token: {message}') from e
 
 
+def _run_subprocess(popenargs,
+                    input=None,
+                    capture_output=True,
+                    timeout=None,
+                    check=False,
+                    **kwargs) -> subprocess.CompletedProcess:
+    """Runs subprocess with given arguments.
+    This handles OS-specific modifications that need to be made to the invocation of subprocess.run."""
+    kwargs['shell'] = sys.platform.startswith('win')
+    # windows requires shell=True to be able to execute 'az login' or other commands
+    # cannot use shell=True all the time, as it breaks macOS
+    logging.debug(f'Running command: {" ".join(popenargs)}')
+    return subprocess.run(popenargs,
+                          input=input,
+                          capture_output=capture_output,
+                          timeout=timeout,
+                          check=check,
+                          **kwargs)
+
+
 class AzureCliTokenSource(CliTokenSource):
     """ Obtain the token granted by `az login` CLI command """
 
@@ -437,13 +454,30 @@ def __init__(self, resource: str, subscription: Optional[str] = None, tenant: Op
         if subscription is not None:
             cmd.append("--subscription")
             cmd.append(subscription)
-        if tenant:
+        if tenant and not self.__is_cli_using_managed_identity():
             cmd.extend(["--tenant", tenant])
         super().__init__(cmd=cmd,
                          token_type_field='tokenType',
                          access_token_field='accessToken',
                          expiry_field='expiresOn')
 
+    @staticmethod
+    def __is_cli_using_managed_identity() -> bool:
+        """Checks whether the current CLI session is authenticated using managed identity."""
+        try:
+            cmd = ["az", "account", "show", "--output", "json"]
+            out = _run_subprocess(cmd, capture_output=True, check=True)
+            account = json.loads(out.stdout.decode())
+            user = account.get("user")
+            if user is None:
+                return False
+            return user.get("type") == "servicePrincipal" and user.get("name") in [
+                'systemAssignedIdentity', 'userAssignedIdentity'
+            ]
+        except subprocess.CalledProcessError as e:
+            logger.debug("Failed to get account information from Azure CLI", exc_info=e)
+            return False
+
     def is_human_user(self) -> bool:
         """The UPN claim is the username of the user, but not the Service Principal.
 
diff --git a/tests/test_auth_manual_tests.py b/tests/test_auth_manual_tests.py
index 34aa3a9c2..8c58dd6bf 100644
--- a/tests/test_auth_manual_tests.py
+++ b/tests/test_auth_manual_tests.py
@@ -1,3 +1,5 @@
+import pytest
+
 from databricks.sdk.core import Config
 
 from .conftest import set_az_path, set_home
@@ -60,3 +62,13 @@ def test_azure_cli_with_warning_on_stderr(monkeypatch, mock_tenant):
                  host='https://adb-123.4.azuredatabricks.net',
                  azure_workspace_resource_id=resource_id)
     assert 'X-Databricks-Azure-SP-Management-Token' in cfg.authenticate()
+
+
+@pytest.mark.parametrize('username', ['systemAssignedIdentity', 'userAssignedIdentity'])
+def test_azure_cli_does_not_specify_tenant_id_with_msi(monkeypatch, username):
+    set_home(monkeypatch, '/testdata/azure')
+    set_az_path(monkeypatch)
+    monkeypatch.setenv('FAIL_IF_TENANT_ID_SET', 'true')
+    monkeypatch.setenv('AZ_USER_NAME', username)
+    monkeypatch.setenv('AZ_USER_TYPE', 'servicePrincipal')
+    cfg = Config(auth_type='azure-cli', host='https://adb-123.4.azuredatabricks.net', azure_tenant_id='abc')
diff --git a/tests/testdata/az b/tests/testdata/az
index 5bf43a663..7437babce 100755
--- a/tests/testdata/az
+++ b/tests/testdata/az
@@ -1,7 +1,20 @@
 #!/bin/bash
 
-if [ -n "$WARN" ]; then
-    >&2 /bin/echo "WARNING: ${WARN}"
+# If the arguments are "account show", return the account details.
+if [ "$1" == "account" ] && [ "$2" == "show" ]; then
+    /bin/echo "{
+    \"environmentName\": \"AzureCloud\",
+    \"id\": \"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee\",
+    \"isDefault\": true,
+    \"name\": \"Pay-As-You-Go\",
+    \"state\": \"Enabled\",
+    \"tenantId\": \"aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee\",
+    \"user\": {
+        \"name\": \"${AZ_USER_NAME:-testuser@databricks.com}\",
+        \"type\": \"${AZ_USER_TYPE:-user}\"
+    }
+}"
+    exit 0
 fi
 
 if [ "yes" == "$FAIL" ]; then
@@ -26,6 +39,21 @@ for arg in "$@"; do
     fi
 done
 
+# Append a character to the file named by $COUNT if it is defined.
+if [ -n "$COUNT" ]; then
+    echo -n x >> "$COUNT"
+fi
+
+# If FAIL_IF_TENANT_ID_SET is set and --tenant is passed, fail.
+if [ -n "$FAIL_IF_TENANT_ID_SET" ]; then
+    for arg in "$@"; do
+        if [[ "$arg" == "--tenant" ]]; then
+            echo 1>&2 "ERROR: Tenant shouldn't be specified for managed identity account"
+            exit 1
+        fi
+    done
+fi
+
 # Macos
 EXP="$(/bin/date -v+${EXPIRE:=10S} +'%F %T' 2>/dev/null)"
 if [ -z "${EXP}" ]; then
diff --git a/tests/testdata/windows/az.ps1 b/tests/testdata/windows/az.ps1
index 4aa96adf5..97ecbca7c 100644
--- a/tests/testdata/windows/az.ps1
+++ b/tests/testdata/windows/az.ps1
@@ -1,5 +1,23 @@
 #!/usr/bin/env pwsh
 
+# If the arguments are "account show", return the account details.
+if ($args[0] -eq "account" -and $args[1] -eq "show") {
+    $output = @{
+        environmentName = "AzureCloud"
+        id = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
+        isDefault = $true
+        name = "Pay-As-You-Go"
+        state = "Enabled"
+        tenantId = "aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee"
+        user = @{
+            name = if ($env:AZ_USER_NAME) { $env:AZ_USER_NAME } else { "testuser@databricks.com" }
+            type = if ($env:AZ_USER_TYPE) { $env:AZ_USER_TYPE } else { "user" }
+        }
+    }
+    $output | ConvertTo-Json
+    exit 0
+}
+
 if ($env:WARN) {
     Write-Error "WARNING: $env:WARN"
 }
@@ -30,6 +48,16 @@ foreach ($arg in $Args) {
     }
 }
 
+# If FAIL_IF_TENANT_ID_SET is set and a --tenant flag is passed, fail.
+if ($env:FAIL_IF_TENANT_ID_SET) {
+    foreach ($arg in $args) {
+        if ($arg -eq "--tenant-id" -or $arg -like "--tenant*") {
+            Write-Error "ERROR: Tenant shouldn't be specified for managed identity account"
+            exit 1
+        }
+    }
+}
+
 try {
     $EXP = (Get-Date).AddSeconds($env:EXPIRE -as [int])
 } catch {

From 5e871cb0ae3f4790c95a4e2e6811998a113a7989 Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi <88379306+tanmay-db@users.noreply.github.com>
Date: Tue, 17 Sep 2024 12:49:32 +0200
Subject: [PATCH 040/136] [Release] Release v0.32.2 (#763)

### New Features and Improvements

* Support Models in `dbutils.fs` operations
([#750](https://github.com/databricks/databricks-sdk-py/pull/750)).


### Bug Fixes

* Do not specify --tenant flag when fetching managed identity access
token from the CLI
([#748](https://github.com/databricks/databricks-sdk-py/pull/748)).
* Fix deserialization of 401/403 errors
([#758](https://github.com/databricks/databricks-sdk-py/pull/758)).
* Use correct optional typing in `WorkspaceClient` for `mypy`
([#760](https://github.com/databricks/databricks-sdk-py/pull/760)).
---
 CHANGELOG.md              | 15 +++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index cd1ee8136..34927afd0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,20 @@
 # Version changelog
 
+## [Release] Release v0.32.2
+
+### New Features and Improvements
+
+ * Support Models in `dbutils.fs` operations ([#750](https://github.com/databricks/databricks-sdk-py/pull/750)).
+
+
+### Bug Fixes
+
+ * Do not specify --tenant flag when fetching managed identity access token from the CLI ([#748](https://github.com/databricks/databricks-sdk-py/pull/748)).
+ * Fix deserialization of 401/403 errors ([#758](https://github.com/databricks/databricks-sdk-py/pull/758)).
+ * Use correct optional typing in `WorkspaceClient` for `mypy` ([#760](https://github.com/databricks/databricks-sdk-py/pull/760)).
+
+
+
 ## [Release] Release v0.32.1
 
 ### Bug Fixes
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 68c3b1326..03de7de68 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.32.1'
+__version__ = '0.32.2'

From d5ec4333c330104b8fc5ac169257a0de30be6881 Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Tue, 17 Sep 2024 17:57:46 +0200
Subject: [PATCH 041/136] [Fix] Add DataPlane docs to the index (#764)

## Changes
Add DataPlane docs to the index

## Tests


- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 docs/index.rst | 1 +
 1 file changed, 1 insertion(+)

diff --git a/docs/index.rst b/docs/index.rst
index a4873c43e..3d3a5dfc5 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -17,6 +17,7 @@ We are keen to hear feedback from you on these SDKs. Please `file GitHub issues
    pagination
    logging
    dbutils
+   dataplane
    clients/workspace
    workspace/index
    clients/account

From 9d3925415e5c6268880a501ed8c63ab1e6740971 Mon Sep 17 00:00:00 2001
From: Aravind Segu 
Date: Wed, 18 Sep 2024 12:25:18 -0700
Subject: [PATCH 042/136] [Feature] Integrate Databricks SDK with Model Serving
 Auth Provider (#761)

## Changes
This PR introduces a new model serving auth method to the Databricks SDK:
- Check whether the environment variables that identify a model serving
environment are set.
- Check whether an OAuth token file has been written by the serving
environment.
- If the file exists, use its token for authentication (see the sketch after
this list).
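
A simplified sketch of that lookup (the file path and JSON field names are
taken from the diff below; the 300-second window is the provider's refresh
duration):

```python
import json
import time

# Path mounted by Databricks Model Serving, per the diff below.
TOKEN_FILE = "/var/credentials-secret/model-dependencies-oauth-token"

_cached_token = None
_expiry = -1.0


def get_model_serving_token() -> str:
    """Reads the OAuth token written by Model Serving, caching it for ~5 minutes."""
    global _cached_token, _expiry
    if _cached_token is not None and _expiry > time.time():
        return _cached_token
    with open(TOKEN_FILE) as f:
        _cached_token = json.load(f)["OAUTH_TOKEN"][0]["oauthTokenValue"]
    _expiry = time.time() + 300
    return _cached_token
```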

## Tests
Added Unit tests

- [x] `make test` run locally
- [x] `make fmt` applied
- [x] relevant integration tests applied

---------

Signed-off-by: aravind-segu 
---
 databricks/sdk/credentials_provider.py     | 89 +++++++++++++++++++-
 tests/test_model_serving_auth.py           | 98 ++++++++++++++++++++++
 tests/testdata/model-serving-test-token    |  7 ++
 tests/testdata/model-serving-test-token-v2 |  7 ++
 4 files changed, 199 insertions(+), 2 deletions(-)
 create mode 100644 tests/test_model_serving_auth.py
 create mode 100644 tests/testdata/model-serving-test-token
 create mode 100644 tests/testdata/model-serving-test-token-v2

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 8c1655af1..b64a66e08 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -9,8 +9,9 @@
 import platform
 import subprocess
 import sys
+import time
 from datetime import datetime
-from typing import Callable, Dict, List, Optional, Union
+from typing import Callable, Dict, List, Optional, Tuple, Union
 
 import google.auth
 import requests
@@ -698,6 +699,90 @@ def inner() -> Dict[str, str]:
     return inner
 
 
+# This code is derived from MLflow's DatabricksModelServingConfigProvider
+# https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
+class ModelServingAuthProvider():
+    _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
+
+    def __init__(self):
+        self.expiry_time = -1
+        self.current_token = None
+        self.refresh_duration = 300 # 300 Seconds
+
+    def should_fetch_model_serving_environment_oauth(self) -> bool:
+        """
+        Check whether this is the model serving environment.
+        Additionally, check whether the OAuth token file path exists.
+        """
+
+        is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
+                                   or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
+        return (is_in_model_serving_env == "true"
+                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+
+    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
+        # Use Cached value if it is valid
+        if self.current_token is not None and self.expiry_time > time.time():
+            return self.current_token
+
+        try:
+            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
+                oauth_dict = json.load(f)
+                self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
+                self.expiry_time = time.time() + self.refresh_duration
+        except Exception as e:
+            # sleep and retry in case of any race conditions with OAuth refreshing
+            if should_retry:
+                logger.warning("Unable to read oauth token on first attempt in Model Serving Environment",
+                               exc_info=e)
+                time.sleep(0.5)
+                return self.get_model_dependency_oauth_token(should_retry=False)
+            else:
+                raise RuntimeError(
+                    "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
+                ) from e
+        return self.current_token
+
+    def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
+        if not self.should_fetch_model_serving_environment_oauth():
+            return None
+
+        # Read from DATABRICKS_MODEL_SERVING_HOST_URL if available, otherwise fall back to DB_MODEL_SERVING_HOST_URL
+        host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
+            "DB_MODEL_SERVING_HOST_URL")
+        token = self.get_model_dependency_oauth_token()
+
+        return (host, token)
+
+
+@credentials_strategy('model-serving', [])
+def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
+    try:
+        model_serving_auth_provider = ModelServingAuthProvider()
+        if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth():
+            logger.debug("model-serving: Not in Databricks Model Serving, skipping")
+            return None
+        host, token = model_serving_auth_provider.get_databricks_host_token()
+        if token is None:
+            raise ValueError(
+                "Got malformed auth (empty token) when fetching auth implicitly available in Model Serving Environment. Please contact Databricks support"
+            )
+        if cfg.host is None:
+            cfg.host = host
+    except Exception as e:
+        logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
+        return None
+
+    logger.info("Using Databricks Model Serving Authentication")
+
+    def inner() -> Dict[str, str]:
+        # Call here again to get the refreshed token
+        _, token = model_serving_auth_provider.get_databricks_host_token()
+        return {"Authorization": f"Bearer {token}"}
+
+    return inner
+
+
 class DefaultCredentials:
     """ Select the first applicable credential provider from the chain """
 
@@ -706,7 +791,7 @@ def __init__(self) -> None:
         self._auth_providers = [
             pat_auth, basic_auth, metadata_service, oauth_service_principal, azure_service_principal,
             github_oidc_azure, azure_cli, external_browser, databricks_cli, runtime_native_auth,
-            google_credentials, google_id
+            google_credentials, google_id, model_serving_auth
         ]
 
     def auth_type(self) -> str:
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
new file mode 100644
index 000000000..0ae211303
--- /dev/null
+++ b/tests/test_model_serving_auth.py
@@ -0,0 +1,98 @@
+import time
+
+import pytest
+
+from databricks.sdk.core import Config
+
+from .conftest import raises
+
+default_auth_base_error_message = \
+    "default auth: cannot configure default credentials, " \
+    "please check https://docs.databricks.com/en/dev-tools/auth.html#databricks-client-unified-authentication " \
+    "to configure credentials for your preferred authentication method"
+
+
+@pytest.mark.parametrize(
+    "env_values, oauth_file_name",
+    [([('IS_IN_DB_MODEL_SERVING_ENV', 'true'),
+       ('DB_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
+     ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
+       ('DB_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
+     ([('IS_IN_DB_MODEL_SERVING_ENV', 'true'),
+       ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
+     ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
+       ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"), ])
+def test_model_serving_auth(env_values, oauth_file_name, monkeypatch):
+    # These environment variables identify the model serving environment to the auth provider
+    for (env_name, env_value) in env_values:
+        monkeypatch.setenv(env_name, env_value)
+    # patch the auth provider to read the token file from the test directory
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        oauth_file_name)
+
+    cfg = Config()
+
+    assert cfg.auth_type == 'model-serving'
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    # Token defined in the test file
+    assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
+
+
+@pytest.mark.parametrize("env_values, oauth_file_name", [
+    ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
+    ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name
+    ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
+      ], "invalid_file_name"), # In Model Serving and Invalid File Name
+    ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
+])
+@raises(default_auth_base_error_message)
+def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
+    for (env_name, env_value) in env_values:
+        monkeypatch.setenv(env_name, env_value)
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        oauth_file_name)
+
+    Config()
+
+
+def test_model_serving_auth_refresh(monkeypatch):
+    # These environment variables identify the model serving environment to the auth provider
+    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
+
+    # patch the auth provider to read the token file from the test directory
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        "tests/testdata/model-serving-test-token")
+
+    cfg = Config()
+    assert cfg.auth_type == 'model-serving'
+
+    current_time = time.time()
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    assert headers.get(
+        "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file
+
+    # Simulate refreshing the token by patching to a new file
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        "tests/testdata/model-serving-test-token-v2")
+
+    monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 10)
+
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    # Read from cache even though new path is set because expiry is still not hit
+    assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
+
+    # Expiry is 300 seconds, so this forces an expiry and a re-read from the new file path
+    monkeypatch.setattr('databricks.sdk.credentials_provider.time.time', lambda: current_time + 600)
+
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    # Read V2 now
+    assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2'
diff --git a/tests/testdata/model-serving-test-token b/tests/testdata/model-serving-test-token
new file mode 100644
index 000000000..3415ff226
--- /dev/null
+++ b/tests/testdata/model-serving-test-token
@@ -0,0 +1,7 @@
+{
+  "OAUTH_TOKEN": [
+    {
+      "oauthTokenValue": "databricks_sdk_unit_test_token"
+    }
+  ]
+}
\ No newline at end of file
diff --git a/tests/testdata/model-serving-test-token-v2 b/tests/testdata/model-serving-test-token-v2
new file mode 100644
index 000000000..2567a7e50
--- /dev/null
+++ b/tests/testdata/model-serving-test-token-v2
@@ -0,0 +1,7 @@
+{
+  "OAUTH_TOKEN": [
+    {
+      "oauthTokenValue": "databricks_sdk_unit_test_token_v2"
+    }
+  ]
+}
\ No newline at end of file

From 8c865a013a539de5a7f46f66ff75ed44d806502b Mon Sep 17 00:00:00 2001
From: Serge Smertin <259697+nfx@users.noreply.github.com>
Date: Thu, 19 Sep 2024 13:21:46 +0200
Subject: [PATCH 043/136] [Fix] `mypy` error: Skipping analyzing "google":
 module is installed, but missing library stubs or py.typed marker (#769)

---
 databricks/sdk/credentials_provider.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index b64a66e08..232465dab 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -13,11 +13,11 @@
 from datetime import datetime
 from typing import Callable, Dict, List, Optional, Tuple, Union
 
-import google.auth
+import google.auth  # type: ignore
 import requests
-from google.auth import impersonated_credentials
-from google.auth.transport.requests import Request
-from google.oauth2 import service_account
+from google.auth import impersonated_credentials  # type: ignore
+from google.auth.transport.requests import Request  # type: ignore
+from google.oauth2 import service_account  # type: ignore
 
 from .azure import add_sp_management_token, add_workspace_id_header
 from .oauth import (ClientCredentials, OAuthClient, Refreshable, Token,

From e23b4acd0cc967ee2a3cba3c3d26666698600e35 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Thu, 19 Sep 2024 15:51:48 +0200
Subject: [PATCH 044/136] [Release] Release v0.32.3 (#770)

### New Features and Improvements

* Integrate Databricks SDK with Model Serving Auth Provider
([#761](https://github.com/databricks/databricks-sdk-py/pull/761)).


### Bug Fixes

* Add DataPlane docs to the index
([#764](https://github.com/databricks/databricks-sdk-py/pull/764)).
* `mypy` error: Skipping analyzing "google": module is installed, but
missing library stubs or py.typed marker
([#769](https://github.com/databricks/databricks-sdk-py/pull/769)).
---
 CHANGELOG.md              | 14 ++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 15 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 34927afd0..e5fd9525c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Version changelog
 
+## [Release] Release v0.32.3
+
+### New Features and Improvements
+
+ * Integrate Databricks SDK with Model Serving Auth Provider ([#761](https://github.com/databricks/databricks-sdk-py/pull/761)).
+
+
+### Bug Fixes
+
+ * Add DataPlane docs to the index ([#764](https://github.com/databricks/databricks-sdk-py/pull/764)).
+ * `mypy` error: Skipping analyzing "google": module is installed, but missing library stubs or py.typed marker ([#769](https://github.com/databricks/databricks-sdk-py/pull/769)).
+
+
+
 ## [Release] Release v0.32.2
 
 ### New Features and Improvements
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 03de7de68..fb0ef16c1 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.32.2'
+__version__ = '0.32.3'

From 61a41fc246ad83aa98e3c9e3d79c4a4c60c3c811 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Wed, 25 Sep 2024 13:56:40 +0200
Subject: [PATCH 045/136] [Internal] Add DCO guidelines (#773)

## Changes

This PR updates the contributing guidelines to include the DCO
(Developer Certificate of Origin) that external contributors must sign
off on in order to contribute.

## Tests

N/A
---
 CONTRIBUTING.md | 56 +++++++++++--------------------------------------
 DCO             | 25 ++++++++++++++++++++++
 2 files changed, 37 insertions(+), 44 deletions(-)
 create mode 100644 DCO

diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 18b980a55..249d7498e 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -18,54 +18,22 @@ Code style is enforced by a formatter check in your pull request. We use [yapf](
 ## Signed Commits
 This repo requires all contributors to sign their commits. To configure this, you can follow [Github's documentation](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) to create a GPG key, upload it to your Github account, and configure your git client to sign commits.
 
-## Sign your work
-The sign-off is a simple line at the end of the explanation for the patch. Your signature certifies that you wrote the patch or otherwise have the right to pass it on as an open-source patch. The rules are pretty simple: if you can certify the below (from developercertificate.org):
+## Developer Certificate of Origin
 
-```
-Developer Certificate of Origin
-Version 1.1
-
-Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
-1 Letterman Drive
-Suite D4700
-San Francisco, CA, 94129
-
-Everyone is permitted to copy and distribute verbatim copies of this
-license document, but changing it is not allowed.
-
-
-Developer's Certificate of Origin 1.1
-
-By making a contribution to this project, I certify that:
-
-(a) The contribution was created in whole or in part by me and I
-    have the right to submit it under the open source license
-    indicated in the file; or
+To contribute to this repository, you must sign off your commits to certify
+that you have the right to contribute the code and that it complies with the
+open source license. The rules are pretty simple: if you can certify the
+content of [DCO](./DCO), then simply add a "Signed-off-by" line to your
+commit message to certify your compliance. Please use your real name, as
+pseudonymous/anonymous contributions are not accepted.
 
-(b) The contribution is based upon previous work that, to the best
-    of my knowledge, is covered under an appropriate open source
-    license and I have the right under that license to submit that
-    work with modifications, whether created in whole or in part
-    by me, under the same open source license (unless I am
-    permitted to submit under a different license), as indicated
-    in the file; or
-
-(c) The contribution was provided directly to me by some other
-    person who certified (a), (b) or (c) and I have not modified
-    it.
-
-(d) I understand and agree that this project and the contribution
-    are public and that a record of the contribution (including all
-    personal information I submit with it, including my sign-off) is
-    maintained indefinitely and may be redistributed consistent with
-    this project or the open source license(s) involved.
+```
+Signed-off-by: Joe Smith 
 ```
 
-Then you just add a line to every git commit message:
+If you set your `user.name` and `user.email` git configs, you can sign your 
+commit automatically with `git commit -s`:
 
 ```
-Signed-off-by: Joe Smith 
+git commit -s -m "Your commit message"
 ```
-
-If you set your `user.name` and `user.email` git configs, you can sign your commit automatically with git commit -s.
-You must use your real name (sorry, no pseudonyms or anonymous contributions).
diff --git a/DCO b/DCO
new file mode 100644
index 000000000..d4f11dfce
--- /dev/null
+++ b/DCO
@@ -0,0 +1,25 @@
+Developer's Certificate of Origin 1.1
+
+By making a contribution to this project, I certify that:
+
+(a) The contribution was created in whole or in part by me and I
+    have the right to submit it under the open source license
+    indicated in the file; or
+
+(b) The contribution is based upon previous work that, to the best
+    of my knowledge, is covered under an appropriate open source
+    license and I have the right under that license to submit that
+    work with modifications, whether created in whole or in part
+    by me, under the same open source license (unless I am
+    permitted to submit under a different license), as indicated
+    in the file; or
+
+(c) The contribution was provided directly to me by some other
+    person who certified (a), (b) or (c) and I have not modified
+    it.
+
+(d) I understand and agree that this project and the contribution
+    are public and that a record of the contribution (including all
+    personal information I submit with it, including my sign-off) is
+    maintained indefinitely and may be redistributed consistent with
+    this project or the open source license(s) involved.

From fbed6b9e793660831dd983d07a9f5d2c6cc363ed Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi <88379306+tanmay-db@users.noreply.github.com>
Date: Wed, 25 Sep 2024 18:05:24 +0200
Subject: [PATCH 046/136] [Internal] Update SDK to latest OpenAPI spec (#766)

## Changes

Updates the SDK to the latest OpenAPI spec and fixes generation (the codegen
template needs to import `Optional`; see the sketch below).
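
For context, the generated client constructors annotate their keyword
arguments as `Optional`, which is why the template must import it. A
representative, abbreviated sketch (not the actual generated code):

```python
from typing import Optional


class WorkspaceClient:
    def __init__(self, *, host: Optional[str] = None, token: Optional[str] = None,
                 profile: Optional[str] = None):
        ...
```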

Note: `test_github_oidc_flow_works_with_azure` fails during `genkit
generate-sdk py` but passes when run separately right after generation,
without any change. This seems to be a non-blocker, so going ahead with
the SDK generation.

## Tests

Unit tests. Nightly tests will run over release PR.
- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 .codegen/__init__.py.tmpl            |   1 +
 .codegen/_openapi_sha                |   2 +-
 .gitattributes                       | 275 -------------------
 databricks/sdk/__init__.py           |   9 +
 databricks/sdk/service/apps.py       | 260 +++++++++++-------
 databricks/sdk/service/catalog.py    | 273 +++++++++++++++++-
 databricks/sdk/service/compute.py    |  86 ++++--
 databricks/sdk/service/dashboards.py |  41 ++-
 databricks/sdk/service/jobs.py       | 184 +++++++------
 databricks/sdk/service/pipelines.py  |  59 +++-
 databricks/sdk/service/serving.py    | 336 ++++++++++++++++++++++-
 databricks/sdk/service/settings.py   | 395 ++++++++++++++++++++++++++-
 databricks/sdk/service/sql.py        | 246 +----------------
 databricks/sdk/service/workspace.py  | 371 ++++++++++++++++++-------
 14 files changed, 1676 insertions(+), 862 deletions(-)

diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl
index 7ab8e64f6..5ca160685 100644
--- a/.codegen/__init__.py.tmpl
+++ b/.codegen/__init__.py.tmpl
@@ -9,6 +9,7 @@ from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}
 from databricks.sdk.service.provisioning import Workspace
 from databricks.sdk import azure
+from typing import Optional
 
 {{$args := list "host" "account_id" "username" "password" "client_id" "client_secret"
   "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret"
diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 4ceeab3d3..e9f9e0a0e 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-d05898328669a3f8ab0c2ecee37db2673d3ea3f7
\ No newline at end of file
+248f4ad9668661da9d0bf4a7b0119a2d44fd1e75
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index c862f312c..c8e5b2f0b 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -20,278 +20,3 @@ databricks/sdk/service/sharing.py linguist-generated=true
 databricks/sdk/service/sql.py linguist-generated=true
 databricks/sdk/service/vectorsearch.py linguist-generated=true
 databricks/sdk/service/workspace.py linguist-generated=true
-examples/account/billable_usage/download_usage_download.py linguist-generated=true
-examples/account/budgets/create_budgets.py linguist-generated=true
-examples/account/budgets/get_budgets.py linguist-generated=true
-examples/account/budgets/list_budgets.py linguist-generated=true
-examples/account/budgets/update_budgets.py linguist-generated=true
-examples/account/credentials/create_credentials.py linguist-generated=true
-examples/account/credentials/create_log_delivery.py linguist-generated=true
-examples/account/credentials/create_workspaces.py linguist-generated=true
-examples/account/credentials/get_credentials.py linguist-generated=true
-examples/account/credentials/list_credentials.py linguist-generated=true
-examples/account/encryption_keys/create_encryption_keys.py linguist-generated=true
-examples/account/encryption_keys/get_encryption_keys.py linguist-generated=true
-examples/account/encryption_keys/list_encryption_keys.py linguist-generated=true
-examples/account/io/read_usage_download.py linguist-generated=true
-examples/account/log_delivery/create_log_delivery.py linguist-generated=true
-examples/account/log_delivery/get_log_delivery.py linguist-generated=true
-examples/account/log_delivery/list_log_delivery.py linguist-generated=true
-examples/account/metastore_assignments/list_metastore_assignments.py linguist-generated=true
-examples/account/networks/create_networks.py linguist-generated=true
-examples/account/networks/get_networks.py linguist-generated=true
-examples/account/networks/list_networks.py linguist-generated=true
-examples/account/private_access/create_private_access.py linguist-generated=true
-examples/account/private_access/get_private_access.py linguist-generated=true
-examples/account/private_access/list_private_access.py linguist-generated=true
-examples/account/private_access/replace_private_access.py linguist-generated=true
-examples/account/service_principals/create_account_service_principal.py linguist-generated=true
-examples/account/service_principals/create_workspace_assignment_on_aws.py linguist-generated=true
-examples/account/service_principals/get_account_service_principal.py linguist-generated=true
-examples/account/service_principals/list_account_service_principal.py linguist-generated=true
-examples/account/service_principals/patch_account_service_principal.py linguist-generated=true
-examples/account/service_principals/update_account_service_principal.py linguist-generated=true
-examples/account/storage/create_log_delivery.py linguist-generated=true
-examples/account/storage/create_storage.py linguist-generated=true
-examples/account/storage/create_workspaces.py linguist-generated=true
-examples/account/storage/get_storage.py linguist-generated=true
-examples/account/storage/list_storage.py linguist-generated=true
-examples/account/users/create_account_users.py linguist-generated=true
-examples/account/users/delete_account_users.py linguist-generated=true
-examples/account/users/get_account_users.py linguist-generated=true
-examples/account/users/patch_account_users.py linguist-generated=true
-examples/account/vpc_endpoints/create_vpc_endpoints.py linguist-generated=true
-examples/account/vpc_endpoints/get_vpc_endpoints.py linguist-generated=true
-examples/account/vpc_endpoints/list_vpc_endpoints.py linguist-generated=true
-examples/account/waiter/get_workspaces.py linguist-generated=true
-examples/account/workspace_assignment/list_workspace_assignment_on_aws.py linguist-generated=true
-examples/account/workspace_assignment/update_workspace_assignment_on_aws.py linguist-generated=true
-examples/account/workspaces/create_workspaces.py linguist-generated=true
-examples/account/workspaces/get_workspaces.py linguist-generated=true
-examples/account/workspaces/list_workspaces.py linguist-generated=true
-examples/account/workspaces/update_workspaces.py linguist-generated=true
-examples/workspace/alerts/create_alerts.py linguist-generated=true
-examples/workspace/alerts/get_alerts.py linguist-generated=true
-examples/workspace/alerts/list_alerts.py linguist-generated=true
-examples/workspace/alerts/update_alerts.py linguist-generated=true
-examples/workspace/catalogs/create_catalog_workspace_bindings.py linguist-generated=true
-examples/workspace/catalogs/create_catalogs.py linguist-generated=true
-examples/workspace/catalogs/create_schemas.py linguist-generated=true
-examples/workspace/catalogs/create_shares.py linguist-generated=true
-examples/workspace/catalogs/create_tables.py linguist-generated=true
-examples/workspace/catalogs/create_volumes.py linguist-generated=true
-examples/workspace/catalogs/get_catalogs.py linguist-generated=true
-examples/workspace/catalogs/list_catalogs.py linguist-generated=true
-examples/workspace/catalogs/update_catalog_workspace_bindings.py linguist-generated=true
-examples/workspace/catalogs/update_catalogs.py linguist-generated=true
-examples/workspace/cluster_policies/create_cluster_policies.py linguist-generated=true
-examples/workspace/cluster_policies/edit_cluster_policies.py linguist-generated=true
-examples/workspace/cluster_policies/get_cluster_policies.py linguist-generated=true
-examples/workspace/cluster_policies/list_cluster_policies.py linguist-generated=true
-examples/workspace/clusters/change_owner_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/create_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/delete_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/edit_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/ensure_cluster_is_running_commands_direct_usage.py linguist-generated=true
-examples/workspace/clusters/events_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/get_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/list_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/list_node_types_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/pin_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/resize_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/restart_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/select_node_type_instance_pools.py linguist-generated=true
-examples/workspace/clusters/select_spark_version_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/start_clusters_api_integration.py linguist-generated=true
-examples/workspace/clusters/unpin_clusters_api_integration.py linguist-generated=true
-examples/workspace/command_context/execute_commands.py linguist-generated=true
-examples/workspace/command_execution/create_commands_direct_usage.py linguist-generated=true
-examples/workspace/command_execution/execute_commands_direct_usage.py linguist-generated=true
-examples/workspace/command_execution/start_commands.py linguist-generated=true
-examples/workspace/connections/create_connections.py linguist-generated=true
-examples/workspace/connections/get_connections.py linguist-generated=true
-examples/workspace/connections/list_connections.py linguist-generated=true
-examples/workspace/connections/update_connections.py linguist-generated=true
-examples/workspace/current_user/me_current_user.py linguist-generated=true
-examples/workspace/current_user/me_tokens.py linguist-generated=true
-examples/workspace/dashboards/create_dashboards.py linguist-generated=true
-examples/workspace/dashboards/delete_dashboards.py linguist-generated=true
-examples/workspace/dashboards/get_dashboards.py linguist-generated=true
-examples/workspace/dashboards/list_dashboards.py linguist-generated=true
-examples/workspace/dashboards/restore_dashboards.py linguist-generated=true
-examples/workspace/data_sources/list_alerts.py linguist-generated=true
-examples/workspace/data_sources/list_queries.py linguist-generated=true
-examples/workspace/databricks/must_tokens.py linguist-generated=true
-examples/workspace/experiments/create_experiment_experiments.py linguist-generated=true
-examples/workspace/experiments/create_experiment_m_lflow_runs.py linguist-generated=true
-examples/workspace/experiments/create_run_m_lflow_runs.py linguist-generated=true
-examples/workspace/experiments/get_experiment_experiments.py linguist-generated=true
-examples/workspace/experiments/list_experiments_experiments.py linguist-generated=true
-examples/workspace/experiments/update_experiment_experiments.py linguist-generated=true
-examples/workspace/experiments/update_run_m_lflow_runs.py linguist-generated=true
-examples/workspace/external_locations/create_external_locations_on_aws.py linguist-generated=true
-examples/workspace/external_locations/create_volumes.py linguist-generated=true
-examples/workspace/external_locations/get_external_locations_on_aws.py linguist-generated=true
-examples/workspace/external_locations/list_external_locations_on_aws.py linguist-generated=true
-examples/workspace/external_locations/update_external_locations_on_aws.py linguist-generated=true
-examples/workspace/git_credentials/create_git_credentials.py linguist-generated=true
-examples/workspace/git_credentials/get_git_credentials.py linguist-generated=true
-examples/workspace/git_credentials/list_git_credentials.py linguist-generated=true
-examples/workspace/git_credentials/update_git_credentials.py linguist-generated=true
-examples/workspace/global_init_scripts/create_global_init_scripts.py linguist-generated=true
-examples/workspace/global_init_scripts/get_global_init_scripts.py linguist-generated=true
-examples/workspace/global_init_scripts/list_global_init_scripts.py linguist-generated=true
-examples/workspace/global_init_scripts/update_global_init_scripts.py linguist-generated=true
-examples/workspace/grants/get_effective_tables.py linguist-generated=true
-examples/workspace/grants/update_tables.py linguist-generated=true
-examples/workspace/groups/create_generic_permissions.py linguist-generated=true
-examples/workspace/groups/create_groups.py linguist-generated=true
-examples/workspace/groups/create_secrets.py linguist-generated=true
-examples/workspace/groups/delete_generic_permissions.py linguist-generated=true
-examples/workspace/groups/delete_groups.py linguist-generated=true
-examples/workspace/groups/delete_secrets.py linguist-generated=true
-examples/workspace/groups/get_groups.py linguist-generated=true
-examples/workspace/instance_pools/create_instance_pools.py linguist-generated=true
-examples/workspace/instance_pools/edit_instance_pools.py linguist-generated=true
-examples/workspace/instance_pools/get_instance_pools.py linguist-generated=true
-examples/workspace/instance_pools/list_instance_pools.py linguist-generated=true
-examples/workspace/instance_profiles/add_aws_instance_profiles.py linguist-generated=true
-examples/workspace/instance_profiles/edit_aws_instance_profiles.py linguist-generated=true
-examples/workspace/instance_profiles/list_aws_instance_profiles.py linguist-generated=true
-examples/workspace/ip_access_lists/create_ip_access_lists.py linguist-generated=true
-examples/workspace/ip_access_lists/get_ip_access_lists.py linguist-generated=true
-examples/workspace/ip_access_lists/list_ip_access_lists.py linguist-generated=true
-examples/workspace/ip_access_lists/replace_ip_access_lists.py linguist-generated=true
-examples/workspace/jobs/cancel_all_runs_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/cancel_run_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/create_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/export_run_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/get_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/get_run_output_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/list_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/list_runs_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/repair_run_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/reset_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/run_now_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/submit_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/jobs/update_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/libraries/update_libraries.py linguist-generated=true
-examples/workspace/metastores/assign_metastores.py linguist-generated=true
-examples/workspace/metastores/create_metastores.py linguist-generated=true
-examples/workspace/metastores/current_metastores.py linguist-generated=true
-examples/workspace/metastores/get_metastores.py linguist-generated=true
-examples/workspace/metastores/list_metastores.py linguist-generated=true
-examples/workspace/metastores/summary_metastores.py linguist-generated=true
-examples/workspace/metastores/unassign_metastores.py linguist-generated=true
-examples/workspace/metastores/update_metastores.py linguist-generated=true
-examples/workspace/model_registry/create_comment_model_version_comments.py linguist-generated=true
-examples/workspace/model_registry/create_model_model_version_comments.py linguist-generated=true
-examples/workspace/model_registry/create_model_model_versions.py linguist-generated=true
-examples/workspace/model_registry/create_model_models.py linguist-generated=true
-examples/workspace/model_registry/create_model_version_model_version_comments.py linguist-generated=true
-examples/workspace/model_registry/create_model_version_model_versions.py linguist-generated=true
-examples/workspace/model_registry/create_webhook_registry_webhooks.py linguist-generated=true
-examples/workspace/model_registry/get_model_models.py linguist-generated=true
-examples/workspace/model_registry/list_models_models.py linguist-generated=true
-examples/workspace/model_registry/list_webhooks_registry_webhooks.py linguist-generated=true
-examples/workspace/model_registry/update_comment_model_version_comments.py linguist-generated=true
-examples/workspace/model_registry/update_model_models.py linguist-generated=true
-examples/workspace/model_registry/update_model_version_model_versions.py linguist-generated=true
-examples/workspace/model_registry/update_webhook_registry_webhooks.py linguist-generated=true
-examples/workspace/permissions/get_generic_permissions.py linguist-generated=true
-examples/workspace/permissions/get_permission_levels_generic_permissions.py linguist-generated=true
-examples/workspace/permissions/set_generic_permissions.py linguist-generated=true
-examples/workspace/pipelines/create_pipelines.py linguist-generated=true
-examples/workspace/pipelines/get_pipelines.py linguist-generated=true
-examples/workspace/pipelines/list_pipeline_events_pipelines.py linguist-generated=true
-examples/workspace/pipelines/list_pipelines_pipelines.py linguist-generated=true
-examples/workspace/pipelines/update_pipelines.py linguist-generated=true
-examples/workspace/policy_families/get_cluster_policy_families.py linguist-generated=true
-examples/workspace/policy_families/list_cluster_policy_families.py linguist-generated=true
-examples/workspace/providers/create_providers.py linguist-generated=true
-examples/workspace/providers/get_providers.py linguist-generated=true
-examples/workspace/providers/list_providers.py linguist-generated=true
-examples/workspace/providers/list_shares_providers.py linguist-generated=true
-examples/workspace/providers/update_providers.py linguist-generated=true
-examples/workspace/queries/create_alerts.py linguist-generated=true
-examples/workspace/queries/create_queries.py linguist-generated=true
-examples/workspace/queries/get_queries.py linguist-generated=true
-examples/workspace/queries/update_queries.py linguist-generated=true
-examples/workspace/query_history/list_sql_query_history.py linguist-generated=true
-examples/workspace/recipients/create_recipients.py linguist-generated=true
-examples/workspace/recipients/get_recipients.py linguist-generated=true
-examples/workspace/recipients/list_recipients.py linguist-generated=true
-examples/workspace/recipients/rotate_token_recipients.py linguist-generated=true
-examples/workspace/recipients/share_permissions_recipients.py linguist-generated=true
-examples/workspace/recipients/update_recipients.py linguist-generated=true
-examples/workspace/repos/create_repos.py linguist-generated=true
-examples/workspace/repos/get_repos.py linguist-generated=true
-examples/workspace/repos/list_repos.py linguist-generated=true
-examples/workspace/repos/update_repos.py linguist-generated=true
-examples/workspace/schemas/create_schemas.py linguist-generated=true
-examples/workspace/schemas/create_shares.py linguist-generated=true
-examples/workspace/schemas/create_tables.py linguist-generated=true
-examples/workspace/schemas/create_volumes.py linguist-generated=true
-examples/workspace/schemas/get_schemas.py linguist-generated=true
-examples/workspace/schemas/list_schemas.py linguist-generated=true
-examples/workspace/schemas/update_schemas.py linguist-generated=true
-examples/workspace/secrets/create_scope_secrets.py linguist-generated=true
-examples/workspace/secrets/list_acls_secrets.py linguist-generated=true
-examples/workspace/secrets/list_scopes_secrets.py linguist-generated=true
-examples/workspace/secrets/list_secrets_secrets.py linguist-generated=true
-examples/workspace/secrets/put_acl_secrets.py linguist-generated=true
-examples/workspace/secrets/put_secret_secrets.py linguist-generated=true
-examples/workspace/service_principals/create_create_obo_token_on_aws.py linguist-generated=true
-examples/workspace/service_principals/create_service_principals_on_aws.py linguist-generated=true
-examples/workspace/service_principals/get_service_principals_on_aws.py linguist-generated=true
-examples/workspace/service_principals/list_service_principals_on_aws.py linguist-generated=true
-examples/workspace/service_principals/patch_service_principals_on_aws.py linguist-generated=true
-examples/workspace/service_principals/update_service_principals_on_aws.py linguist-generated=true
-examples/workspace/shares/create_shares.py linguist-generated=true
-examples/workspace/shares/get_shares.py linguist-generated=true
-examples/workspace/shares/list_shares.py linguist-generated=true
-examples/workspace/shares/update_shares.py linguist-generated=true
-examples/workspace/statement_execution/execute_shares.py linguist-generated=true
-examples/workspace/statement_execution/execute_tables.py linguist-generated=true
-examples/workspace/storage_credentials/create_external_locations_on_aws.py linguist-generated=true
-examples/workspace/storage_credentials/create_storage_credentials_on_aws.py linguist-generated=true
-examples/workspace/storage_credentials/create_volumes.py linguist-generated=true
-examples/workspace/storage_credentials/get_storage_credentials_on_aws.py linguist-generated=true
-examples/workspace/storage_credentials/list_storage_credentials_on_aws.py linguist-generated=true
-examples/workspace/storage_credentials/update_storage_credentials_on_aws.py linguist-generated=true
-examples/workspace/tables/get_tables.py linguist-generated=true
-examples/workspace/tables/list_summaries_tables.py linguist-generated=true
-examples/workspace/tables/list_tables.py linguist-generated=true
-examples/workspace/token_management/create_obo_token_create_obo_token_on_aws.py linguist-generated=true
-examples/workspace/token_management/get_create_obo_token_on_aws.py linguist-generated=true
-examples/workspace/token_management/list_create_obo_token_on_aws.py linguist-generated=true
-examples/workspace/tokens/create_tokens.py linguist-generated=true
-examples/workspace/tokens/get_tokens.py linguist-generated=true
-examples/workspace/tokens/list_tokens.py linguist-generated=true
-examples/workspace/users/create_clusters_api_integration.py linguist-generated=true
-examples/workspace/users/create_workspace_users.py linguist-generated=true
-examples/workspace/users/delete_clusters_api_integration.py linguist-generated=true
-examples/workspace/users/delete_workspace_users.py linguist-generated=true
-examples/workspace/users/get_workspace_users.py linguist-generated=true
-examples/workspace/users/list_workspace_users.py linguist-generated=true
-examples/workspace/users/patch_workspace_users.py linguist-generated=true
-examples/workspace/users/update_workspace_users.py linguist-generated=true
-examples/workspace/volumes/create_volumes.py linguist-generated=true
-examples/workspace/volumes/list_volumes.py linguist-generated=true
-examples/workspace/volumes/read_volumes.py linguist-generated=true
-examples/workspace/volumes/update_volumes.py linguist-generated=true
-examples/workspace/warehouses/create_sql_warehouses.py linguist-generated=true
-examples/workspace/warehouses/edit_sql_warehouses.py linguist-generated=true
-examples/workspace/warehouses/get_sql_warehouses.py linguist-generated=true
-examples/workspace/warehouses/list_sql_warehouses.py linguist-generated=true
-examples/workspace/workspace/export_workspace_integration.py linguist-generated=true
-examples/workspace/workspace/get_status_generic_permissions.py linguist-generated=true
-examples/workspace/workspace/get_status_workspace_integration.py linguist-generated=true
-examples/workspace/workspace/import_generic_permissions.py linguist-generated=true
-examples/workspace/workspace/import_jobs_api_full_integration.py linguist-generated=true
-examples/workspace/workspace/import_pipelines.py linguist-generated=true
-examples/workspace/workspace/import_workspace_integration.py linguist-generated=true
-examples/workspace/workspace/list_workspace_integration.py linguist-generated=true
-examples/workspace/workspace_bindings/get_catalog_workspace_bindings.py linguist-generated=true
-examples/workspace/workspace_bindings/update_catalog_workspace_bindings.py linguist-generated=true
-examples/workspace/workspace_conf/get_status_repos.py linguist-generated=true
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index b177d97dc..617f2cee2 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -24,6 +24,7 @@
                                             StorageCredentialsAPI,
                                             SystemSchemasAPI,
                                             TableConstraintsAPI, TablesAPI,
+                                            TemporaryTableCredentialsAPI,
                                             VolumesAPI, WorkspaceBindingsAPI)
 from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             CommandExecutionAPI,
@@ -69,6 +70,8 @@
                                              CredentialsManagerAPI,
                                              CspEnablementAccountAPI,
                                              DefaultNamespaceAPI,
+                                             DisableLegacyAccessAPI,
+                                             DisableLegacyFeaturesAPI,
                                              EnhancedSecurityMonitoringAPI,
                                              EsmEnablementAccountAPI,
                                              IpAccessListsAPI,
@@ -253,6 +256,7 @@ def __init__(self,
         self._system_schemas = SystemSchemasAPI(self._api_client)
         self._table_constraints = TableConstraintsAPI(self._api_client)
         self._tables = TablesAPI(self._api_client)
+        self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client)
         self._token_management = TokenManagementAPI(self._api_client)
         self._tokens = TokensAPI(self._api_client)
         self._users = UsersAPI(self._api_client)
@@ -676,6 +680,11 @@ def tables(self) -> TablesAPI:
         """A table resides in the third layer of Unity Catalog’s three-level namespace."""
         return self._tables
 
+    @property
+    def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI:
+        """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks."""
+        return self._temporary_table_credentials
+
     @property
     def token_management(self) -> TokenManagementAPI:
         """Enables administrators to get all tokens and delete tokens for other users."""
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 7ec495b19..9cafe235e 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -27,6 +27,10 @@ class App:
     active_deployment: Optional[AppDeployment] = None
     """The active deployment of the app."""
 
+    app_status: Optional[ApplicationStatus] = None
+
+    compute_status: Optional[ComputeStatus] = None
+
     create_time: Optional[str] = None
     """The creation time of the app. Formatted timestamp in ISO 6801."""
 
@@ -43,8 +47,6 @@ class App:
 
     service_principal_name: Optional[str] = None
 
-    status: Optional[AppStatus] = None
-
     update_time: Optional[str] = None
     """The update time of the app. Formatted timestamp in ISO 6801."""
 
@@ -58,6 +60,8 @@ def as_dict(self) -> dict:
         """Serializes the App into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.active_deployment: body['active_deployment'] = self.active_deployment.as_dict()
+        if self.app_status: body['app_status'] = self.app_status.as_dict()
+        if self.compute_status: body['compute_status'] = self.compute_status.as_dict()
         if self.create_time is not None: body['create_time'] = self.create_time
         if self.creator is not None: body['creator'] = self.creator
         if self.description is not None: body['description'] = self.description
@@ -66,7 +70,6 @@ def as_dict(self) -> dict:
         if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
         if self.service_principal_name is not None:
             body['service_principal_name'] = self.service_principal_name
-        if self.status: body['status'] = self.status.as_dict()
         if self.update_time is not None: body['update_time'] = self.update_time
         if self.updater is not None: body['updater'] = self.updater
         if self.url is not None: body['url'] = self.url
@@ -76,6 +79,8 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> App:
         """Deserializes the App from a dictionary."""
         return cls(active_deployment=_from_dict(d, 'active_deployment', AppDeployment),
+                   app_status=_from_dict(d, 'app_status', ApplicationStatus),
+                   compute_status=_from_dict(d, 'compute_status', ComputeStatus),
                    create_time=d.get('create_time', None),
                    creator=d.get('creator', None),
                    description=d.get('description', None),
@@ -83,7 +88,6 @@ def from_dict(cls, d: Dict[str, any]) -> App:
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                    service_principal_id=d.get('service_principal_id', None),
                    service_principal_name=d.get('service_principal_name', None),
-                   status=_from_dict(d, 'status', AppStatus),
                    update_time=d.get('update_time', None),
                    updater=d.get('updater', None),
                    url=d.get('url', None))
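
With the single `status` field replaced by the `app_status`/`compute_status` pair, callers now inspect the application process and its compute separately. A minimal sketch, with `'my-app'` as a placeholder name:

```python
# Sketch of reading the split status fields introduced above; 'my-app' is a
# placeholder app name.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
app = w.apps.get(name='my-app')

# app_status tracks the application itself; compute_status tracks its compute.
if app.app_status is not None:
    print(f'app: {app.app_status.state} ({app.app_status.message})')
if app.compute_status is not None:
    print(f'compute: {app.compute_status.state} ({app.compute_status.message})')
```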
@@ -162,13 +166,6 @@ def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
 
 @dataclass
 class AppDeployment:
-    source_code_path: str
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
     create_time: Optional[str] = None
     """The creation time of the deployment. Formatted timestamp in ISO 6801."""
 
@@ -184,6 +181,13 @@ class AppDeployment:
     mode: Optional[AppDeploymentMode] = None
     """The mode of which the deployment will manage the source code."""
 
+    source_code_path: Optional[str] = None
+    """The workspace file system path of the source code used to create the app deployment. This is
+    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
+    app. The former refers to the original source code location of the app in the workspace during
+    deployment creation, whereas the latter provides a system generated stable snapshotted source
+    code path used by the deployment."""
+
     status: Optional[AppDeploymentStatus] = None
     """Status and status message of the deployment"""
 
@@ -241,9 +245,9 @@ class AppDeploymentMode(Enum):
 
 class AppDeploymentState(Enum):
 
+    CANCELLED = 'CANCELLED'
     FAILED = 'FAILED'
     IN_PROGRESS = 'IN_PROGRESS'
-    STOPPED = 'STOPPED'
     SUCCEEDED = 'SUCCEEDED'
 
 
@@ -368,57 +372,90 @@ def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
                    app_name=d.get('app_name', None))
 
 
-class AppState(Enum):
+class ApplicationState(Enum):
+
+    CRASHED = 'CRASHED'
+    DEPLOYING = 'DEPLOYING'
+    RUNNING = 'RUNNING'
+    UNAVAILABLE = 'UNAVAILABLE'
+
+
+@dataclass
+class ApplicationStatus:
+    message: Optional[str] = None
+    """Application status message"""
+
+    state: Optional[ApplicationState] = None
+    """State of the application."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ApplicationStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus:
+        """Deserializes the ApplicationStatus from a dictionary."""
+        return cls(message=d.get('message', None), state=_enum(d, 'state', ApplicationState))
+
+
+class ComputeState(Enum):
 
-    CREATING = 'CREATING'
-    DELETED = 'DELETED'
+    ACTIVE = 'ACTIVE'
     DELETING = 'DELETING'
     ERROR = 'ERROR'
-    IDLE = 'IDLE'
-    RUNNING = 'RUNNING'
     STARTING = 'STARTING'
+    STOPPED = 'STOPPED'
+    STOPPING = 'STOPPING'
+    UPDATING = 'UPDATING'
 
 
 @dataclass
-class AppStatus:
+class ComputeStatus:
     message: Optional[str] = None
-    """Message corresponding with the app state."""
+    """Compute status message"""
 
-    state: Optional[AppState] = None
-    """State of the app."""
+    state: Optional[ComputeState] = None
+    """State of the app compute."""
 
     def as_dict(self) -> dict:
-        """Serializes the AppStatus into a dictionary suitable for use as a JSON request body."""
+        """Serializes the ComputeStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.message is not None: body['message'] = self.message
         if self.state is not None: body['state'] = self.state.value
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> AppStatus:
-        """Deserializes the AppStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', AppState))
+    def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
+        """Deserializes the ComputeStatus from a dictionary."""
+        return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState))
 
 
 @dataclass
 class CreateAppDeploymentRequest:
-    source_code_path: str
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
     app_name: Optional[str] = None
     """The name of the app."""
 
+    deployment_id: Optional[str] = None
+    """The unique id of the deployment."""
+
     mode: Optional[AppDeploymentMode] = None
     """The mode of which the deployment will manage the source code."""
 
+    source_code_path: Optional[str] = None
+    """The workspace file system path of the source code used to create the app deployment. This is
+    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
+    app. The former refers to the original source code location of the app in the workspace during
+    deployment creation, whereas the latter provides a system generated stable snapshotted source
+    code path used by the deployment."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.app_name is not None: body['app_name'] = self.app_name
+        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
         if self.mode is not None: body['mode'] = self.mode.value
         if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
         return body
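
Both new status dataclasses follow the SDK's usual `as_dict`/`from_dict` round-trip. A self-contained sketch using only names defined in the hunk above:

```python
# Round-trip sketch for the new ComputeStatus dataclass.
from databricks.sdk.service.apps import ComputeState, ComputeStatus

status = ComputeStatus(state=ComputeState.ACTIVE, message='compute is up')
body = status.as_dict()            # {'message': 'compute is up', 'state': 'ACTIVE'}
restored = ComputeStatus.from_dict(body)
assert restored == status          # dataclass equality holds after the round trip
```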
@@ -427,6 +464,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
         """Deserializes the CreateAppDeploymentRequest from a dictionary."""
         return cls(app_name=d.get('app_name', None),
+                   deployment_id=d.get('deployment_id', None),
                    mode=_enum(d, 'mode', AppDeploymentMode),
                    source_code_path=d.get('source_code_path', None))
 
@@ -453,20 +491,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
         return cls(description=d.get('description', None), name=d.get('name', None))
 
 
-@dataclass
-class DeleteResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
-        """Deserializes the DeleteResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class GetAppPermissionLevelsResponse:
     permission_levels: Optional[List[AppPermissionsDescription]] = None
@@ -538,20 +562,6 @@ class StopAppRequest:
     """The name of the app."""
 
 
-@dataclass
-class StopAppResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the StopAppResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> StopAppResponse:
-        """Deserializes the StopAppResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class UpdateAppRequest:
     name: str
@@ -581,27 +591,59 @@ class AppsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_app_idle(self,
-                          name: str,
-                          timeout=timedelta(minutes=20),
-                          callback: Optional[Callable[[App], None]] = None) -> App:
+    def wait_get_app_active(self,
+                            name: str,
+                            timeout=timedelta(minutes=20),
+                            callback: Optional[Callable[[App], None]] = None) -> App:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (AppState.IDLE, )
-        failure_states = (AppState.ERROR, )
+        target_states = (ComputeState.ACTIVE, )
+        failure_states = (ComputeState.ERROR, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
             poll = self.get(name=name)
-            status = poll.status.state
+            status = poll.compute_status.state
             status_message = f'current status: {status}'
-            if poll.status:
-                status_message = poll.status.message
+            if poll.compute_status.message:
+                status_message = poll.compute_status.message
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"name={name}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def wait_get_app_stopped(self,
+                             name: str,
+                             timeout=timedelta(minutes=20),
+                             callback: Optional[Callable[[App], None]] = None) -> App:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (ComputeState.STOPPED, )
+        failure_states = (ComputeState.ERROR, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(name=name)
+            status = poll.compute_status.state
+            status_message = f'current status: {status}'
+            if poll.compute_status.message:
+                status_message = poll.compute_status.message
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                msg = f'failed to reach STOPPED, got {status}: {status_message}'
                 raise OperationFailed(msg)
             prefix = f"name={name}"
             sleep = attempt
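
Both waiters share the same loop shape: poll, compare against target and failure states, then sleep with linear backoff capped at 10 seconds plus up to one second of jitter. A standalone sketch of that pattern, independent of the SDK:

```python
import random
import time

def poll_until(check, timeout_s=1200):
    """Poll `check` until it returns a non-None result, mirroring the waiters above.

    Linear backoff capped at 10s per attempt, with up to 1s of random jitter.
    """
    deadline = time.time() + timeout_s
    attempt = 1
    while time.time() < deadline:
        result = check()
        if result is not None:
            return result
        sleep = min(attempt, 10)             # sleep at most 10s per attempt
        time.sleep(sleep + random.random())  # jitter desynchronizes concurrent pollers
        attempt += 1
    raise TimeoutError(f'timed out after {timeout_s}s')
```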
@@ -660,15 +702,15 @@ def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]:
         
         :returns:
           Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_idle for more details.
+          See :method:wait_get_app_active for more details.
         """
         body = {}
         if description is not None: body['description'] = description
         if name is not None: body['name'] = name
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/preview/apps', body=body, headers=headers)
-        return Wait(self.wait_get_app_idle, response=App.from_dict(op_response), name=op_response['name'])
+        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
     def create_and_wait(self,
                         name: str,
@@ -677,7 +719,7 @@ def create_and_wait(self,
                         timeout=timedelta(minutes=20)) -> App:
         return self.create(description=description, name=name).result(timeout=timeout)
 
-    def delete(self, name: str):
+    def delete(self, name: str) -> App:
         """Delete an app.
         
         Deletes an app.
@@ -685,44 +727,49 @@ def delete(self, name: str):
         :param name: str
           The name of the app.
         
-        
+        :returns: :class:`App`
         """
 
         headers = {'Accept': 'application/json', }
 
-        self._api.do('DELETE', f'/api/2.0/preview/apps/{name}', headers=headers)
+        res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers)
+        return App.from_dict(res)
 
     def deploy(self,
                app_name: str,
-               source_code_path: str,
                *,
-               mode: Optional[AppDeploymentMode] = None) -> Wait[AppDeployment]:
+               deployment_id: Optional[str] = None,
+               mode: Optional[AppDeploymentMode] = None,
+               source_code_path: Optional[str] = None) -> Wait[AppDeployment]:
         """Create an app deployment.
         
         Creates an app deployment for the app with the supplied name.
         
         :param app_name: str
           The name of the app.
-        :param source_code_path: str
+        :param deployment_id: str (optional)
+          The unique id of the deployment.
+        :param mode: :class:`AppDeploymentMode` (optional)
+          The mode in which the deployment will manage the source code.
+        :param source_code_path: str (optional)
           The workspace file system path of the source code used to create the app deployment. This is
           different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
           The former refers to the original source code location of the app in the workspace during deployment
           creation, whereas the latter provides a system generated stable snapshotted source code path used by
           the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         """
         body = {}
+        if deployment_id is not None: body['deployment_id'] = deployment_id
         if mode is not None: body['mode'] = mode.value
         if source_code_path is not None: body['source_code_path'] = source_code_path
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST',
-                                   f'/api/2.0/preview/apps/{app_name}/deployments',
+                                   f'/api/2.0/apps/{app_name}/deployments',
                                    body=body,
                                    headers=headers)
         return Wait(self.wait_get_deployment_app_succeeded,
@@ -733,11 +780,14 @@ def deploy(self,
     def deploy_and_wait(
         self,
         app_name: str,
-        source_code_path: str,
         *,
+        deployment_id: Optional[str] = None,
         mode: Optional[AppDeploymentMode] = None,
+        source_code_path: Optional[str] = None,
         timeout=timedelta(minutes=20)) -> AppDeployment:
-        return self.deploy(app_name=app_name, mode=mode,
+        return self.deploy(app_name=app_name,
+                           deployment_id=deployment_id,
+                           mode=mode,
                            source_code_path=source_code_path).result(timeout=timeout)
 
     def get(self, name: str) -> App:
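
With `source_code_path` now optional and a caller-supplied `deployment_id` accepted, a deployment call might look like the following sketch; the app name, workspace path, and id are placeholders:

```python
# Sketch of the reworked deploy() signature; all values are placeholders.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
deployment = w.apps.deploy_and_wait(
    app_name='my-app',
    source_code_path='/Workspace/Users/someone@example.com/my-app',
    deployment_id='my-deployment-001',  # optional unique id added by this patch
)
print(deployment.status)
```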
@@ -753,7 +803,7 @@ def get(self, name: str) -> App:
 
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET', f'/api/2.0/preview/apps/{name}', headers=headers)
+        res = self._api.do('GET', f'/api/2.0/apps/{name}', headers=headers)
         return App.from_dict(res)
 
     def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment:
@@ -771,9 +821,7 @@ def get_deployment(self, app_name: str, deployment_id: str) -> AppDeployment:
 
         headers = {'Accept': 'application/json', }
 
-        res = self._api.do('GET',
-                           f'/api/2.0/preview/apps/{app_name}/deployments/{deployment_id}',
-                           headers=headers)
+        res = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments/{deployment_id}', headers=headers)
         return AppDeployment.from_dict(res)
 
     def get_permission_levels(self, app_name: str) -> GetAppPermissionLevelsResponse:
@@ -827,7 +875,7 @@ def list(self, *, page_size: Optional[int] = None, page_token: Optional[str] = N
         headers = {'Accept': 'application/json', }
 
         while True:
-            json = self._api.do('GET', '/api/2.0/preview/apps', query=query, headers=headers)
+            json = self._api.do('GET', '/api/2.0/apps', query=query, headers=headers)
             if 'apps' in json:
                 for v in json['apps']:
                     yield App.from_dict(v)
@@ -860,10 +908,7 @@ def list_deployments(self,
         headers = {'Accept': 'application/json', }
 
         while True:
-            json = self._api.do('GET',
-                                f'/api/2.0/preview/apps/{app_name}/deployments',
-                                query=query,
-                                headers=headers)
+            json = self._api.do('GET', f'/api/2.0/apps/{app_name}/deployments', query=query, headers=headers)
             if 'app_deployments' in json:
                 for v in json['app_deployments']:
                     yield AppDeployment.from_dict(v)
@@ -894,7 +939,7 @@ def set_permissions(
         res = self._api.do('PUT', f'/api/2.0/permissions/apps/{app_name}', body=body, headers=headers)
         return AppPermissions.from_dict(res)
 
-    def start(self, name: str) -> Wait[AppDeployment]:
+    def start(self, name: str) -> Wait[App]:
         """Start an app.
         
         Start the last active deployment of the app in the workspace.
@@ -903,22 +948,19 @@ def start(self, name: str) -> Wait[AppDeployment]:
           The name of the app.
         
         :returns:
-          Long-running operation waiter for :class:`AppDeployment`.
-          See :method:wait_get_deployment_app_succeeded for more details.
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
         """
 
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', f'/api/2.0/preview/apps/{name}/start', headers=headers)
-        return Wait(self.wait_get_deployment_app_succeeded,
-                    response=AppDeployment.from_dict(op_response),
-                    app_name=name,
-                    deployment_id=op_response['deployment_id'])
+        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/start', headers=headers)
+        return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> AppDeployment:
+    def start_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.start(name=name).result(timeout=timeout)
 
-    def stop(self, name: str):
+    def stop(self, name: str) -> Wait[App]:
         """Stop an app.
         
         Stops the active deployment of the app in the workspace.
@@ -926,12 +968,18 @@ def stop(self, name: str):
         :param name: str
           The name of the app.
         
-        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_stopped for more details.
         """
 
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('POST', f'/api/2.0/preview/apps/{name}/stop', headers=headers)
+        op_response = self._api.do('POST', f'/api/2.0/apps/{name}/stop', headers=headers)
+        return Wait(self.wait_get_app_stopped, response=App.from_dict(op_response), name=op_response['name'])
+
+    def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
+        return self.stop(name=name).result(timeout=timeout)
 
     def update(self, name: str, *, description: Optional[str] = None) -> App:
         """Update an app.
@@ -950,7 +998,7 @@ def update(self, name: str, *, description: Optional[str] = None) -> App:
         if description is not None: body['description'] = description
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        res = self._api.do('PATCH', f'/api/2.0/preview/apps/{name}', body=body, headers=headers)
+        res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
         return App.from_dict(res)
 
     def update_permissions(
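
Taken together, the app lifecycle in this file now exposes blocking start and stop helpers backed by the compute-status waiters. A usage sketch with a placeholder app name:

```python
# Sketch of the stop/start lifecycle added in this patch; 'my-app' is a
# placeholder name.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

stopped = w.apps.stop_and_wait(name='my-app')   # blocks until ComputeState.STOPPED
print(stopped.compute_status.state)

started = w.apps.start_and_wait(name='my-app')  # blocks until ComputeState.ACTIVE
print(started.compute_status.state)
```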
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index b372bc9b2..9c795dc2a 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -274,6 +274,42 @@ def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
         return cls()
 
 
+@dataclass
+class AwsCredentials:
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    access_key_id: Optional[str] = None
+    """The access key ID that identifies the temporary credentials."""
+
+    access_point: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related the
+    external location."""
+
+    secret_access_key: Optional[str] = None
+    """The secret access key that can be used to sign AWS API requests."""
+
+    session_token: Optional[str] = None
+    """The token that users must pass to AWS API to use the temporary credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
+        """Deserializes the AwsCredentials from a dictionary."""
+        return cls(access_key_id=d.get('access_key_id', None),
+                   access_point=d.get('access_point', None),
+                   secret_access_key=d.get('secret_access_key', None),
+                   session_token=d.get('session_token', None))
+
+
 @dataclass
 class AwsIamRoleRequest:
     role_arn: str
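
`AwsCredentials` carries the standard STS triple plus an optional access point ARN. As an illustration of how a caller might consume it, the boto3 wiring below is an assumption; boto3 is not a dependency of this SDK:

```python
# Illustration only: handing the temporary credentials to boto3. boto3 and the
# helper below are assumptions, not part of this patch.
import boto3

from databricks.sdk.service.catalog import AwsCredentials

def s3_client_from(creds: AwsCredentials):
    # Map the Unity Catalog credential fields onto boto3 session arguments.
    session = boto3.Session(
        aws_access_key_id=creds.access_key_id,
        aws_secret_access_key=creds.secret_access_key,
        aws_session_token=creds.session_token,
    )
    return session.client('s3')
```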
@@ -405,6 +441,26 @@ def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal:
                    directory_id=d.get('directory_id', None))
 
 
+@dataclass
+class AzureUserDelegationSas:
+    """Azure temporary credentials for API authentication. Read more at
+    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
+
+    sas_token: Optional[str] = None
+    """The signed URI (SAS Token) used to access blob services for a given path"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
+        """Deserializes the AzureUserDelegationSas from a dictionary."""
+        return cls(sas_token=d.get('sas_token', None))
+
+
 @dataclass
 class CancelRefreshResponse:
 
@@ -1086,9 +1142,6 @@ class CreateFunction:
     full_data_type: str
     """Pretty printed function data type."""
 
-    return_params: FunctionParameterInfos
-    """Table function return parameters."""
-
     routine_body: CreateFunctionRoutineBody
     """Function language. When **EXTERNAL** is used, the language of the routine function should be
     specified in the __external_language__ field, and the __return_params__ of the function cannot
@@ -1098,9 +1151,6 @@ class CreateFunction:
     routine_definition: str
     """Function body."""
 
-    routine_dependencies: DependencyList
-    """Function dependencies."""
-
     parameter_style: CreateFunctionParameterStyle
     """Function parameter style. **S** is the value for SQL."""
 
@@ -1131,6 +1181,12 @@ class CreateFunction:
     properties: Optional[str] = None
     """JSON-serialized key-value pair map, encoded (escaped) as a string."""
 
+    return_params: Optional[FunctionParameterInfos] = None
+    """Table function return parameters."""
+
+    routine_dependencies: Optional[DependencyList] = None
+    """Function dependencies."""
+
     sql_path: Optional[str] = None
     """List of schemes whose objects can be referenced without qualification."""
 
@@ -2438,6 +2494,97 @@ class FunctionParameterType(Enum):
     PARAM = 'PARAM'
 
 
+@dataclass
+class GcpOauthToken:
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
+    oauth_token: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
+        """Deserializes the GcpOauthToken from a dictionary."""
+        return cls(oauth_token=d.get('oauth_token', None))
+
+
+@dataclass
+class GenerateTemporaryTableCredentialRequest:
+    operation: Optional[TableOperation] = None
+    """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+    specified, the credentials returned will have write permissions, otherwise, it will be read
+    only."""
+
+    table_id: Optional[str] = None
+    """UUID of the table to read or write."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.operation is not None: body['operation'] = self.operation.value
+        if self.table_id is not None: body['table_id'] = self.table_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
+        """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
+        return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
+
+
+@dataclass
+class GenerateTemporaryTableCredentialResponse:
+    aws_temp_credentials: Optional[AwsCredentials] = None
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
+    """Azure temporary credentials for API authentication. Read more at
+    https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
+
+    expiration_time: Optional[int] = None
+    """Server time when the credential will expire, in unix epoch milliseconds since January 1, 1970 at
+    00:00:00 UTC. The API client is advised to cache the credential given this expiration time."""
+
+    gcp_oauth_token: Optional[GcpOauthToken] = None
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
+    r2_temp_credentials: Optional[R2Credentials] = None
+    """R2 temporary credentials for API authentication. Read more at
+    https://developers.cloudflare.com/r2/api/s3/tokens/."""
+
+    url: Optional[str] = None
+    """The URL of the storage path accessible by the temporary credential."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_user_delegation_sas:
+            body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
+        """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
+        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
+                                                        AzureUserDelegationSas),
+                   expiration_time=d.get('expiration_time', None),
+                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken),
+                   r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials),
+                   url=d.get('url', None))
+
+
 class GetBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
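
The response type above carries a millisecond expiration timestamp alongside the cloud-specific credential fields; that at most one of those fields is populated per response is an assumption inferred from the field layout, not stated in this hunk. A consumption sketch:

```python
# Sketch of inspecting a GenerateTemporaryTableCredentialResponse. The
# one-credential-per-response assumption is inferred, not documented here.
import time

from databricks.sdk.service.catalog import GenerateTemporaryTableCredentialResponse

def describe(resp: GenerateTemporaryTableCredentialResponse) -> str:
    # expiration_time is documented as epoch milliseconds, so compare in ms.
    if resp.expiration_time is not None and resp.expiration_time <= time.time() * 1000:
        return 'credential already expired'
    if resp.aws_temp_credentials is not None:
        return f'AWS credential for {resp.url}'
    if resp.azure_user_delegation_sas is not None:
        return f'Azure SAS token for {resp.url}'
    if resp.gcp_oauth_token is not None:
        return f'GCP OAuth token for {resp.url}'
    if resp.r2_temp_credentials is not None:
        return f'R2 credential for {resp.url}'
    return 'no credential returned'
```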
@@ -2469,6 +2616,9 @@ class GetMetastoreSummaryResponse:
     delta_sharing_scope: Optional[GetMetastoreSummaryResponseDeltaSharingScope] = None
     """The scope of Delta Sharing enabled for the metastore."""
 
+    external_access_enabled: Optional[bool] = None
+    """Whether to allow non-DBR clients to directly access entities under the metastore."""
+
     global_metastore_id: Optional[str] = None
     """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
 
@@ -2516,6 +2666,8 @@ def as_dict(self) -> dict:
             body[
                 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
         if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
         if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -2544,6 +2696,7 @@ def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse:
                        'delta_sharing_recipient_token_lifetime_in_seconds', None),
                    delta_sharing_scope=_enum(d, 'delta_sharing_scope',
                                              GetMetastoreSummaryResponseDeltaSharingScope),
+                   external_access_enabled=d.get('external_access_enabled', None),
                    global_metastore_id=d.get('global_metastore_id', None),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -2996,6 +3149,9 @@ class MetastoreInfo:
     delta_sharing_scope: Optional[MetastoreInfoDeltaSharingScope] = None
     """The scope of Delta Sharing enabled for the metastore."""
 
+    external_access_enabled: Optional[bool] = None
+    """Whether to allow non-DBR clients to directly access entities under the metastore."""
+
     global_metastore_id: Optional[str] = None
     """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
 
@@ -3043,6 +3199,8 @@ def as_dict(self) -> dict:
             body[
                 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
         if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
         if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -3070,6 +3228,7 @@ def from_dict(cls, d: Dict[str, any]) -> MetastoreInfo:
                    delta_sharing_recipient_token_lifetime_in_seconds=d.get(
                        'delta_sharing_recipient_token_lifetime_in_seconds', None),
                    delta_sharing_scope=_enum(d, 'delta_sharing_scope', MetastoreInfoDeltaSharingScope),
+                   external_access_enabled=d.get('external_access_enabled', None),
                    global_metastore_id=d.get('global_metastore_id', None),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -4151,6 +4310,36 @@ def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
                    quota_name=d.get('quota_name', None))
 
 
+@dataclass
+class R2Credentials:
+    """R2 temporary credentials for API authentication. Read more at
+    https://developers.cloudflare.com/r2/api/s3/tokens/."""
+
+    access_key_id: Optional[str] = None
+    """The access key ID that identifies the temporary credentials."""
+
+    secret_access_key: Optional[str] = None
+    """The secret access key associated with the access key."""
+
+    session_token: Optional[str] = None
+    """The generated JWT that users must pass to use the temporary credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
+        """Deserializes the R2Credentials from a dictionary."""
+        return cls(access_key_id=d.get('access_key_id', None),
+                   secret_access_key=d.get('secret_access_key', None),
+                   session_token=d.get('session_token', None))
+
+
 @dataclass
 class RegenerateDashboardRequest:
     table_name: Optional[str] = None
@@ -4896,6 +5085,12 @@ def from_dict(cls, d: Dict[str, any]) -> TableInfo:
                    view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
 
 
+class TableOperation(Enum):
+
+    READ = 'READ'
+    READ_WRITE = 'READ_WRITE'
+
+
 @dataclass
 class TableRowFilter:
     function_name: str
@@ -9135,7 +9330,8 @@ def get(self,
             full_name: str,
             *,
             include_browse: Optional[bool] = None,
-            include_delta_metadata: Optional[bool] = None) -> TableInfo:
+            include_delta_metadata: Optional[bool] = None,
+            include_manifest_capabilities: Optional[bool] = None) -> TableInfo:
         """Get a table.
         
         Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
@@ -9151,6 +9347,8 @@ def get(self,
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         
         :returns: :class:`TableInfo`
         """
@@ -9158,6 +9356,8 @@ def get(self,
         query = {}
         if include_browse is not None: query['include_browse'] = include_browse
         if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}', query=query, headers=headers)
@@ -9169,6 +9369,7 @@ def list(self,
              *,
              include_browse: Optional[bool] = None,
              include_delta_metadata: Optional[bool] = None,
+             include_manifest_capabilities: Optional[bool] = None,
              max_results: Optional[int] = None,
              omit_columns: Optional[bool] = None,
              omit_properties: Optional[bool] = None,
@@ -9190,6 +9391,8 @@ def list(self,
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
           when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -9209,6 +9412,8 @@ def list(self,
         if catalog_name is not None: query['catalog_name'] = catalog_name
         if include_browse is not None: query['include_browse'] = include_browse
         if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         if max_results is not None: query['max_results'] = max_results
         if omit_columns is not None: query['omit_columns'] = omit_columns
         if omit_properties is not None: query['omit_properties'] = omit_properties
@@ -9228,6 +9433,7 @@ def list(self,
     def list_summaries(self,
                        catalog_name: str,
                        *,
+                       include_manifest_capabilities: Optional[bool] = None,
                        max_results: Optional[int] = None,
                        page_token: Optional[str] = None,
                        schema_name_pattern: Optional[str] = None,
@@ -9247,6 +9453,8 @@ def list_summaries(self,
         
         :param catalog_name: str
           Name of parent catalog for tables of interest.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of summaries for tables to return. If not set, the page length is set to a server
           configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
@@ -9265,6 +9473,8 @@ def list_summaries(self,
 
         query = {}
         if catalog_name is not None: query['catalog_name'] = catalog_name
+        if include_manifest_capabilities is not None:
+            query['include_manifest_capabilities'] = include_manifest_capabilities
         if max_results is not None: query['max_results'] = max_results
         if page_token is not None: query['page_token'] = page_token
         if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern
@@ -9301,6 +9511,55 @@ def update(self, full_name: str, *, owner: Optional[str] = None):
         self._api.do('PATCH', f'/api/2.1/unity-catalog/tables/{full_name}', body=body, headers=headers)
 
 
+class TemporaryTableCredentialsAPI:
+    """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
+    locationswhere table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
+    has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
+    Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports
+    temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in
+    scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table
+    credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
+    the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
+    by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
+    catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
+    security reason."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def generate_temporary_table_credentials(
+            self,
+            *,
+            operation: Optional[TableOperation] = None,
+            table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse:
+        """Generate a temporary table credential.
+        
+        Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
+        must have the external_access_enabled flag set to true (it is false by default). The caller must
+        have the EXTERNAL_USE_SCHEMA privilege on the parent schema; this privilege can only be granted by
+        catalog owners.
+        
+        :param operation: :class:`TableOperation` (optional)
+          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+          specified, the credentials returned will have write permissions; otherwise, they will be read-only.
+        :param table_id: str (optional)
+          UUID of the table to read or write.
+        
+        :returns: :class:`GenerateTemporaryTableCredentialResponse`
+        """
+        body = {}
+        if operation is not None: body['operation'] = operation.value
+        if table_id is not None: body['table_id'] = table_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           '/api/2.0/unity-catalog/temporary-table-credentials',
+                           body=body,
+                           headers=headers)
+        return GenerateTemporaryTableCredentialResponse.from_dict(res)
+
+
 class VolumesAPI:
     """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing
     files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
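Taken together, these catalog changes add a credential-vending surface next to the existing table APIs. A minimal sketch of exercising it, assuming a configured `WorkspaceClient`, a hypothetical table name, and that the client exposes the new API as `w.temporary_table_credentials` (the attribute name is inferred from the class name):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import TableOperation

    w = WorkspaceClient()

    # Resolve the table UUID first; the credential API takes an id, not a name.
    # 'main.default.my_table' is a hypothetical three-level name.
    table = w.tables.get('main.default.my_table', include_manifest_capabilities=True)

    # Request a short-lived, read-only credential for the table's storage location.
    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
        operation=TableOperation.READ, table_id=table.table_id)

Depending on where the table's data lives, the response carries cloud-specific fields (for Cloudflare R2, the `R2Credentials` shape above).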
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 567518222..4a77496de 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -598,8 +598,13 @@ class ClusterAttributes:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -882,8 +887,13 @@ class ClusterDetails:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1596,8 +1606,13 @@ class ClusterSpec:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1912,8 +1927,13 @@ class CreateCluster:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -2759,8 +2779,13 @@ class EditCluster:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -5647,8 +5672,13 @@ def from_dict(cls, d: Dict[str, any]) -> Results:
 
 
 class RuntimeEngine(Enum):
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     NULL = 'NULL'
     PHOTON = 'PHOTON'
@@ -6181,8 +6211,13 @@ class UpdateClusterResource:
     """The ID of the cluster policy used to create the cluster if applicable."""
 
     runtime_engine: Optional[RuntimeEngine] = None
-    """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
-    engine is inferred from spark_version."""
+    """Determines the cluster's runtime engine, either standard or Photon.
+    
+    This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+    `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+    
+    If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+    `-photon-`, in which case Photon will be used."""
 
     single_user_name: Optional[str] = None
     """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -6805,6 +6840,11 @@ def create(self,
         If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
         Otherwise the cluster will terminate with an informative error message.
         
+        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+        the [create compute UI] and then copying the generated JSON definition from the UI.
+        
+        [create compute UI]: https://docs.databricks.com/compute/configure.html
+        
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -6900,8 +6940,13 @@ def create(self,
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -7194,8 +7239,13 @@ def edit(self,
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
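The reworded `runtime_engine` guidance maps onto a create call like the sketch below, assuming a configured `WorkspaceClient`; the cluster name, Spark version, and node type are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import RuntimeEngine

    w = WorkspaceClient()

    # Keep '-photon-' out of spark_version and select Photon explicitly,
    # as the updated docstrings recommend.
    cluster = w.clusters.create_and_wait(cluster_name='photon-example',
                                         spark_version='15.3.x-scala2.12',
                                         node_type_id='i3.xlarge',
                                         num_workers=1,
                                         runtime_engine=RuntimeEngine.PHOTON)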
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 7169531e5..6e85cf45c 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -31,7 +31,11 @@ class CreateDashboardRequest:
 
     serialized_dashboard: Optional[str] = None
     """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses."""
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the dashboard."""
@@ -170,7 +174,11 @@ class Dashboard:
 
     serialized_dashboard: Optional[str] = None
     """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses."""
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     update_time: Optional[str] = None
     """The timestamp of when the dashboard was last updated by the user. This field is excluded in List
@@ -382,8 +390,9 @@ class GenieMessage:
 
     status: Optional[MessageStatus] = None
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
-    sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
-    `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
+    sources. * `FILTERING_CONTEXT`: Running the smart context step to determine relevant context. *
+    `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `EXECUTING_QUERY`:
+    Executing the AI-provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -615,8 +624,9 @@ class MessageErrorType(Enum):
 
 class MessageStatus(Enum):
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
-    sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. *
-    `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling
+    sources. * `FILTERING_CONTEXT`: Running the smart context step to determine relevant context. *
+    `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `EXECUTING_QUERY`:
+    Executing the AI-provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -632,6 +642,7 @@ class MessageStatus(Enum):
     EXECUTING_QUERY = 'EXECUTING_QUERY'
     FAILED = 'FAILED'
     FETCHING_METADATA = 'FETCHING_METADATA'
+    FILTERING_CONTEXT = 'FILTERING_CONTEXT'
     QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
     SUBMITTED = 'SUBMITTED'
 
@@ -1028,7 +1039,11 @@ class UpdateDashboardRequest:
 
     serialized_dashboard: Optional[str] = None
     """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses."""
+    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
+    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
+    that represents the dashboard's layout and components.
+    
+    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
 
     warehouse_id: Optional[str] = None
     """The warehouse ID used to run the dashboard."""
@@ -1308,7 +1323,11 @@ def create(self,
           slash. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
           The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses.
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
@@ -1723,7 +1742,11 @@ def update(self,
           not been modified since the last read. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
           The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses.
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
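The expanded `serialized_dashboard` documentation describes a get-then-create workflow; a minimal sketch, assuming a configured `WorkspaceClient` and a hypothetical dashboard ID:

    import json

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # List responses omit serialized_dashboard, so fetch the full dashboard.
    src = w.lakeview.get('abc123')  # hypothetical dashboard_id

    # The field is a JSON string describing the dashboard's layout and components.
    layout = json.loads(src.serialized_dashboard)

    # Reuse the structure as a starting point for a new dashboard.
    copy = w.lakeview.create(display_name=f'Copy of {src.display_name}',
                             serialized_dashboard=json.dumps(layout),
                             warehouse_id=src.warehouse_id)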
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index da6cd586c..e7fbddb48 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -505,7 +505,11 @@ class CreateJob:
     well as when this job is deleted."""
 
     environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+    """A list of task execution environment specifications that can be referenced by serverless tasks
+    of this job. An environment is required to be present for serverless tasks. For serverless
+    notebook tasks, the environment is accessible in the notebook environment panel. For other
+    serverless tasks, the task environment is required to be specified using environment_key in the
+    task settings."""
 
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
@@ -553,12 +557,11 @@ class CreateJob:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified;
+    otherwise, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1462,7 +1465,8 @@ class JobEditMode(Enum):
 @dataclass
 class JobEmailNotifications:
     no_alert_for_skipped_runs: Optional[bool] = None
-    """If true, do not send email to recipients specified in `on_failure` if the run is skipped."""
+    """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This
+    field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field."""
 
     on_duration_warning_threshold_exceeded: Optional[List[str]] = None
     """A list of email addresses to be notified when the duration of a run exceeds the threshold
@@ -1720,12 +1724,11 @@ def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
 
 @dataclass
 class JobRunAs:
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified;
+    otherwise, an error is thrown."""
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -1773,7 +1776,11 @@ class JobSettings:
     well as when this job is deleted."""
 
     environments: Optional[List[JobEnvironment]] = None
-    """A list of task execution environment specifications that can be referenced by tasks of this job."""
+    """A list of task execution environment specifications that can be referenced by serverless tasks
+    of this job. An environment is required to be present for serverless tasks. For serverless
+    notebook tasks, the environment is accessible in the notebook environment panel. For other
+    serverless tasks, the task environment is required to be specified using environment_key in the
+    task settings."""
 
     format: Optional[Format] = None
     """Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls.
@@ -1821,12 +1828,11 @@ class JobSettings:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user
-    or service principal that the job runs as. If not specified, the job runs as the user who
-    created the job.
+    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
+    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
-    is thrown."""
+    Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified;
+    otherwise, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -3617,9 +3623,11 @@ class RunResultState(Enum):
     reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. *
     `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were
     successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. *
-    `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled."""
+    `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`:
+    The run was skipped because it was disabled explicitly by the user."""
 
     CANCELED = 'CANCELED'
+    DISABLED = 'DISABLED'
     EXCLUDED = 'EXCLUDED'
     FAILED = 'FAILED'
     MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
@@ -5034,7 +5042,8 @@ def from_dict(cls, d: Dict[str, any]) -> TaskDependency:
 @dataclass
 class TaskEmailNotifications:
     no_alert_for_skipped_runs: Optional[bool] = None
-    """If true, do not send email to recipients specified in `on_failure` if the run is skipped."""
+    """If true, do not send email to recipients specified in `on_failure` if the run is skipped. This
+    field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field."""
 
     on_duration_warning_threshold_exceeded: Optional[List[str]] = None
     """A list of email addresses to be notified when the duration of a run exceeds the threshold
@@ -5128,36 +5137,36 @@ def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
 
 class TerminationCodeCode(Enum):
     """The code indicates why the run was terminated. Additional codes might be introduced in future
-    releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled
-    during execution by the Databricks platform; for example, if the maximum run duration was
-    exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the
-    dependency type condition was not met, or there were no material tasks to execute. *
-    `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for
-    further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the
-    Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state
-    message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due
-    to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The
-    run failed because it issued an invalid request to start the cluster. *
-    `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of
-    concurrent active runs. Consider scheduling the runs over a larger time frame. *
-    `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the
-    workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize
-    requests have exceeded the allotted rate limit. Consider spreading the run execution over a
-    larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the
-    customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`:
-    The run was completed with task failures. For more details, refer to the state message or run
-    output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a
-    resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The
-    run failed while installing the user-requested library. Refer to the state message for further
-    details. The causes might include, but are not limited to: The provided library is invalid,
-    there are insufficient permissions to install the library, and so forth. *
-    `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs
-    set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has
-    already reached the maximum number of contexts it is configured to create. See: [Link]. *
-    `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state
-    message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid
-    configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed
-    due to a cloud provider issue. Refer to the state message for further details. *
+    releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was
+    successfully canceled during execution by a user. * `CANCELED`: The run was canceled during
+    execution by the Databricks platform; for example, if the maximum run duration was exceeded. *
+    `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency
+    type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The
+    run encountered an unexpected error. Refer to the state message for further details. *
+    `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. *
+    `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further
+    details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when
+    communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because
+    it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The
+    workspace has reached the quota for the maximum number of concurrent active runs. Consider
+    scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it
+    tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The
+    number of cluster creation, start, and upsize requests has exceeded the allotted rate limit.
+    Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run
+    failed due to an error when accessing the customer blob storage. Refer to the state message for
+    further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more
+    details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to
+    a permission issue while accessing a resource. Refer to the state message for further details. *
+    `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer
+    to the state message for further details. The causes might include, but are not limited to: The
+    provided library is invalid, there are insufficient permissions to install the library, and so
+    forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum
+    concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a
+    cluster that has already reached the maximum number of contexts it is configured to create. See:
+    [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to
+    the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an
+    invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The
+    run failed due to a cloud provider issue. Refer to the state message for further details. *
     `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
     limit.
     
@@ -5183,6 +5192,7 @@ class TerminationCodeCode(Enum):
     STORAGE_ACCESS_ERROR = 'STORAGE_ACCESS_ERROR'
     SUCCESS = 'SUCCESS'
     UNAUTHORIZED_ERROR = 'UNAUTHORIZED_ERROR'
+    USER_CANCELED = 'USER_CANCELED'
     WORKSPACE_RUN_LIMIT_EXCEEDED = 'WORKSPACE_RUN_LIMIT_EXCEEDED'
 
 
@@ -5190,36 +5200,36 @@ class TerminationCodeCode(Enum):
 class TerminationDetails:
     code: Optional[TerminationCodeCode] = None
     """The code indicates why the run was terminated. Additional codes might be introduced in future
-    releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled
-    during execution by the Databricks platform; for example, if the maximum run duration was
-    exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the
-    dependency type condition was not met, or there were no material tasks to execute. *
-    `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for
-    further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the
-    Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state
-    message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due
-    to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The
-    run failed because it issued an invalid request to start the cluster. *
-    `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of
-    concurrent active runs. Consider scheduling the runs over a larger time frame. *
-    `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the
-    workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize
-    requests have exceeded the allotted rate limit. Consider spreading the run execution over a
-    larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the
-    customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`:
-    The run was completed with task failures. For more details, refer to the state message or run
-    output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a
-    resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The
-    run failed while installing the user-requested library. Refer to the state message for further
-    details. The causes might include, but are not limited to: The provided library is invalid,
-    there are insufficient permissions to install the library, and so forth. *
-    `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs
-    set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has
-    already reached the maximum number of contexts it is configured to create. See: [Link]. *
-    `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state
-    message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid
-    configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed
-    due to a cloud provider issue. Refer to the state message for further details. *
+    releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was
+    successfully canceled during execution by a user. * `CANCELED`: The run was canceled during
+    execution by the Databricks platform; for example, if the maximum run duration was exceeded. *
+    `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency
+    type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The
+    run encountered an unexpected error. Refer to the state message for further details. *
+    `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. *
+    `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further
+    details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when
+    communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because
+    it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The
+    workspace has reached the quota for the maximum number of concurrent active runs. Consider
+    scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it
+    tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The
+    number of cluster creation, start, and upsize requests has exceeded the allotted rate limit.
+    Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run
+    failed due to an error when accessing the customer blob storage. Refer to the state message for
+    further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more
+    details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to
+    a permission issue while accessing a resource. Refer to the state message for further details. *
+    `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer
+    to the state message for further details. The causes might include, but are not limited to: The
+    provided library is invalid, there are insufficient permissions to install the library, and so
+    forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum
+    concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a
+    cluster that has already reached the maximum number of contexts it is configured to create. See:
+    [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to
+    the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an
+    invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The
+    run failed due to a cloud provider issue. Refer to the state message for further details. *
     `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size
     limit.
     
@@ -5649,7 +5659,10 @@ def create(self,
           An optional set of email addresses that is notified when runs of this job begin or complete as well
           as when this job is deleted.
         :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by tasks of this job.
+          A list of task execution environment specifications that can be referenced by serverless tasks of
+          this job. An environment must be present for serverless tasks. For serverless notebook tasks, the
+          environment is accessible in the notebook environment panel. For other serverless tasks, the task
+          environment must be specified using `environment_key` in the task settings.
         :param format: :class:`Format` (optional)
           Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
           using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
@@ -5686,12 +5699,11 @@ def create(self,
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user or
-          service principal that the job runs as. If not specified, the job runs as the user who created the
-          job.
+          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
+          not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
+          Exactly one of `user_name`, `service_principal_name`, or `group_name` should be specified;
+          otherwise, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
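The `environments` and `run_as` changes combine as in the sketch below, assuming a configured `WorkspaceClient`; the job name, script path, and service principal application ID are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute, jobs

    w = WorkspaceClient()

    # Serverless tasks must reference an environment through environment_key.
    env = jobs.JobEnvironment(environment_key='default',
                              spec=compute.Environment(client='1', dependencies=['requests']))

    # run_as: set exactly one of user_name, service_principal_name, or group_name.
    job = w.jobs.create(
        name='serverless-example',
        environments=[env],
        run_as=jobs.JobRunAs(service_principal_name='00000000-0000-0000-0000-000000000000'),
        tasks=[jobs.Task(task_key='main',
                         environment_key='default',
                         spark_python_task=jobs.SparkPythonTask(python_file='/Shared/main.py'))])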
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index ae76632ef..f102bdc9d 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -25,6 +25,9 @@ class CreatePipeline:
     allow_duplicate_names: Optional[bool] = None
     """If false, deployment will fail if name conflicts with that of another pipeline."""
 
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -79,6 +82,10 @@ class CreatePipeline:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -97,6 +104,7 @@ def as_dict(self) -> dict:
         """Serializes the CreatePipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -114,6 +122,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -124,6 +133,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
         return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
@@ -141,6 +151,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -236,6 +247,9 @@ class EditPipeline:
     allow_duplicate_names: Optional[bool] = None
     """If false, deployment will fail if name has changed and conflicts the name of another pipeline."""
 
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -295,6 +309,10 @@ class EditPipeline:
     pipeline_id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -313,6 +331,7 @@ def as_dict(self) -> dict:
         """Serializes the EditPipeline into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -332,6 +351,7 @@ def as_dict(self) -> dict:
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -342,6 +362,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
         return cls(allow_duplicate_names=d.get('allow_duplicate_names', None),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
@@ -360,6 +381,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
                    pipeline_id=d.get('pipeline_id', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -477,6 +499,9 @@ class GetPipelineResponse:
     creator_user_name: Optional[str] = None
     """The username of the pipeline creator."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """Serverless budget policy ID of this pipeline."""
+
     health: Optional[GetPipelineResponseHealth] = None
     """The health of a pipeline."""
 
@@ -507,6 +532,8 @@ def as_dict(self) -> dict:
         if self.cause is not None: body['cause'] = self.cause
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.health is not None: body['health'] = self.health.value
         if self.last_modified is not None: body['last_modified'] = self.last_modified
         if self.latest_updates: body['latest_updates'] = [v.as_dict() for v in self.latest_updates]
@@ -523,6 +550,7 @@ def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse:
         return cls(cause=d.get('cause', None),
                    cluster_id=d.get('cluster_id', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    health=_enum(d, 'health', GetPipelineResponseHealth),
                    last_modified=d.get('last_modified', None),
                    latest_updates=_repeated_dict(d, 'latest_updates', UpdateStateInfo),
@@ -1376,6 +1404,9 @@ def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest:
 
 @dataclass
 class PipelineSpec:
+    budget_policy_id: Optional[str] = None
+    """Budget policy of this pipeline."""
+
     catalog: Optional[str] = None
     """A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified,
     tables in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -1428,6 +1459,10 @@ class PipelineSpec:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    schema: Optional[str] = None
+    """The default schema (database) where tables are read from or published to. The presence of this
+    field implies that the pipeline is in direct publishing mode."""
+
     serverless: Optional[bool] = None
     """Whether serverless compute is enabled for this pipeline."""
 
@@ -1445,6 +1480,7 @@ class PipelineSpec:
     def as_dict(self) -> dict:
         """Serializes the PipelineSpec into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.catalog is not None: body['catalog'] = self.catalog
         if self.channel is not None: body['channel'] = self.channel
         if self.clusters: body['clusters'] = [v.as_dict() for v in self.clusters]
@@ -1461,6 +1497,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
         if self.target is not None: body['target'] = self.target
@@ -1470,7 +1507,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
-        return cls(catalog=d.get('catalog', None),
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   catalog=d.get('catalog', None),
                    channel=d.get('channel', None),
                    clusters=_repeated_dict(d, 'clusters', PipelineCluster),
                    configuration=d.get('configuration', None),
@@ -1486,6 +1524,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
                    target=d.get('target', None),
@@ -2098,6 +2137,7 @@ def wait_get_pipeline_running(
     def create(self,
                *,
                allow_duplicate_names: Optional[bool] = None,
+               budget_policy_id: Optional[str] = None,
                catalog: Optional[str] = None,
                channel: Optional[str] = None,
                clusters: Optional[List[PipelineCluster]] = None,
@@ -2115,6 +2155,7 @@ def create(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
                target: Optional[str] = None,
@@ -2126,6 +2167,8 @@ def create(self,
         
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -2162,6 +2205,9 @@ def create(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
@@ -2176,6 +2222,7 @@ def create(self,
         """
         body = {}
         if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if catalog is not None: body['catalog'] = catalog
         if channel is not None: body['channel'] = channel
         if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
@@ -2193,6 +2240,7 @@ def create(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
         if target is not None: body['target'] = target
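
For reference, a minimal sketch of creating a pipeline that exercises the two new fields, assuming a configured `WorkspaceClient`; the pipeline name, catalog, schema, budget policy ID, and notebook path are hypothetical placeholders (`update` below accepts the same two fields):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import pipelines

w = WorkspaceClient()

created = w.pipelines.create(
    name='direct-publishing-pipeline',        # hypothetical name
    catalog='main',                           # hypothetical catalog
    schema='my_schema',                       # presence of `schema` implies direct publishing mode
    budget_policy_id='<budget-policy-uuid>',  # hypothetical policy ID
    serverless=True,
    libraries=[
        pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(
            path='/Users/someone@example.com/dlt_notebook'))  # hypothetical path
    ])
print(created.pipeline_id)
```
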
@@ -2506,6 +2554,7 @@ def update(self,
                pipeline_id: str,
                *,
                allow_duplicate_names: Optional[bool] = None,
+               budget_policy_id: Optional[str] = None,
                catalog: Optional[str] = None,
                channel: Optional[str] = None,
                clusters: Optional[List[PipelineCluster]] = None,
@@ -2523,6 +2572,7 @@ def update(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
                target: Optional[str] = None,
@@ -2535,6 +2585,8 @@ def update(self,
           Unique identifier for this pipeline.
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name has changed and conflicts with the name of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -2573,6 +2625,9 @@ def update(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
@@ -2587,6 +2642,7 @@ def update(self,
         """
         body = {}
         if allow_duplicate_names is not None: body['allow_duplicate_names'] = allow_duplicate_names
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if catalog is not None: body['catalog'] = catalog
         if channel is not None: body['channel'] = channel
         if clusters is not None: body['clusters'] = [v.as_dict() for v in clusters]
@@ -2604,6 +2660,7 @@ def update(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
         if target is not None: body['target'] = target
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index e41f34a63..7639d96fb 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -50,6 +50,222 @@ def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig:
                    ai21labs_api_key_plaintext=d.get('ai21labs_api_key_plaintext', None))
 
 
+@dataclass
+class AiGatewayConfig:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
+
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality."""
+
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
+
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig:
+        """Deserializes the AiGatewayConfig from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+
+
+@dataclass
+class AiGatewayGuardrailParameters:
+    invalid_keywords: Optional[List[str]] = None
+    """List of invalid keywords. AI guardrail uses keyword or string matching to decide if the keyword
+    exists in the request or response content."""
+
+    pii: Optional[AiGatewayGuardrailPiiBehavior] = None
+    """Configuration for guardrail PII filter."""
+
+    safety: Optional[bool] = None
+    """Indicates whether the safety filter is enabled."""
+
+    valid_topics: Optional[List[str]] = None
+    """The list of allowed topics. Given a chat request, this guardrail flags the request if its topic
+    is not in the allowed topics."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailParameters into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.invalid_keywords: body['invalid_keywords'] = [v for v in self.invalid_keywords]
+        if self.pii: body['pii'] = self.pii.as_dict()
+        if self.safety is not None: body['safety'] = self.safety
+        if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
+        """Deserializes the AiGatewayGuardrailParameters from a dictionary."""
+        return cls(invalid_keywords=d.get('invalid_keywords', None),
+                   pii=_from_dict(d, 'pii', AiGatewayGuardrailPiiBehavior),
+                   safety=d.get('safety', None),
+                   valid_topics=d.get('valid_topics', None))
+
+
+@dataclass
+class AiGatewayGuardrailPiiBehavior:
+    behavior: AiGatewayGuardrailPiiBehaviorBehavior
+    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
+    guardrail and the request contains PII, the request is not sent to the model server and 400
+    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
+    contains PII, the PII info in the response is redacted and 400 status code is returned."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.behavior is not None: body['behavior'] = self.behavior.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
+        """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary."""
+        return cls(behavior=_enum(d, 'behavior', AiGatewayGuardrailPiiBehaviorBehavior))
+
+
+class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
+    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
+    guardrail and the request contains PII, the request is not sent to the model server and 400
+    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
+    contains PII, the PII info in the response is redacted and 400 status code is returned."""
+
+    BLOCK = 'BLOCK'
+    NONE = 'NONE'
+
+
+@dataclass
+class AiGatewayGuardrails:
+    input: Optional[AiGatewayGuardrailParameters] = None
+    """Configuration for input guardrail filters."""
+
+    output: Optional[AiGatewayGuardrailParameters] = None
+    """Configuration for output guardrail filters."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.input: body['input'] = self.input.as_dict()
+        if self.output: body['output'] = self.output.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails:
+        """Deserializes the AiGatewayGuardrails from a dictionary."""
+        return cls(input=_from_dict(d, 'input', AiGatewayGuardrailParameters),
+                   output=_from_dict(d, 'output', AiGatewayGuardrailParameters))
+
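
A short sketch of how these guardrail types compose, using only classes added in this diff; the keyword and topic lists are hypothetical:

```python
from databricks.sdk.service.serving import (AiGatewayGuardrailParameters,
                                            AiGatewayGuardrailPiiBehavior,
                                            AiGatewayGuardrailPiiBehaviorBehavior,
                                            AiGatewayGuardrails)

# Block PII on both input and output, and restrict inputs to hypothetical
# example topics and keyword filters.
guardrails = AiGatewayGuardrails(
    input=AiGatewayGuardrailParameters(
        pii=AiGatewayGuardrailPiiBehavior(behavior=AiGatewayGuardrailPiiBehaviorBehavior.BLOCK),
        invalid_keywords=['password', 'credit card'],
        valid_topics=['billing', 'support'],
        safety=True),
    output=AiGatewayGuardrailParameters(
        pii=AiGatewayGuardrailPiiBehavior(behavior=AiGatewayGuardrailPiiBehaviorBehavior.BLOCK)))
```
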
+
+@dataclass
+class AiGatewayInferenceTableConfig:
+    catalog_name: Optional[str] = None
+    """The name of the catalog in Unity Catalog. Required when enabling inference tables. NOTE: On
+    update, you have to disable inference table first in order to change the catalog name."""
+
+    enabled: Optional[bool] = None
+    """Indicates whether the inference table is enabled."""
+
+    schema_name: Optional[str] = None
+    """The name of the schema in Unity Catalog. Required when enabling inference tables. NOTE: On
+    update, you have to disable inference table first in order to change the schema name."""
+
+    table_name_prefix: Optional[str] = None
+    """The prefix of the table in Unity Catalog. NOTE: On update, you have to disable inference table
+    first in order to change the prefix name."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayInferenceTableConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig:
+        """Deserializes the AiGatewayInferenceTableConfig from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   enabled=d.get('enabled', None),
+                   schema_name=d.get('schema_name', None),
+                   table_name_prefix=d.get('table_name_prefix', None))
+
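
A sketch of enabling payload logging with this class; the catalog and schema names are hypothetical:

```python
from databricks.sdk.service.serving import AiGatewayInferenceTableConfig

# Per the NOTEs above, the table must be disabled before the catalog name,
# schema name, or table prefix can be changed.
inference_tables = AiGatewayInferenceTableConfig(enabled=True,
                                                 catalog_name='main',
                                                 schema_name='ai_gateway_logs',
                                                 table_name_prefix='chat_endpoint')
```
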
+
+@dataclass
+class AiGatewayRateLimit:
+    calls: int
+    """Used to specify how many calls are allowed for a key within the renewal_period."""
+
+    renewal_period: AiGatewayRateLimitRenewalPeriod
+    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
+
+    key: Optional[AiGatewayRateLimitKey] = None
+    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
+    being the default if not specified."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayRateLimit into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key.value
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
+        """Deserializes the AiGatewayRateLimit from a dictionary."""
+        return cls(calls=d.get('calls', None),
+                   key=_enum(d, 'key', AiGatewayRateLimitKey),
+                   renewal_period=_enum(d, 'renewal_period', AiGatewayRateLimitRenewalPeriod))
+
+
+class AiGatewayRateLimitKey(Enum):
+    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
+    being the default if not specified."""
+
+    ENDPOINT = 'endpoint'
+    USER = 'user'
+
+
+class AiGatewayRateLimitRenewalPeriod(Enum):
+    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
+
+    MINUTE = 'minute'
+
+
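
A sketch of a per-user limit using the fields above:

```python
from databricks.sdk.service.serving import (AiGatewayRateLimit, AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod)

# 100 calls per minute for each user; omitting `key` would apply the limit
# to the endpoint as a whole ('endpoint' is the default).
per_user_limit = AiGatewayRateLimit(calls=100,
                                    key=AiGatewayRateLimitKey.USER,
                                    renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE)
```
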
+@dataclass
+class AiGatewayUsageTrackingConfig:
+    enabled: Optional[bool] = None
+    """Whether to enable usage tracking."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AiGatewayUsageTrackingConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig:
+        """Deserializes the AiGatewayUsageTrackingConfig from a dictionary."""
+        return cls(enabled=d.get('enabled', None))
+
+
 @dataclass
 class AmazonBedrockConfig:
     aws_region: str
@@ -319,9 +535,13 @@ class CreateServingEndpoint:
     config: EndpointCoreConfigInput
     """The core config of the serving endpoint."""
 
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+    supported as of now."""
+
     rate_limits: Optional[List[RateLimit]] = None
-    """Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-    endpoints are supported as of now."""
+    """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+    Gateway to manage rate limits."""
 
     route_optimized: Optional[bool] = None
     """Enable route optimization for the serving endpoint."""
@@ -332,6 +552,7 @@ class CreateServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the CreateServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
@@ -342,7 +563,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
         """Deserializes the CreateServingEndpoint from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigInput),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigInput),
                    name=d.get('name', None),
                    rate_limits=_repeated_dict(d, 'rate_limits', RateLimit),
                    route_optimized=d.get('route_optimized', None),
@@ -1119,6 +1341,42 @@ def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
                    status_message=d.get('status_message', None))
 
 
+@dataclass
+class PutAiGatewayResponse:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
+
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality."""
+
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
+
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutAiGatewayResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
+        """Deserializes the PutAiGatewayResponse from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+
+
 @dataclass
 class PutResponse:
     rate_limits: Optional[List[RateLimit]] = None
@@ -1905,6 +2163,10 @@ def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
 
 @dataclass
 class ServingEndpoint:
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
+    currently supported."""
+
     config: Optional[EndpointCoreConfigSummary] = None
     """The config that is currently being served by the endpoint."""
 
@@ -1936,6 +2198,7 @@ class ServingEndpoint:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpoint into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
         if self.creator is not None: body['creator'] = self.creator
@@ -1951,7 +2214,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint:
         """Deserializes the ServingEndpoint from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigSummary),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigSummary),
                    creation_timestamp=d.get('creation_timestamp', None),
                    creator=d.get('creator', None),
                    id=d.get('id', None),
@@ -2035,6 +2299,10 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
 
 @dataclass
 class ServingEndpointDetailed:
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
+    currently supported."""
+
     config: Optional[EndpointCoreConfigOutput] = None
     """The config that is currently being served by the endpoint."""
 
@@ -2081,6 +2349,7 @@ class ServingEndpointDetailed:
     def as_dict(self) -> dict:
         """Serializes the ServingEndpointDetailed into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway.as_dict()
         if self.config: body['config'] = self.config.as_dict()
         if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
         if self.creator is not None: body['creator'] = self.creator
@@ -2101,7 +2370,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
         """Deserializes the ServingEndpointDetailed from a dictionary."""
-        return cls(config=_from_dict(d, 'config', EndpointCoreConfigOutput),
+        return cls(ai_gateway=_from_dict(d, 'ai_gateway', AiGatewayConfig),
+                   config=_from_dict(d, 'config', EndpointCoreConfigOutput),
                    creation_timestamp=d.get('creation_timestamp', None),
                    creator=d.get('creator', None),
                    data_plane_info=_from_dict(d, 'data_plane_info', ModelDataPlaneInfo),
@@ -2353,6 +2623,7 @@ def create(self,
                name: str,
                config: EndpointCoreConfigInput,
                *,
+               ai_gateway: Optional[AiGatewayConfig] = None,
                rate_limits: Optional[List[RateLimit]] = None,
                route_optimized: Optional[bool] = None,
                tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
@@ -2363,9 +2634,12 @@ def create(self,
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
-          Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-          endpoints are supported as of now.
+          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated; please use AI
+          Gateway to manage rate limits.
         :param route_optimized: bool (optional)
           Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
@@ -2376,6 +2650,7 @@ def create(self,
           See :method:wait_get_serving_endpoint_not_updating for more details.
         """
         body = {}
+        if ai_gateway is not None: body['ai_gateway'] = ai_gateway.as_dict()
         if config is not None: body['config'] = config.as_dict()
         if name is not None: body['name'] = name
         if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
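
A sketch of attaching an AI Gateway at endpoint creation time. The served-entity layout (`ServedEntityInput`, `ExternalModel`, `ExternalModelProvider`, `OpenAiConfig`) comes from elsewhere in this module and is an assumption here; the endpoint name, model, and secret reference are hypothetical:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (AiGatewayConfig, AiGatewayUsageTrackingConfig,
                                            EndpointCoreConfigInput, ExternalModel,
                                            ExternalModelProvider, OpenAiConfig,
                                            ServedEntityInput)

w = WorkspaceClient()

endpoint = w.serving_endpoints.create_and_wait(
    name='my-external-endpoint',  # hypothetical endpoint name
    config=EndpointCoreConfigInput(served_entities=[
        ServedEntityInput(name='gpt-4o',
                          external_model=ExternalModel(
                              provider=ExternalModelProvider.OPENAI,
                              name='gpt-4o',
                              task='llm/v1/chat',
                              openai_config=OpenAiConfig(
                                  openai_api_key='{{secrets/my_scope/openai_key}}')))
    ]),
    ai_gateway=AiGatewayConfig(
        usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True)))
```
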
@@ -2393,11 +2668,13 @@ def create_and_wait(
         name: str,
         config: EndpointCoreConfigInput,
         *,
+        ai_gateway: Optional[AiGatewayConfig] = None,
         rate_limits: Optional[List[RateLimit]] = None,
         route_optimized: Optional[bool] = None,
         tags: Optional[List[EndpointTag]] = None,
         timeout=timedelta(minutes=20)) -> ServingEndpointDetailed:
-        return self.create(config=config,
+        return self.create(ai_gateway=ai_gateway,
+                           config=config,
                            name=name,
                            rate_limits=rate_limits,
                            route_optimized=route_optimized,
@@ -2563,8 +2840,8 @@ def patch(self,
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
         """Update rate limits of a serving endpoint.
         
-        Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model
-        endpoints are supported as of now.
+        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+        currently supported. For external models, use AI Gateway to manage rate limits.
         
         :param name: str
           The name of the serving endpoint whose rate limits are being updated. This field is required.
@@ -2583,6 +2860,45 @@ def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> Pu
                            headers=headers)
         return PutResponse.from_dict(res)
 
+    def put_ai_gateway(
+            self,
+            name: str,
+            *,
+            guardrails: Optional[AiGatewayGuardrails] = None,
+            inference_table_config: Optional[AiGatewayInferenceTableConfig] = None,
+            rate_limits: Optional[List[AiGatewayRateLimit]] = None,
+            usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse:
+        """Update AI Gateway of a serving endpoint.
+        
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
+        supported.
+        
+        :param name: str
+          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+        :param guardrails: :class:`AiGatewayGuardrails` (optional)
+          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+          being sent to and received from model APIs and to improve model quality.
+        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+          Configuration for rate limits which can be set to limit endpoint traffic.
+        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+          Configuration to enable usage tracking using system tables. These tables allow you to monitor
+          operational usage on endpoints and their associated costs.
+        
+        :returns: :class:`PutAiGatewayResponse`
+        """
+        body = {}
+        if guardrails is not None: body['guardrails'] = guardrails.as_dict()
+        if inference_table_config is not None:
+            body['inference_table_config'] = inference_table_config.as_dict()
+        if rate_limits is not None: body['rate_limits'] = [v.as_dict() for v in rate_limits]
+        if usage_tracking_config is not None: body['usage_tracking_config'] = usage_tracking_config.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PUT', f'/api/2.0/serving-endpoints/{name}/ai-gateway', body=body, headers=headers)
+        return PutAiGatewayResponse.from_dict(res)
+
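
A minimal sketch of updating the gateway on an existing endpoint; the endpoint name is hypothetical, and the guardrail, inference table, and rate limit objects sketched earlier plug into the same call via the `guardrails`, `inference_table_config`, and `rate_limits` parameters:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import AiGatewayUsageTrackingConfig

w = WorkspaceClient()

resp = w.serving_endpoints.put_ai_gateway(
    name='my-external-endpoint',  # hypothetical existing external model endpoint
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True))
```
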
     def query(self,
               name: str,
               *,
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index d5593a1e1..fd7ed5dd8 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -50,6 +50,22 @@ def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class BooleanMessage:
+    value: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the BooleanMessage into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BooleanMessage:
+        """Deserializes the BooleanMessage from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class ClusterAutoRestartMessage:
     can_toggle: Optional[bool] = None
@@ -680,6 +696,54 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse:
         return cls(etag=d.get('etag', None))
 
 
+@dataclass
+class DeleteDisableLegacyAccessResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyAccessResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
+        """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
+@dataclass
+class DeleteDisableLegacyFeaturesResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyFeaturesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse:
+        """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteNetworkConnectivityConfigurationResponse:
 
@@ -765,6 +829,75 @@ class DestinationType(Enum):
     WEBHOOK = 'WEBHOOK'
 
 
+@dataclass
+class DisableLegacyAccess:
+    disable_legacy_access: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyAccess into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
+        """Deserializes the DisableLegacyAccess from a dictionary."""
+        return cls(disable_legacy_access=_from_dict(d, 'disable_legacy_access', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
+@dataclass
+class DisableLegacyFeatures:
+    disable_legacy_features: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyFeatures into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_features:
+            body['disable_legacy_features'] = self.disable_legacy_features.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
+        """Deserializes the DisableLegacyFeatures from a dictionary."""
+        return cls(disable_legacy_features=_from_dict(d, 'disable_legacy_features', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class EmailConfig:
     addresses: Optional[List[str]] = None
@@ -2114,6 +2247,9 @@ class TokenInfo:
     token_id: Optional[str] = None
     """ID of the token."""
 
+    workspace_id: Optional[int] = None
+    """If applicable, the ID of the workspace that the token was created in."""
+
     def as_dict(self) -> dict:
         """Serializes the TokenInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -2124,6 +2260,7 @@ def as_dict(self) -> dict:
         if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
         if self.owner_id is not None: body['owner_id'] = self.owner_id
         if self.token_id is not None: body['token_id'] = self.token_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
     @classmethod
@@ -2135,7 +2272,8 @@ def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
                    creation_time=d.get('creation_time', None),
                    expiry_time=d.get('expiry_time', None),
                    owner_id=d.get('owner_id', None),
-                   token_id=d.get('token_id', None))
+                   token_id=d.get('token_id', None),
+                   workspace_id=d.get('workspace_id', None))
 
 
 @dataclass
@@ -2235,6 +2373,7 @@ def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest:
 class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
 
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
     AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
 
 
@@ -2365,6 +2504,66 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest:
                    setting=_from_dict(d, 'setting', DefaultNamespaceSetting))
 
 
+@dataclass
+class UpdateDisableLegacyAccessRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyAccess
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
+        """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyAccess))
+
+
+@dataclass
+class UpdateDisableLegacyFeaturesRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyFeatures
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest:
+        """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyFeatures))
+
+
 @dataclass
 class UpdateEnhancedSecurityMonitoringSettingRequest:
     """Details required to update a setting."""
@@ -2791,6 +2990,7 @@ def __init__(self, api_client):
         self._api = api_client
 
         self._csp_enablement_account = CspEnablementAccountAPI(self._api)
+        self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api)
         self._esm_enablement_account = EsmEnablementAccountAPI(self._api)
         self._personal_compute = PersonalComputeAPI(self._api)
 
@@ -2799,6 +2999,11 @@ def csp_enablement_account(self) -> CspEnablementAccountAPI:
         """The compliance security profile settings at the account level control whether to enable it for new workspaces."""
         return self._csp_enablement_account
 
+    @property
+    def disable_legacy_features(self) -> DisableLegacyFeaturesAPI:
+        """Disable legacy features for new Databricks workspaces."""
+        return self._disable_legacy_features
+
     @property
     def esm_enablement_account(self) -> EsmEnablementAccountAPI:
         """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces."""
@@ -3152,6 +3357,188 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
         return DefaultNamespaceSetting.from_dict(res)
 
 
+class DisableLegacyAccessAPI:
+    """'Disabling legacy access' has the following impacts:
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+    Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
+    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+    Unity Catalog access on all path-based access."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyAccessResponse:
+        """Delete Legacy Access Disablement Status.
+        
+        Deletes the legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyAccessResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteDisableLegacyAccessResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyAccess:
+        """Retrieve Legacy Access Disablement Status.
+        
+        Retrieves the legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyAccess`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           query=query,
+                           headers=headers)
+        return DisableLegacyAccess.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyAccess,
+               field_mask: str) -> DisableLegacyAccess:
+        """Update Legacy Access Disablement Status.
+        
+        Updates the legacy access disablement status.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyAccess`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyAccess`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/disable_legacy_access/names/default',
+                           body=body,
+                           headers=headers)
+        return DisableLegacyAccess.from_dict(res)
+
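
A sketch of the read -> update pattern the docstrings describe, against a workspace-level client; the `field_mask` value is an assumption about which field is being changed:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import BooleanMessage, DisableLegacyAccess

w = WorkspaceClient()

# Read the current setting to obtain a fresh etag, then PATCH it back.
current = w.settings.disable_legacy_access.get()
updated = w.settings.disable_legacy_access.update(
    allow_missing=True,
    setting=DisableLegacyAccess(disable_legacy_access=BooleanMessage(value=True),
                                etag=current.etag),
    field_mask='disable_legacy_access.value')  # assumed field mask
```
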
+
+class DisableLegacyFeaturesAPI:
+    """Disable legacy features for new Databricks workspaces.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3 LTS."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyFeaturesResponse:
+        """Delete the disable legacy features setting.
+        
+        Deletes the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            query=query,
+            headers=headers)
+        return DeleteDisableLegacyFeaturesResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyFeatures:
+        """Get the disable legacy features setting.
+        
+        Gets the value of the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            query=query,
+            headers=headers)
+        return DisableLegacyFeatures.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
+               field_mask: str) -> DisableLegacyFeatures:
+        """Update the disable legacy features setting.
+        
+        Updates the value of the disable legacy features setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyFeatures`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyFeatures`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/disable_legacy_features/names/default',
+            body=body,
+            headers=headers)
+        return DisableLegacyFeatures.from_dict(res)
+
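
The account-level setting follows the same read -> update pattern, assuming an `AccountClient` configured with account credentials; the `field_mask` value is again an assumption:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import BooleanMessage, DisableLegacyFeatures

a = AccountClient()

# Read the current setting to obtain a fresh etag, then PATCH it back.
current = a.settings.disable_legacy_features.get()
updated = a.settings.disable_legacy_features.update(
    allow_missing=True,
    setting=DisableLegacyFeatures(disable_legacy_features=BooleanMessage(value=True),
                                  etag=current.etag),
    field_mask='disable_legacy_features.value')  # assumed field mask
```
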
+
 class EnhancedSecurityMonitoringAPI:
     """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
     security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
@@ -4023,6 +4410,7 @@ def __init__(self, api_client):
         self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api)
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
+        self._disable_legacy_access = DisableLegacyAccessAPI(self._api)
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
@@ -4041,6 +4429,11 @@ def default_namespace(self) -> DefaultNamespaceAPI:
         """The default namespace setting API allows users to configure the default namespace for a Databricks workspace."""
         return self._default_namespace
 
+    @property
+    def disable_legacy_access(self) -> DisableLegacyAccessAPI:
+        """'Disabling legacy access' has the following impacts: 1."""
+        return self._disable_legacy_access
+
     @property
     def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI:
         """Controls whether enhanced security monitoring is enabled for the current workspace."""
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index b77e5b5e6..348a27123 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -503,29 +503,6 @@ class ChannelName(Enum):
     CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
 
 
-@dataclass
-class ClientCallContext:
-    """Client code that triggered the request"""
-
-    file_name: Optional[EncodedText] = None
-    """File name that contains the last line that triggered the request."""
-
-    line_number: Optional[int] = None
-    """Last line number within a file or notebook cell that triggered the request."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ClientCallContext into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.file_name: body['file_name'] = self.file_name.as_dict()
-        if self.line_number is not None: body['line_number'] = self.line_number
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ClientCallContext:
-        """Deserializes the ClientCallContext from a dictionary."""
-        return cls(file_name=_from_dict(d, 'file_name', EncodedText), line_number=d.get('line_number', None))
-
-
 @dataclass
 class ColumnInfo:
     name: Optional[str] = None
@@ -1615,34 +1592,6 @@ def from_dict(cls, d: Dict[str, any]) -> Empty:
         return cls()
 
 
-@dataclass
-class EncodedText:
-    encoding: Optional[EncodedTextEncoding] = None
-    """Carry text data in different form."""
-
-    text: Optional[str] = None
-    """text data"""
-
-    def as_dict(self) -> dict:
-        """Serializes the EncodedText into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.encoding is not None: body['encoding'] = self.encoding.value
-        if self.text is not None: body['text'] = self.text
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> EncodedText:
-        """Deserializes the EncodedText from a dictionary."""
-        return cls(encoding=_enum(d, 'encoding', EncodedTextEncoding), text=d.get('text', None))
-
-
-class EncodedTextEncoding(Enum):
-    """Carry text data in different form."""
-
-    BASE64 = 'BASE64'
-    PLAIN = 'PLAIN'
-
-
 @dataclass
 class EndpointConfPair:
     key: Optional[str] = None
@@ -3447,8 +3396,6 @@ class QueryInfo:
     query_id: Optional[str] = None
     """The query ID."""
 
-    query_source: Optional[QuerySource] = None
-
     query_start_time_ms: Optional[int] = None
     """The time the query started."""
 
@@ -3496,7 +3443,6 @@ def as_dict(self) -> dict:
         if self.plans_state is not None: body['plans_state'] = self.plans_state.value
         if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
         if self.query_id is not None: body['query_id'] = self.query_id
-        if self.query_source: body['query_source'] = self.query_source.as_dict()
         if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
         if self.query_text is not None: body['query_text'] = self.query_text
         if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
@@ -3524,7 +3470,6 @@ def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
                    plans_state=_enum(d, 'plans_state', PlansState),
                    query_end_time_ms=d.get('query_end_time_ms', None),
                    query_id=d.get('query_id', None),
-                   query_source=_from_dict(d, 'query_source', QuerySource),
                    query_start_time_ms=d.get('query_start_time_ms', None),
                    query_text=d.get('query_text', None),
                    rows_produced=d.get('rows_produced', None),
@@ -3841,176 +3786,6 @@ def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
                    tags=d.get('tags', None))
 
 
-@dataclass
-class QuerySource:
-    alert_id: Optional[str] = None
-    """UUID"""
-
-    client_call_context: Optional[ClientCallContext] = None
-    """Client code that triggered the request"""
-
-    command_id: Optional[str] = None
-    """Id associated with a notebook cell"""
-
-    command_run_id: Optional[str] = None
-    """Id associated with a notebook run or execution"""
-
-    dashboard_id: Optional[str] = None
-    """UUID"""
-
-    dashboard_v3_id: Optional[str] = None
-    """UUID for Lakeview Dashboards, separate from DBSQL Dashboards (dashboard_id)"""
-
-    driver_info: Optional[QuerySourceDriverInfo] = None
-
-    entry_point: Optional[QuerySourceEntryPoint] = None
-    """Spark service that received and processed the query"""
-
-    genie_space_id: Optional[str] = None
-    """UUID for Genie space"""
-
-    is_cloud_fetch: Optional[bool] = None
-
-    is_databricks_sql_exec_api: Optional[bool] = None
-
-    job_id: Optional[str] = None
-
-    job_managed_by: Optional[QuerySourceJobManager] = None
-    """With background compute, jobs can be managed by different internal teams. When not specified,
-    not a background compute job When specified and the value is not JOBS, it is a background
-    compute job"""
-
-    notebook_id: Optional[str] = None
-
-    query_tags: Optional[str] = None
-    """String provided by a customer that'll help them identify the query"""
-
-    run_id: Optional[str] = None
-    """Id associated with a job run or execution"""
-
-    runnable_command_id: Optional[str] = None
-    """Id associated with a notebook cell run or execution"""
-
-    scheduled_by: Optional[QuerySourceTrigger] = None
-
-    serverless_channel_info: Optional[ServerlessChannelInfo] = None
-
-    source_query_id: Optional[str] = None
-    """UUID"""
-
-    def as_dict(self) -> dict:
-        """Serializes the QuerySource into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.alert_id is not None: body['alert_id'] = self.alert_id
-        if self.client_call_context: body['client_call_context'] = self.client_call_context.as_dict()
-        if self.command_id is not None: body['command_id'] = self.command_id
-        if self.command_run_id is not None: body['command_run_id'] = self.command_run_id
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.dashboard_v3_id is not None: body['dashboard_v3_id'] = self.dashboard_v3_id
-        if self.driver_info: body['driver_info'] = self.driver_info.as_dict()
-        if self.entry_point is not None: body['entry_point'] = self.entry_point.value
-        if self.genie_space_id is not None: body['genie_space_id'] = self.genie_space_id
-        if self.is_cloud_fetch is not None: body['is_cloud_fetch'] = self.is_cloud_fetch
-        if self.is_databricks_sql_exec_api is not None:
-            body['is_databricks_sql_exec_api'] = self.is_databricks_sql_exec_api
-        if self.job_id is not None: body['job_id'] = self.job_id
-        if self.job_managed_by is not None: body['job_managed_by'] = self.job_managed_by.value
-        if self.notebook_id is not None: body['notebook_id'] = self.notebook_id
-        if self.query_tags is not None: body['query_tags'] = self.query_tags
-        if self.run_id is not None: body['run_id'] = self.run_id
-        if self.runnable_command_id is not None: body['runnable_command_id'] = self.runnable_command_id
-        if self.scheduled_by is not None: body['scheduled_by'] = self.scheduled_by.value
-        if self.serverless_channel_info:
-            body['serverless_channel_info'] = self.serverless_channel_info.as_dict()
-        if self.source_query_id is not None: body['source_query_id'] = self.source_query_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> QuerySource:
-        """Deserializes the QuerySource from a dictionary."""
-        return cls(alert_id=d.get('alert_id', None),
-                   client_call_context=_from_dict(d, 'client_call_context', ClientCallContext),
-                   command_id=d.get('command_id', None),
-                   command_run_id=d.get('command_run_id', None),
-                   dashboard_id=d.get('dashboard_id', None),
-                   dashboard_v3_id=d.get('dashboard_v3_id', None),
-                   driver_info=_from_dict(d, 'driver_info', QuerySourceDriverInfo),
-                   entry_point=_enum(d, 'entry_point', QuerySourceEntryPoint),
-                   genie_space_id=d.get('genie_space_id', None),
-                   is_cloud_fetch=d.get('is_cloud_fetch', None),
-                   is_databricks_sql_exec_api=d.get('is_databricks_sql_exec_api', None),
-                   job_id=d.get('job_id', None),
-                   job_managed_by=_enum(d, 'job_managed_by', QuerySourceJobManager),
-                   notebook_id=d.get('notebook_id', None),
-                   query_tags=d.get('query_tags', None),
-                   run_id=d.get('run_id', None),
-                   runnable_command_id=d.get('runnable_command_id', None),
-                   scheduled_by=_enum(d, 'scheduled_by', QuerySourceTrigger),
-                   serverless_channel_info=_from_dict(d, 'serverless_channel_info', ServerlessChannelInfo),
-                   source_query_id=d.get('source_query_id', None))
-
-
-@dataclass
-class QuerySourceDriverInfo:
-    bi_tool_entry: Optional[str] = None
-
-    driver_name: Optional[str] = None
-
-    simba_branding_vendor: Optional[str] = None
-
-    version_number: Optional[str] = None
-
-    def as_dict(self) -> dict:
-        """Serializes the QuerySourceDriverInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.bi_tool_entry is not None: body['bi_tool_entry'] = self.bi_tool_entry
-        if self.driver_name is not None: body['driver_name'] = self.driver_name
-        if self.simba_branding_vendor is not None: body['simba_branding_vendor'] = self.simba_branding_vendor
-        if self.version_number is not None: body['version_number'] = self.version_number
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> QuerySourceDriverInfo:
-        """Deserializes the QuerySourceDriverInfo from a dictionary."""
-        return cls(bi_tool_entry=d.get('bi_tool_entry', None),
-                   driver_name=d.get('driver_name', None),
-                   simba_branding_vendor=d.get('simba_branding_vendor', None),
-                   version_number=d.get('version_number', None))
-
-
-class QuerySourceEntryPoint(Enum):
-    """Spark service that received and processed the query"""
-
-    DLT = 'DLT'
-    SPARK_CONNECT = 'SPARK_CONNECT'
-    THRIFT_SERVER = 'THRIFT_SERVER'
-
-
-class QuerySourceJobManager(Enum):
-    """Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to
-    accommodate JOB_MANAGER_UNSPECIFIED"""
-
-    APP_SYSTEM_TABLE = 'APP_SYSTEM_TABLE'
-    AUTOML = 'AUTOML'
-    AUTO_MAINTENANCE = 'AUTO_MAINTENANCE'
-    CLEAN_ROOMS = 'CLEAN_ROOMS'
-    DATA_MONITORING = 'DATA_MONITORING'
-    DATA_SHARING = 'DATA_SHARING'
-    ENCRYPTION = 'ENCRYPTION'
-    FABRIC_CRAWLER = 'FABRIC_CRAWLER'
-    JOBS = 'JOBS'
-    LAKEVIEW = 'LAKEVIEW'
-    MANAGED_RAG = 'MANAGED_RAG'
-    SCHEDULED_MV_REFRESH = 'SCHEDULED_MV_REFRESH'
-    TESTING = 'TESTING'
-
-
-class QuerySourceTrigger(Enum):
-
-    MANUAL = 'MANUAL'
-    SCHEDULED = 'SCHEDULED'
-
-
 class QueryStatementType(Enum):
 
     ALTER = 'ALTER'
@@ -4228,23 +4003,6 @@ class RunAsRole(Enum):
     VIEWER = 'viewer'
 
 
-@dataclass
-class ServerlessChannelInfo:
-    name: Optional[ChannelName] = None
-    """Name of the Channel"""
-
-    def as_dict(self) -> dict:
-        """Serializes the ServerlessChannelInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None: body['name'] = self.name.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ServerlessChannelInfo:
-        """Deserializes the ServerlessChannelInfo from a dictionary."""
-        return cls(name=_enum(d, 'name', ChannelName))
-
-
 @dataclass
 class ServiceError:
     error_code: Optional[ServiceErrorCode] = None
@@ -6800,7 +6558,9 @@ class StatementExecutionAPI:
     are approximate, occur server-side, and cannot account for things such as caller delays and network
     latency from caller to service. - The system will auto-close a statement after one hour if the client
     stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this.
+    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
+    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
+    Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
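
To make the lifecycle note added above concrete, here is a minimal sketch of driving a statement end-to-end through the SQL Statement Execution API: the same service submits, polls, and (if needed) cancels. The warehouse ID is a placeholder, and `wait_timeout='0s'` asks the API to return immediately so the caller polls explicitly.

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import StatementState

w = WorkspaceClient()

# Submit through the Statement Execution API; wait_timeout='0s' returns
# immediately with a statement_id we can poll ourselves.
resp = w.statement_execution.execute_statement(
    statement='SELECT 1',
    warehouse_id='<warehouse-id>',  # placeholder: your SQL warehouse ID
    wait_timeout='0s',
)

# Poll with the same service; the server auto-closes statements that are
# not polled at least once an hour.
for _ in range(60):
    status = w.statement_execution.get_statement(resp.statement_id).status
    if status.state not in (StatementState.PENDING, StatementState.RUNNING):
        break
    time.sleep(5)
else:
    # Still running after our polling budget: cancellation must also go
    # through the Statement Execution API, not e.g. the Jobs API.
    w.statement_execution.cancel_execution(resp.statement_id)
```

Per the docstring, results expire one hour after success and polling does not extend that window, so fetch them promptly once the state leaves `RUNNING`.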
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 7be221323..6c12c6039 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -64,11 +64,11 @@ def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata:
 
 
 @dataclass
-class CreateCredentials:
+class CreateCredentialsRequest:
     git_provider: str
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
 
     git_username: Optional[str] = None
     """The username or email provided with your Git provider account, depending on which provider you
@@ -79,13 +79,12 @@ class CreateCredentials:
 
     personal_access_token: Optional[str] = None
     """The personal access token used to authenticate to the corresponding Git provider. For certain
-    providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-    access token used to authenticate to the corresponding Git
+    providers, support may exist for other types of scoped access tokens. [Learn more].
     
     [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
 
     def as_dict(self) -> dict:
-        """Serializes the CreateCredentials into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CreateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.git_provider is not None: body['git_provider'] = self.git_provider
         if self.git_username is not None: body['git_username'] = self.git_username
@@ -93,8 +92,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateCredentials:
-        """Deserializes the CreateCredentials from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest:
+        """Deserializes the CreateCredentialsRequest from a dictionary."""
         return cls(git_provider=d.get('git_provider', None),
                    git_username=d.get('git_username', None),
                    personal_access_token=d.get('personal_access_token', None))
@@ -102,20 +101,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentials:
 
 @dataclass
 class CreateCredentialsResponse:
-    credential_id: Optional[int] = None
+    credential_id: int
     """ID of the credential object in the workspace."""
 
-    git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    git_provider: str
+    """The Git provider associated with the credential."""
 
     git_username: Optional[str] = None
-    """The username or email provided with your Git provider account, depending on which provider you
-    are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
-    username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
-    CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
-    see your provider's Personal Access Token authentication documentation to see what is supported."""
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
@@ -134,25 +128,25 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse:
 
 
 @dataclass
-class CreateRepo:
+class CreateRepoRequest:
     url: str
     """URL of the Git repository to be linked."""
 
     provider: str
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
 
     path: Optional[str] = None
     """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
-    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""
+    repo is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`."""
 
     sparse_checkout: Optional[SparseCheckout] = None
     """If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
     sparse checkout after the repo is created."""
 
     def as_dict(self) -> dict:
-        """Serializes the CreateRepo into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CreateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.path is not None: body['path'] = self.path
         if self.provider is not None: body['provider'] = self.provider
@@ -161,14 +155,61 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateRepo:
-        """Deserializes the CreateRepo from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest:
+        """Deserializes the CreateRepoRequest from a dictionary."""
         return cls(path=d.get('path', None),
                    provider=d.get('provider', None),
                    sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
                    url=d.get('url', None))
 
 
+@dataclass
+class CreateRepoResponse:
+    branch: Optional[str] = None
+    """Branch that the Git folder (repo) is checked out to."""
+
+    head_commit_id: Optional[str] = None
+    """SHA-1 hash representing the commit ID of the current HEAD of the Git folder (repo)."""
+
+    id: Optional[int] = None
+    """ID of the Git folder (repo) object in the workspace."""
+
+    path: Optional[str] = None
+    """Path of the Git folder (repo) in the workspace."""
+
+    provider: Optional[str] = None
+    """Git provider of the linked Git repository."""
+
+    sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout settings for the Git folder (repo)."""
+
+    url: Optional[str] = None
+    """URL of the linked Git repository."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse:
+        """Deserializes the CreateRepoResponse from a dictionary."""
+        return cls(branch=d.get('branch', None),
+                   head_commit_id=d.get('head_commit_id', None),
+                   id=d.get('id', None),
+                   path=d.get('path', None),
+                   provider=d.get('provider', None),
+                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
+                   url=d.get('url', None))
+
+
 @dataclass
 class CreateScope:
     scope: str
@@ -219,20 +260,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateScopeResponse:
 
 @dataclass
 class CredentialInfo:
-    credential_id: Optional[int] = None
+    credential_id: int
     """ID of the credential object in the workspace."""
 
     git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    gitHubOAuth, bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """The Git provider associated with the credential."""
 
     git_username: Optional[str] = None
-    """The username or email provided with your Git provider account, depending on which provider you
-    are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
-    username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
-    CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
-    see your provider's Personal Access Token authentication documentation to see what is supported."""
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
@@ -308,6 +344,34 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteAclResponse:
         return cls()
 
 
+@dataclass
+class DeleteCredentialsResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialsResponse:
+        """Deserializes the DeleteCredentialsResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteRepoResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteRepoResponse:
+        """Deserializes the DeleteRepoResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeleteResponse:
 
@@ -422,18 +486,30 @@ def from_dict(cls, d: Dict[str, any]) -> ExportResponse:
 
 @dataclass
 class GetCredentialsResponse:
-    credentials: Optional[List[CredentialInfo]] = None
+    credential_id: int
+    """ID of the credential object in the workspace."""
+
+    git_provider: Optional[str] = None
+    """The Git provider associated with the credential."""
+
+    git_username: Optional[str] = None
+    """The username or email provided with your Git provider account and associated with the
+    credential."""
 
     def as_dict(self) -> dict:
         """Serializes the GetCredentialsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse:
         """Deserializes the GetCredentialsResponse from a dictionary."""
-        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
+        return cls(credential_id=d.get('credential_id', None),
+                   git_provider=d.get('git_provider', None),
+                   git_username=d.get('git_username', None))
 
 
 @dataclass
@@ -453,6 +529,53 @@ def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse:
         return cls(permission_levels=_repeated_dict(d, 'permission_levels', RepoPermissionsDescription))
 
 
+@dataclass
+class GetRepoResponse:
+    branch: Optional[str] = None
+    """Branch that the local version of the repo is checked out to."""
+
+    head_commit_id: Optional[str] = None
+    """SHA-1 hash representing the commit ID of the current HEAD of the repo."""
+
+    id: Optional[int] = None
+    """ID of the Git folder (repo) object in the workspace."""
+
+    path: Optional[str] = None
+    """Path of the Git folder (repo) in the workspace."""
+
+    provider: Optional[str] = None
+    """Git provider of the linked Git repository."""
+
+    sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout settings for the Git folder (repo)."""
+
+    url: Optional[str] = None
+    """URL of the linked Git repository."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetRepoResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout.as_dict()
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse:
+        """Deserializes the GetRepoResponse from a dictionary."""
+        return cls(branch=d.get('branch', None),
+                   head_commit_id=d.get('head_commit_id', None),
+                   id=d.get('id', None),
+                   path=d.get('path', None),
+                   provider=d.get('provider', None),
+                   sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckout),
+                   url=d.get('url', None))
+
+
 @dataclass
 class GetSecretResponse:
     key: Optional[str] = None
@@ -605,13 +728,31 @@ def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse:
         return cls(items=_repeated_dict(d, 'items', AclItem))
 
 
+@dataclass
+class ListCredentialsResponse:
+    credentials: Optional[List[CredentialInfo]] = None
+    """List of credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
+        """Deserializes the ListCredentialsResponse from a dictionary."""
+        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo))
+
+
 @dataclass
 class ListReposResponse:
     next_page_token: Optional[str] = None
-    """Token that can be specified as a query parameter to the GET /repos endpoint to retrieve the next
-    page of results."""
+    """Token that can be specified as a query parameter to the `GET /repos` endpoint to retrieve the
+    next page of results."""
 
     repos: Optional[List[RepoInfo]] = None
+    """List of Git folders (repos)."""
 
     def as_dict(self) -> dict:
         """Serializes the ListReposResponse into a dictionary suitable for use as a JSON request body."""
@@ -940,28 +1081,28 @@ def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse:
 
 @dataclass
 class RepoInfo:
+    """Git folder (repo) information."""
+
     branch: Optional[str] = None
-    """Branch that the local version of the repo is checked out to."""
+    """Name of the current git branch of the git folder (repo)."""
 
     head_commit_id: Optional[str] = None
-    """SHA-1 hash representing the commit ID of the current HEAD of the repo."""
+    """Current git commit id of the git folder (repo)."""
 
     id: Optional[int] = None
-    """ID of the repo object in the workspace."""
+    """Id of the git folder (repo) in the Workspace."""
 
     path: Optional[str] = None
-    """Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If
-    repo is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}."""
+    """Root path of the git folder (repo) in the Workspace."""
 
     provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
+    """Git provider of the remote git repository, e.g. `gitHub`."""
 
     sparse_checkout: Optional[SparseCheckout] = None
+    """Sparse checkout config for the git folder (repo)."""
 
     url: Optional[str] = None
-    """URL of the Git repository to be linked."""
+    """URL of the remote git repository."""
 
     def as_dict(self) -> dict:
         """Serializes the RepoInfo into a dictionary suitable for use as a JSON request body."""
@@ -1146,8 +1287,12 @@ def from_dict(cls, d: Dict[str, any]) -> SecretScope:
 
 @dataclass
 class SparseCheckout:
+    """Sparse checkout configuration, it contains options like cone patterns."""
+
     patterns: Optional[List[str]] = None
-    """List of patterns to include for sparse checkout."""
+    """List of sparse checkout cone patterns, see [cone mode handling] for details.
+    
+    [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
 
     def as_dict(self) -> dict:
         """Serializes the SparseCheckout into a dictionary suitable for use as a JSON request body."""
@@ -1163,8 +1308,12 @@ def from_dict(cls, d: Dict[str, any]) -> SparseCheckout:
 
 @dataclass
 class SparseCheckoutUpdate:
+    """Sparse checkout configuration, it contains options like cone patterns."""
+
     patterns: Optional[List[str]] = None
-    """List of patterns to include for sparse checkout."""
+    """List of sparse checkout cone patterns, see [cone mode handling] for details.
+    
+    [cone mode handling]: https://git-scm.com/docs/git-sparse-checkout#_internalscone_mode_handling"""
 
     def as_dict(self) -> dict:
         """Serializes the SparseCheckoutUpdate into a dictionary suitable for use as a JSON request body."""
@@ -1179,15 +1328,15 @@ def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate:
 
 
 @dataclass
-class UpdateCredentials:
+class UpdateCredentialsRequest:
+    git_provider: str
+    """Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+    `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+    `gitLabEnterpriseEdition` and `awsCodeCommit`."""
+
     credential_id: Optional[int] = None
     """The ID for the corresponding credential to access."""
 
-    git_provider: Optional[str] = None
-    """Git provider. This field is case-insensitive. The available Git providers are gitHub,
-    bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-    gitLabEnterpriseEdition and awsCodeCommit."""
-
     git_username: Optional[str] = None
     """The username or email provided with your Git provider account, depending on which provider you
     are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
@@ -1197,13 +1346,12 @@ class UpdateCredentials:
 
     personal_access_token: Optional[str] = None
     """The personal access token used to authenticate to the corresponding Git provider. For certain
-    providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-    access token used to authenticate to the corresponding Git
+    providers, support may exist for other types of scoped access tokens. [Learn more].
     
     [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateCredentials into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateCredentialsRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.credential_id is not None: body['credential_id'] = self.credential_id
         if self.git_provider is not None: body['git_provider'] = self.git_provider
@@ -1212,8 +1360,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentials:
-        """Deserializes the UpdateCredentials from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest:
+        """Deserializes the UpdateCredentialsRequest from a dictionary."""
         return cls(credential_id=d.get('credential_id', None),
                    git_provider=d.get('git_provider', None),
                    git_username=d.get('git_username', None),
@@ -1221,12 +1369,26 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCredentials:
 
 
 @dataclass
-class UpdateRepo:
+class UpdateCredentialsResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsResponse:
+        """Deserializes the UpdateCredentialsResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class UpdateRepoRequest:
     branch: Optional[str] = None
     """Branch that the local version of the repo is checked out to."""
 
     repo_id: Optional[int] = None
-    """The ID for the corresponding repo to access."""
+    """ID of the Git folder (repo) object in the workspace."""
 
     sparse_checkout: Optional[SparseCheckoutUpdate] = None
     """If specified, update the sparse checkout settings. The update will fail if sparse checkout is
@@ -1238,7 +1400,7 @@ class UpdateRepo:
     branch instead of the detached HEAD."""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateRepo into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateRepoRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.branch is not None: body['branch'] = self.branch
         if self.repo_id is not None: body['repo_id'] = self.repo_id
@@ -1247,8 +1409,8 @@ def as_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateRepo:
-        """Deserializes the UpdateRepo from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest:
+        """Deserializes the UpdateRepoRequest from a dictionary."""
         return cls(branch=d.get('branch', None),
                    repo_id=d.get('repo_id', None),
                    sparse_checkout=_from_dict(d, 'sparse_checkout', SparseCheckoutUpdate),
@@ -1256,16 +1418,16 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRepo:
 
 
 @dataclass
-class UpdateResponse:
+class UpdateRepoResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdateRepoResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
-        """Deserializes the UpdateResponse from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> UpdateRepoResponse:
+        """Deserializes the UpdateRepoResponse from a dictionary."""
         return cls()
 
 
@@ -1471,9 +1633,9 @@ def create(self,
         existing credentials, or the DELETE endpoint to delete existing credentials.
         
         :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -1482,8 +1644,7 @@ def create(self,
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -1509,11 +1670,11 @@ def delete(self, credential_id: int):
         
         """
 
-        headers = {}
+        headers = {'Accept': 'application/json', }
 
         self._api.do('DELETE', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
 
-    def get(self, credential_id: int) -> CredentialInfo:
+    def get(self, credential_id: int) -> GetCredentialsResponse:
         """Get a credential entry.
         
         Gets the Git credential with the specified credential ID.
@@ -1521,13 +1682,13 @@ def get(self, credential_id: int) -> CredentialInfo:
         :param credential_id: int
           The ID for the corresponding credential to access.
         
-        :returns: :class:`CredentialInfo`
+        :returns: :class:`GetCredentialsResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/git-credentials/{credential_id}', headers=headers)
-        return CredentialInfo.from_dict(res)
+        return GetCredentialsResponse.from_dict(res)
 
     def list(self) -> Iterator[CredentialInfo]:
         """Get Git credentials.
@@ -1540,13 +1701,13 @@ def list(self) -> Iterator[CredentialInfo]:
         headers = {'Accept': 'application/json', }
 
         json = self._api.do('GET', '/api/2.0/git-credentials', headers=headers)
-        parsed = GetCredentialsResponse.from_dict(json).credentials
+        parsed = ListCredentialsResponse.from_dict(json).credentials
         return parsed if parsed is not None else []
 
     def update(self,
                credential_id: int,
+               git_provider: str,
                *,
-               git_provider: Optional[str] = None,
                git_username: Optional[str] = None,
                personal_access_token: Optional[str] = None):
         """Update a credential.
@@ -1555,10 +1716,10 @@ def update(self,
         
         :param credential_id: int
           The ID for the corresponding credential to access.
-        :param git_provider: str (optional)
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -1567,8 +1728,7 @@ def update(self,
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -1602,7 +1762,7 @@ def create(self,
                provider: str,
                *,
                path: Optional[str] = None,
-               sparse_checkout: Optional[SparseCheckout] = None) -> RepoInfo:
+               sparse_checkout: Optional[SparseCheckout] = None) -> CreateRepoResponse:
         """Create a repo.
         
         Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
@@ -1611,17 +1771,17 @@ def create(self,
         :param url: str
           URL of the Git repository to be linked.
         :param provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param path: str (optional)
           Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-          is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
+          is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`CreateRepoResponse`
         """
         body = {}
         if path is not None: body['path'] = path
@@ -1631,7 +1791,7 @@ def create(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/repos', body=body, headers=headers)
-        return RepoInfo.from_dict(res)
+        return CreateRepoResponse.from_dict(res)
 
     def delete(self, repo_id: int):
         """Delete a repo.
@@ -1639,30 +1799,30 @@ def delete(self, repo_id: int):
         Deletes the specified repo.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
         
         """
 
-        headers = {}
+        headers = {'Accept': 'application/json', }
 
         self._api.do('DELETE', f'/api/2.0/repos/{repo_id}', headers=headers)
 
-    def get(self, repo_id: int) -> RepoInfo:
+    def get(self, repo_id: int) -> GetRepoResponse:
         """Get a repo.
         
         Returns the repo with the given repo ID.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`GetRepoResponse`
         """
 
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', f'/api/2.0/repos/{repo_id}', headers=headers)
-        return RepoInfo.from_dict(res)
+        return GetRepoResponse.from_dict(res)
 
     def get_permission_levels(self, repo_id: str) -> GetRepoPermissionLevelsResponse:
         """Get repo permission levels.
@@ -1709,8 +1869,9 @@ def list(self,
           Token used to get the next page of results. If not specified, returns the first page of results as
           well as a next page token if there are more results.
         :param path_prefix: str (optional)
-          Filters repos that have paths starting with the given path prefix. If not provided repos from /Repos
-          will be served.
+          Filters repos that have paths starting with the given path prefix. If not provided or when provided
+          an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will
+          be served.
         
         :returns: Iterator over :class:`RepoInfo`
         """
@@ -1764,7 +1925,7 @@ def update(self,
         branch.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         :param branch: str (optional)
           Branch that the local version of the repo is checked out to.
         :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)

From 79b096fb414d87ea5c8d4484687544ce63bbe266 Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi <88379306+tanmay-db@users.noreply.github.com>
Date: Thu, 26 Sep 2024 15:42:06 +0200
Subject: [PATCH 047/136] [Release] Release v0.33.0 (#775)

### Internal Changes

* Add DCO guidelines
([#773](https://github.com/databricks/databricks-sdk-py/pull/773)).
* Update SDK to latest OpenAPI spec
([#766](https://github.com/databricks/databricks-sdk-py/pull/766)).


### API Changes:

* Added
[w.disable_legacy_access](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_access.html)
workspace-level service and
[a.disable_legacy_features](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_settings/disable_legacy_features.html)
account-level service.
* Added
[w.temporary_table_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/temporary_table_credentials.html)
workspace-level service.
* Added `put_ai_gateway()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html)
workspace-level service.
* Added `databricks.sdk.service.apps.ApplicationState`,
`databricks.sdk.service.apps.ApplicationStatus`,
`databricks.sdk.service.apps.ComputeState` and
`databricks.sdk.service.apps.ComputeStatus` dataclasses.
* Added `databricks.sdk.service.catalog.AwsCredentials`,
`databricks.sdk.service.catalog.AzureUserDelegationSas`,
`databricks.sdk.service.catalog.GcpOauthToken`,
`databricks.sdk.service.catalog.GenerateTemporaryTableCredentialRequest`,
`databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`,
`databricks.sdk.service.catalog.R2Credentials` and
`databricks.sdk.service.catalog.TableOperation` dataclasses.
* Added `databricks.sdk.service.serving.AiGatewayConfig`,
`databricks.sdk.service.serving.AiGatewayGuardrailParameters`,
`databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior`,
`databricks.sdk.service.serving.AiGatewayGuardrailPiiBehaviorBehavior`,
`databricks.sdk.service.serving.AiGatewayGuardrails`,
`databricks.sdk.service.serving.AiGatewayInferenceTableConfig`,
`databricks.sdk.service.serving.AiGatewayRateLimit`,
`databricks.sdk.service.serving.AiGatewayRateLimitKey`,
`databricks.sdk.service.serving.AiGatewayRateLimitRenewalPeriod`,
`databricks.sdk.service.serving.AiGatewayUsageTrackingConfig`,
`databricks.sdk.service.serving.PutAiGatewayRequest` and
`databricks.sdk.service.serving.PutAiGatewayResponse` dataclasses.
* Added `databricks.sdk.service.settings.BooleanMessage`,
`databricks.sdk.service.settings.DeleteDisableLegacyAccessRequest`,
`databricks.sdk.service.settings.DeleteDisableLegacyAccessResponse`,
`databricks.sdk.service.settings.DeleteDisableLegacyFeaturesRequest`,
`databricks.sdk.service.settings.DeleteDisableLegacyFeaturesResponse`,
`databricks.sdk.service.settings.DisableLegacyAccess`,
`databricks.sdk.service.settings.DisableLegacyFeatures`,
`databricks.sdk.service.settings.GetDisableLegacyAccessRequest`,
`databricks.sdk.service.settings.GetDisableLegacyFeaturesRequest`,
`databricks.sdk.service.settings.UpdateDisableLegacyAccessRequest` and
`databricks.sdk.service.settings.UpdateDisableLegacyFeaturesRequest`
dataclasses.
* Added `databricks.sdk.service.workspace.CreateCredentialsRequest`,
`databricks.sdk.service.workspace.CreateRepoRequest`,
`databricks.sdk.service.workspace.CreateRepoResponse`,
`databricks.sdk.service.workspace.DeleteCredentialsRequest`,
`databricks.sdk.service.workspace.DeleteCredentialsResponse`,
`databricks.sdk.service.workspace.DeleteRepoResponse`,
`databricks.sdk.service.workspace.GetCredentialsRequest`,
`databricks.sdk.service.workspace.GetRepoResponse`,
`databricks.sdk.service.workspace.ListCredentialsResponse`,
`databricks.sdk.service.workspace.UpdateCredentialsRequest`,
`databricks.sdk.service.workspace.UpdateCredentialsResponse`,
`databricks.sdk.service.workspace.UpdateRepoRequest` and
`databricks.sdk.service.workspace.UpdateRepoResponse` dataclasses.
* Added `app_status` and `compute_status` fields for
`databricks.sdk.service.apps.App`.
* Added `deployment_id` field for
`databricks.sdk.service.apps.CreateAppDeploymentRequest`.
* Added `external_access_enabled` field for
`databricks.sdk.service.catalog.GetMetastoreSummaryResponse`.
* Added `include_manifest_capabilities` field for
`databricks.sdk.service.catalog.GetTableRequest`.
* Added `include_manifest_capabilities` field for
`databricks.sdk.service.catalog.ListSummariesRequest`.
* Added `include_manifest_capabilities` field for
`databricks.sdk.service.catalog.ListTablesRequest`.
* Added `external_access_enabled` field for
`databricks.sdk.service.catalog.MetastoreInfo`.
* Added `budget_policy_id` and `schema` fields for
`databricks.sdk.service.pipelines.CreatePipeline`.
* Added `budget_policy_id` and `schema` fields for
`databricks.sdk.service.pipelines.EditPipeline`.
* Added `effective_budget_policy_id` field for
`databricks.sdk.service.pipelines.GetPipelineResponse`.
* Added `budget_policy_id` and `schema` fields for
`databricks.sdk.service.pipelines.PipelineSpec`.
* Added `ai_gateway` field for
`databricks.sdk.service.serving.CreateServingEndpoint`.
* Added `ai_gateway` field for
`databricks.sdk.service.serving.ServingEndpoint`.
* Added `ai_gateway` field for
`databricks.sdk.service.serving.ServingEndpointDetailed`.
* Added `workspace_id` field for
`databricks.sdk.service.settings.TokenInfo`.
* Added `credential_id`, `git_provider` and `git_username` fields for
`databricks.sdk.service.workspace.GetCredentialsResponse`.
* Changed `delete()`, `start()` and `stop()` methods for
[w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html)
workspace-level service to return `databricks.sdk.service.apps.App`
dataclass.
* Changed `deploy()` method for
[w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html)
workspace-level service with new required argument order.
* Changed `create()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.CreateCredentialsRequest` dataclass.
* Changed `delete()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.DeleteCredentialsRequest` dataclass.
* Changed `delete()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service to return
`databricks.sdk.service.workspace.DeleteCredentialsResponse` dataclass.
* Changed `get()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.GetCredentialsRequest` dataclass.
* Changed `get()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service to return
`databricks.sdk.service.workspace.GetCredentialsResponse` dataclass.
* Changed `list()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service to return
`databricks.sdk.service.workspace.ListCredentialsResponse` dataclass.
* Changed `update()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.UpdateCredentialsRequest` dataclass.
* Changed `update()` method for
[w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html)
workspace-level service to return
`databricks.sdk.service.workspace.UpdateCredentialsResponse` dataclass.
* Changed `create()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service to return
`databricks.sdk.service.workspace.CreateRepoResponse` dataclass.
* Changed `create()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.CreateRepoRequest` dataclass.
* Changed `delete()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service to return
`databricks.sdk.service.workspace.DeleteRepoResponse` dataclass.
* Changed `get()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service to return
`databricks.sdk.service.workspace.GetRepoResponse` dataclass.
* Changed `update()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service to return
`databricks.sdk.service.workspace.UpdateRepoResponse` dataclass.
* Changed `update()` method for
[w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html)
workspace-level service. New request type is
`databricks.sdk.service.workspace.UpdateRepoRequest` dataclass.
* Changed `source_code_path` field for
`databricks.sdk.service.apps.AppDeployment` to no longer be required.
* Changed `source_code_path` field for
`databricks.sdk.service.apps.CreateAppDeploymentRequest` to no longer be
required.
* Changed `return_params` and `routine_dependencies` fields for
`databricks.sdk.service.catalog.CreateFunction` to no longer be
required.
* Changed `credential_id` and `git_provider` fields for
`databricks.sdk.service.workspace.CreateCredentialsResponse` to be
required.
* Changed `credential_id` field for
`databricks.sdk.service.workspace.CredentialInfo` to be required.
* Changed `patterns` field for
`databricks.sdk.service.workspace.SparseCheckout` to
`databricks.sdk.service.workspace.List` dataclass.
* Changed `patterns` field for
`databricks.sdk.service.workspace.SparseCheckoutUpdate` to
`databricks.sdk.service.workspace.List` dataclass.
* Removed `databricks.sdk.service.apps.AppState`,
`databricks.sdk.service.apps.AppStatus`, `any` and `any` dataclasses.
* Removed `databricks.sdk.service.sql.ClientCallContext`,
`databricks.sdk.service.sql.EncodedText`,
`databricks.sdk.service.sql.EncodedTextEncoding`,
`databricks.sdk.service.sql.QuerySource`,
`databricks.sdk.service.sql.QuerySourceDriverInfo`,
`databricks.sdk.service.sql.QuerySourceEntryPoint`,
`databricks.sdk.service.sql.QuerySourceJobManager`,
`databricks.sdk.service.sql.QuerySourceTrigger` and
`databricks.sdk.service.sql.ServerlessChannelInfo` dataclasses.
* Removed `databricks.sdk.service.workspace.CreateCredentials`,
`databricks.sdk.service.workspace.CreateRepo`,
`databricks.sdk.service.workspace.DeleteGitCredentialRequest`,
`databricks.sdk.service.workspace.GetGitCredentialRequest`,
`databricks.sdk.service.workspace.SparseCheckoutPattern`,
`databricks.sdk.service.workspace.UpdateCredentials`,
`databricks.sdk.service.workspace.UpdateRepo` and
`databricks.sdk.service.workspace.UpdateResponse` dataclasses.
* Removed `status` field for `databricks.sdk.service.apps.App`.
* Removed `query_source` field for
`databricks.sdk.service.sql.QueryInfo`.
* Removed `credentials` field for
`databricks.sdk.service.workspace.GetCredentialsResponse`.

OpenAPI SHA: 248f4ad9668661da9d0bf4a7b0119a2d44fd1e75, Date: 2024-09-25
---
 CHANGELOG.md              | 66 +++++++++++++++++++++++++++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 67 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index e5fd9525c..029a437a7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,71 @@
 # Version changelog
 
+## [Release] Release v0.33.0
+
+### Internal Changes
+
+ * Add DCO guidelines ([#773](https://github.com/databricks/databricks-sdk-py/pull/773)).
+ * Update SDK to latest OpenAPI spec ([#766](https://github.com/databricks/databricks-sdk-py/pull/766)).
+
+
+### API Changes:
+
+ * Added [w.disable_legacy_access](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_access.html) workspace-level service and [a.disable_legacy_features](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_settings/disable_legacy_features.html) account-level service.
+ * Added [w.temporary_table_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/temporary_table_credentials.html) workspace-level service.
+ * Added `put_ai_gateway()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service.
+ * Added `databricks.sdk.service.apps.ApplicationState`, `databricks.sdk.service.apps.ApplicationStatus`, `databricks.sdk.service.apps.ComputeState` and `databricks.sdk.service.apps.ComputeStatus` dataclasses.
+ * Added `databricks.sdk.service.catalog.AwsCredentials`, `databricks.sdk.service.catalog.AzureUserDelegationSas`, `databricks.sdk.service.catalog.GcpOauthToken`, `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialRequest`, `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`, `databricks.sdk.service.catalog.R2Credentials` and `databricks.sdk.service.catalog.TableOperation` dataclasses.
+ * Added `databricks.sdk.service.serving.AiGatewayConfig`, `databricks.sdk.service.serving.AiGatewayGuardrailParameters`, `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior`, `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehaviorBehavior`, `databricks.sdk.service.serving.AiGatewayGuardrails`, `databricks.sdk.service.serving.AiGatewayInferenceTableConfig`, `databricks.sdk.service.serving.AiGatewayRateLimit`, `databricks.sdk.service.serving.AiGatewayRateLimitKey`, `databricks.sdk.service.serving.AiGatewayRateLimitRenewalPeriod`, `databricks.sdk.service.serving.AiGatewayUsageTrackingConfig`, `databricks.sdk.service.serving.PutAiGatewayRequest` and `databricks.sdk.service.serving.PutAiGatewayResponse` dataclasses.
+ * Added `databricks.sdk.service.settings.BooleanMessage`, `databricks.sdk.service.settings.DeleteDisableLegacyAccessRequest`, `databricks.sdk.service.settings.DeleteDisableLegacyAccessResponse`, `databricks.sdk.service.settings.DeleteDisableLegacyFeaturesRequest`, `databricks.sdk.service.settings.DeleteDisableLegacyFeaturesResponse`, `databricks.sdk.service.settings.DisableLegacyAccess`, `databricks.sdk.service.settings.DisableLegacyFeatures`, `databricks.sdk.service.settings.GetDisableLegacyAccessRequest`, `databricks.sdk.service.settings.GetDisableLegacyFeaturesRequest`, `databricks.sdk.service.settings.UpdateDisableLegacyAccessRequest` and `databricks.sdk.service.settings.UpdateDisableLegacyFeaturesRequest` dataclasses.
+ * Added `databricks.sdk.service.workspace.CreateCredentialsRequest`, `databricks.sdk.service.workspace.CreateRepoRequest`, `databricks.sdk.service.workspace.CreateRepoResponse`, `databricks.sdk.service.workspace.DeleteCredentialsRequest`, `databricks.sdk.service.workspace.DeleteCredentialsResponse`, `databricks.sdk.service.workspace.DeleteRepoResponse`, `databricks.sdk.service.workspace.GetCredentialsRequest`, `databricks.sdk.service.workspace.GetRepoResponse`, `databricks.sdk.service.workspace.ListCredentialsResponse`, `databricks.sdk.service.workspace.UpdateCredentialsRequest`, `databricks.sdk.service.workspace.UpdateCredentialsResponse`, `databricks.sdk.service.workspace.UpdateRepoRequest` and `databricks.sdk.service.workspace.UpdateRepoResponse` dataclasses.
+ * Added `app_status` and `compute_status` fields for `databricks.sdk.service.apps.App`.
+ * Added `deployment_id` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest`.
+ * Added `external_access_enabled` field for `databricks.sdk.service.catalog.GetMetastoreSummaryResponse`.
+ * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.GetTableRequest`.
+ * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.ListSummariesRequest`.
+ * Added `include_manifest_capabilities` field for `databricks.sdk.service.catalog.ListTablesRequest`.
+ * Added `external_access_enabled` field for `databricks.sdk.service.catalog.MetastoreInfo`.
+ * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `effective_budget_policy_id` field for `databricks.sdk.service.pipelines.GetPipelineResponse`.
+ * Added `budget_policy_id` and `schema` fields for `databricks.sdk.service.pipelines.PipelineSpec`.
+ * Added `ai_gateway` field for `databricks.sdk.service.serving.CreateServingEndpoint`.
+ * Added `ai_gateway` field for `databricks.sdk.service.serving.ServingEndpoint`.
+ * Added `ai_gateway` field for `databricks.sdk.service.serving.ServingEndpointDetailed`.
+ * Added `workspace_id` field for `databricks.sdk.service.settings.TokenInfo`.
+ * Added `credential_id`, `git_provider` and `git_username` fields for `databricks.sdk.service.workspace.GetCredentialsResponse`.
+ * Changed `delete()`, `start()` and `stop()` methods for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service to return `databricks.sdk.service.apps.App` dataclass.
+ * Changed `deploy()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service with new required argument order.
+ * Changed `create()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service. New request type is `databricks.sdk.service.workspace.CreateCredentialsRequest` dataclass.
+ * Changed `delete()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service. New request type is `databricks.sdk.service.workspace.DeleteCredentialsRequest` dataclass.
+ * Changed `delete()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `any` dataclass.
+ * Changed `get()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service. New request type is `databricks.sdk.service.workspace.GetCredentialsRequest` dataclass.
+ * Changed `get()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `databricks.sdk.service.workspace.GetCredentialsResponse` dataclass.
+ * Changed `list()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `databricks.sdk.service.workspace.ListCredentialsResponse` dataclass.
+ * Changed `update()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service. New request type is `databricks.sdk.service.workspace.UpdateCredentialsRequest` dataclass.
+ * Changed `update()` method for [w.git_credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/git_credentials.html) workspace-level service to return `any` dataclass.
+ * Changed `create()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `databricks.sdk.service.workspace.CreateRepoResponse` dataclass.
+ * Changed `create()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service. New request type is `databricks.sdk.service.workspace.CreateRepoRequest` dataclass.
+ * Changed `delete()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `any` dataclass.
+ * Changed `get()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `databricks.sdk.service.workspace.GetRepoResponse` dataclass.
+ * Changed `update()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service to return `any` dataclass.
+ * Changed `update()` method for [w.repos](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/repos.html) workspace-level service. New request type is `databricks.sdk.service.workspace.UpdateRepoRequest` dataclass.
+ * Changed `source_code_path` field for `databricks.sdk.service.apps.AppDeployment` to no longer be required.
+ * Changed `source_code_path` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest` to no longer be required.
+ * Changed `return_params` and `routine_dependencies` fields for `databricks.sdk.service.catalog.CreateFunction` to no longer be required.
+ * Changed `credential_id` and `git_provider` fields for `databricks.sdk.service.workspace.CreateCredentialsResponse` to be required.
+ * Changed `credential_id` field for `databricks.sdk.service.workspace.CredentialInfo` to be required.
+ * Changed `patterns` field for `databricks.sdk.service.workspace.SparseCheckout` to `databricks.sdk.service.workspace.List` dataclass.
+ * Changed `patterns` field for `databricks.sdk.service.workspace.SparseCheckoutUpdate` to `databricks.sdk.service.workspace.List` dataclass.
+ * Removed `databricks.sdk.service.apps.AppState`, `databricks.sdk.service.apps.AppStatus`, `any` and `any` dataclasses.
+ * Removed `databricks.sdk.service.sql.ClientCallContext`, `databricks.sdk.service.sql.EncodedText`, `databricks.sdk.service.sql.EncodedTextEncoding`, `databricks.sdk.service.sql.QuerySource`, `databricks.sdk.service.sql.QuerySourceDriverInfo`, `databricks.sdk.service.sql.QuerySourceEntryPoint`, `databricks.sdk.service.sql.QuerySourceJobManager`, `databricks.sdk.service.sql.QuerySourceTrigger` and `databricks.sdk.service.sql.ServerlessChannelInfo` dataclasses.
+ * Removed `databricks.sdk.service.workspace.CreateCredentials`, `databricks.sdk.service.workspace.CreateRepo`, `databricks.sdk.service.workspace.DeleteGitCredentialRequest`, `databricks.sdk.service.workspace.GetGitCredentialRequest`, `databricks.sdk.service.workspace.SparseCheckoutPattern`, `databricks.sdk.service.workspace.UpdateCredentials`, `databricks.sdk.service.workspace.UpdateRepo` and `any` dataclasses.
+ * Removed `status` field for `databricks.sdk.service.apps.App`.
+ * Removed `query_source` field for `databricks.sdk.service.sql.QueryInfo`.
+ * Removed `credentials` field for `databricks.sdk.service.workspace.GetCredentialsResponse`.
+
+OpenAPI SHA: 248f4ad9668661da9d0bf4a7b0119a2d44fd1e75, Date: 2024-09-25
+
 ## [Release] Release v0.32.3
 
 ### New Features and Improvements
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index fb0ef16c1..e3d0b7be1 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.32.3'
+__version__ = '0.33.0'

From a1df7a7a4334f6e9d0919e9c3bc3d26b733c3160 Mon Sep 17 00:00:00 2001
From: Shicheng Zhou <142252423+shichengzhou-db@users.noreply.github.com>
Date: Mon, 7 Oct 2024 04:31:03 -0700
Subject: [PATCH 048/136] [Internal] revert Support Models in `dbutils.fs`
 operations (#750) (#778)

This reverts commit 3162545c476a05e8e8c993b9e46038ddeea953a3.
Verified that /Models downloads still work correctly.
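
Not part of this patch, but a minimal sketch of the resulting path dispatch (assuming a configured `WorkspaceClient` and using the SDK's internal `_path` helper):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.mixins.files import _DbfsPath, _VolumesPath

w = WorkspaceClient()  # assumes credentials are already configured
# /Models is no longer special-cased: it resolves to the DBFS backend.
assert isinstance(w.dbfs._path('/Models/my_model'), _DbfsPath)
# UC Volumes paths still go through the Files API.
assert isinstance(w.dbfs._path('/Volumes/main/default/vol/file.txt'), _VolumesPath)
```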

## Changes


## Tests


- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 databricks/sdk/mixins/files.py | 18 +++++++++---------
 tests/test_dbfs_mixins.py      | 13 +++++--------
 2 files changed, 14 insertions(+), 17 deletions(-)

diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 47c11747d..1e109a1a7 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -167,7 +167,7 @@ def __repr__(self) -> str:
         return f"<_DbfsIO {self._path} {'read' if self.readable() else 'write'}=True>"
 
 
-class _FilesIO(BinaryIO):
+class _VolumesIO(BinaryIO):
 
     def __init__(self, api: files.FilesAPI, path: str, *, read: bool, write: bool, overwrite: bool):
         self._buffer = []
@@ -262,7 +262,7 @@ def __exit__(self, __t, __value, __traceback):
         self.close()
 
     def __repr__(self) -> str:
-        return f"<_FilesIO {self._path} {'read' if self.readable() else 'write'}=True>"
+        return f"<_VolumesIO {self._path} {'read' if self.readable() else 'write'}=True>"
 
 
 class _Path(ABC):
@@ -398,7 +398,7 @@ def __repr__(self) -> str:
         return f'<_LocalPath {self._path}>'
 
 
-class _FilesPath(_Path):
+class _VolumesPath(_Path):
 
     def __init__(self, api: files.FilesAPI, src: Union[str, pathlib.Path]):
         self._path = pathlib.PurePosixPath(str(src).replace('dbfs:', '').replace('file:', ''))
@@ -411,7 +411,7 @@ def _is_dbfs(self) -> bool:
         return False
 
     def child(self, path: str) -> Self:
-        return _FilesPath(self._api, str(self._path / path))
+        return _VolumesPath(self._api, str(self._path / path))
 
     def _is_dir(self) -> bool:
         try:
@@ -431,7 +431,7 @@ def exists(self) -> bool:
             return self.is_dir
 
     def open(self, *, read=False, write=False, overwrite=False) -> BinaryIO:
-        return _FilesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
+        return _VolumesIO(self._api, self.as_string, read=read, write=write, overwrite=overwrite)
 
     def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
         if not self.is_dir:
@@ -458,13 +458,13 @@ def list(self, *, recursive=False) -> Generator[files.FileInfo, None, None]:
     def delete(self, *, recursive=False):
         if self.is_dir:
             for entry in self.list(recursive=False):
-                _FilesPath(self._api, entry.path).delete(recursive=True)
+                _VolumesPath(self._api, entry.path).delete(recursive=True)
             self._api.delete_directory(self.as_string)
         else:
             self._api.delete(self.as_string)
 
     def __repr__(self) -> str:
-        return f'<_FilesPath {self._path}>'
+        return f'<_VolumesPath {self._path}>'
 
 
 class _DbfsPath(_Path):
@@ -589,8 +589,8 @@ def _path(self, src):
                 'UC Volumes paths, not external locations or DBFS mount points.')
         if src.scheme == 'file':
             return _LocalPath(src.geturl())
-        if src.path.startswith(('/Volumes', '/Models')):
-            return _FilesPath(self._files_api, src.geturl())
+        if src.path.startswith('/Volumes'):
+            return _VolumesPath(self._files_api, src.geturl())
         return _DbfsPath(self._dbfs_api, src.geturl())
 
     def copy(self, src: str, dst: str, *, recursive=False, overwrite=False):
diff --git a/tests/test_dbfs_mixins.py b/tests/test_dbfs_mixins.py
index ce86a2a80..6bbaca7a2 100644
--- a/tests/test_dbfs_mixins.py
+++ b/tests/test_dbfs_mixins.py
@@ -1,8 +1,8 @@
 import pytest
 
 from databricks.sdk.errors import NotFound
-from databricks.sdk.mixins.files import (DbfsExt, _DbfsPath, _FilesPath,
-                                         _LocalPath)
+from databricks.sdk.mixins.files import (DbfsExt, _DbfsPath, _LocalPath,
+                                         _VolumesPath)
 
 
 def test_moving_dbfs_file_to_local_dir(config, tmp_path, mocker):
@@ -55,14 +55,11 @@ def test_moving_local_dir_to_dbfs(config, tmp_path, mocker):
 
 
 @pytest.mark.parametrize('path,expected_type', [('/path/to/file', _DbfsPath),
-                                                ('/Volumes/path/to/file', _FilesPath),
-                                                ('/Models/path/to/file', _FilesPath),
+                                                ('/Volumes/path/to/file', _VolumesPath),
                                                 ('dbfs:/path/to/file', _DbfsPath),
-                                                ('dbfs:/Volumes/path/to/file', _FilesPath),
-                                                ('dbfs:/Models/path/to/file', _FilesPath),
+                                                ('dbfs:/Volumes/path/to/file', _VolumesPath),
                                                 ('file:/path/to/file', _LocalPath),
-                                                ('file:/Volumes/path/to/file', _LocalPath),
-                                                ('file:/Models/path/to/file', _LocalPath), ])
+                                                ('file:/Volumes/path/to/file', _LocalPath), ])
 def test_fs_path(config, path, expected_type):
     dbfs_ext = DbfsExt(config)
     assert isinstance(dbfs_ext._path(path), expected_type)

From b8c102d982b1e819f874af14214e12d017edd60d Mon Sep 17 00:00:00 2001
From: Parth Bansal 
Date: Mon, 7 Oct 2024 13:50:38 +0200
Subject: [PATCH 049/136] [Fix] Fix Model Serving Test (#781)

## Changes

Fix Model Serving Tests.
- Added a preferred `auth_type` so that the test does not try other auth
types first.
- Patched the function that reads `.databrickscfg`.
- Unset pre-existing environment variables (see the sketch below).
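
A condensed sketch of the resulting isolation pattern (not part of the diff itself; the names are taken from the changes below, and `mocker` comes from pytest-mock):

```python
from databricks.sdk.core import Config


def test_model_serving_auth_isolated(monkeypatch, mocker):
    # Force the model-serving auth path via the env vars mlflow checks.
    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
    # Drop the competing variable so leftover shell state cannot win.
    monkeypatch.delenv('DATABRICKS_MODEL_SERVING_HOST_URL', raising=False)
    # Read the OAuth token from a test fixture instead of the real path.
    monkeypatch.setattr(
        'databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH',
        'tests/testdata/model-serving-test-token')
    # Keep a developer's ~/.databrickscfg from leaking into the test.
    mocker.patch('databricks.sdk.config.Config._known_file_config_loader')

    assert Config().auth_type == 'model-serving'
```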

## Tests


- [x] `make test` run locally
- [x] `make fmt` applied
- [ ] relevant integration tests applied
---
 tests/test_model_serving_auth.py | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index 0ae211303..092a3bf16 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -12,24 +12,31 @@
     "to configure credentials for your preferred authentication method"
 
 
-@pytest.mark.parametrize(
-    "env_values, oauth_file_name",
-    [([('IS_IN_DB_MODEL_SERVING_ENV', 'true'),
-       ('DB_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
-     ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
-       ('DB_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
-     ([('IS_IN_DB_MODEL_SERVING_ENV', 'true'),
-       ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"),
-     ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
-       ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')], "tests/testdata/model-serving-test-token"), ])
-def test_model_serving_auth(env_values, oauth_file_name, monkeypatch):
+@pytest.mark.parametrize("env_values, del_env_values, oauth_file_name",
+                         [([
+                             ('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DB_MODEL_SERVING_HOST_URL', 'x')
+                         ], ['DATABRICKS_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"),
+                          ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
+                            ('DB_MODEL_SERVING_HOST_URL', 'x')], ['DATABRICKS_MODEL_SERVING_HOST_URL'],
+                           "tests/testdata/model-serving-test-token"),
+                          ([('IS_IN_DB_MODEL_SERVING_ENV', 'true'), ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')
+                            ], ['DB_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"),
+                          ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true'),
+                            ('DATABRICKS_MODEL_SERVING_HOST_URL', 'x')
+                            ], ['DB_MODEL_SERVING_HOST_URL'], "tests/testdata/model-serving-test-token"), ])
+def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeypatch, mocker):
     ## In mlflow we check for these two environment variables to return the correct config
     for (env_name, env_value) in env_values:
         monkeypatch.setenv(env_name, env_value)
+
+    for env_name in del_env_values:
+        monkeypatch.delenv(env_name, raising=False)
+
     # patch mlflow to read the file from the test directory
     monkeypatch.setattr(
         "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
         oauth_file_name)
+    mocker.patch('databricks.sdk.config.Config._known_file_config_loader')
 
     cfg = Config()
 
@@ -58,7 +65,7 @@ def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
     Config()
 
 
-def test_model_serving_auth_refresh(monkeypatch):
+def test_model_serving_auth_refresh(monkeypatch, mocker):
     ## In mlflow we check for these two environment variables to return the correct config
     monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
     monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
@@ -67,6 +74,7 @@ def test_model_serving_auth_refresh(monkeypatch):
     monkeypatch.setattr(
         "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
         "tests/testdata/model-serving-test-token")
+    mocker.patch('databricks.sdk.config.Config._known_file_config_loader')
 
     cfg = Config()
     assert cfg.auth_type == 'model-serving'

From 0e71082df04ac689bf1431f1b2ef4ef7c94450bd Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Mon, 7 Oct 2024 08:37:03 -0400
Subject: [PATCH 050/136] [Internal] Refactor ApiClient into `_BaseClient` and
 `ApiClient` (#785)

## Changes
`ApiClient` is also coupled to the `Config` object, which means that it
can't be used in situations where there is no config. For example, when
fetching OIDC endpoints, the user may not have a complete `Config`
instance yet. However, failures when requesting from those endpoints
should still be retried according to the SDK's retry policy.

To address this, I've split the ApiClient into `_BaseClient` and
`ApiClient`. `_BaseClient` is the core implementation of the client
without any dependency on the `Config`. This is similar to what @rauchy
did in the Java SDK to cut the dependency between the `ApiClient` and
`DatabricksConfig`. The `_BaseClient` can then be used when fetching
OIDC endpoint information.
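
A hedged sketch of the kind of usage this unlocks (internal API; the endpoint URL below is illustrative):

```python
from databricks.sdk._base_client import _BaseClient

# No Config object is required; the SDK's retry policy and
# error mapping still apply to the request.
client = _BaseClient(retry_timeout_seconds=30)
oidc = client.do(
    'GET',
    'https://example.cloud.databricks.com/oidc/.well-known/oauth-authorization-server')
print(oidc.get('token_endpoint'))
```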

This will be used in
https://github.com/databricks/databricks-sdk-py/pull/784 to support
retrying OAuth OIDC endpoint fetches.

## Tests


- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 databricks/sdk/_base_client.py | 323 +++++++++++++++++++++++++++++++++
 databricks/sdk/core.py         | 317 +++-----------------------------
 tests/fixture_server.py        |  31 ++++
 tests/test_base_client.py      | 278 ++++++++++++++++++++++++++++
 tests/test_core.py             | 298 ++----------------------------
 5 files changed, 671 insertions(+), 576 deletions(-)
 create mode 100644 databricks/sdk/_base_client.py
 create mode 100644 tests/fixture_server.py
 create mode 100644 tests/test_base_client.py

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
new file mode 100644
index 000000000..62c2974ec
--- /dev/null
+++ b/databricks/sdk/_base_client.py
@@ -0,0 +1,323 @@
+import logging
+from datetime import timedelta
+from types import TracebackType
+from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
+                    Optional, Type, Union)
+
+import requests
+import requests.adapters
+
+from . import useragent
+from .casing import Casing
+from .clock import Clock, RealClock
+from .errors import DatabricksError, _ErrorCustomizer, _Parser
+from .logger import RoundTrip
+from .retries import retried
+
+logger = logging.getLogger('databricks.sdk')
+
+
+class _BaseClient:
+
+    def __init__(self,
+                 debug_truncate_bytes: int = None,
+                 retry_timeout_seconds: int = None,
+                 user_agent_base: str = None,
+                 header_factory: Callable[[], dict] = None,
+                 max_connection_pools: int = None,
+                 max_connections_per_pool: int = None,
+                 pool_block: bool = True,
+                 http_timeout_seconds: float = None,
+                 extra_error_customizers: List[_ErrorCustomizer] = None,
+                 debug_headers: bool = False,
+                 clock: Clock = None):
+        """
+        :param debug_truncate_bytes:
+        :param retry_timeout_seconds:
+        :param user_agent_base:
+        :param header_factory: A function that returns a dictionary of headers to include in the request.
+        :param max_connection_pools: Number of urllib3 connection pools to cache before discarding the least
+            recently used pool. Python requests default value is 10.
+        :param max_connections_per_pool: The maximum number of connections to save in the pool. Improves performance
+            in multithreaded situations. For now, we're setting it to the same value as connection_pool_size.
+        :param pool_block: If pool_block is False, more connections are created but they are not saved
+            after the first use. If True, the client blocks when no free connections are available;
+            urllib3 ensures that no more than pool_maxsize connections are used at a time, which
+            prevents flooding the platform. By default, the requests library doesn't block.
+        :param http_timeout_seconds:
+        :param extra_error_customizers:
+        :param debug_headers: Whether to include debug headers in the request log.
+        :param clock: Clock object to use for time-related operations.
+        """
+
+        self._debug_truncate_bytes = debug_truncate_bytes or 96
+        self._debug_headers = debug_headers
+        self._retry_timeout_seconds = retry_timeout_seconds or 300
+        self._user_agent_base = user_agent_base or useragent.to_string()
+        self._header_factory = header_factory
+        self._clock = clock or RealClock()
+        self._session = requests.Session()
+        self._session.auth = self._authenticate
+
+        # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
+        # retry strategy established in the Databricks SDK for Go. See _is_retryable and
+        # @retried for more details.
+        http_adapter = requests.adapters.HTTPAdapter(pool_connections=max_connections_per_pool or 20,
+                                                     pool_maxsize=max_connection_pools or 20,
+                                                     pool_block=pool_block)
+        self._session.mount("https://", http_adapter)
+
+        # Default to 60 seconds
+        self._http_timeout_seconds = http_timeout_seconds or 60
+
+        self._error_parser = _Parser(extra_error_customizers=extra_error_customizers)
+
+    def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
+        if self._header_factory:
+            headers = self._header_factory()
+            for k, v in headers.items():
+                r.headers[k] = v
+        return r
+
+    @staticmethod
+    def _fix_query_string(query: Optional[dict] = None) -> Optional[dict]:
+        # Convert True -> "true" for Databricks APIs to understand booleans.
+        # See: https://github.com/databricks/databricks-sdk-py/issues/142
+        if query is None:
+            return None
+        with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()}
+
+        # Query parameters may be nested, e.g.
+        # {'filter_by': {'user_ids': [123, 456]}}
+        # The HTTP-compatible representation of this is
+        # filter_by.user_ids=123&filter_by.user_ids=456
+        # To achieve this, we convert the above dictionary to
+        # {'filter_by.user_ids': [123, 456]}
+        # See the following for more information:
+        # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule
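+        #
+        # Illustrative example:
+        #   {'filter_by': {'user_ids': [123, 456]}, 'recursive': True}
+        # becomes
+        #   {'filter_by.user_ids': [123, 456], 'recursive': 'true'}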
+        def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
+            for k1, v1 in d.items():
+                if isinstance(v1, dict):
+                    v1 = dict(flatten_dict(v1))
+                    for k2, v2 in v1.items():
+                        yield f"{k1}.{k2}", v2
+                else:
+                    yield k1, v1
+
+        flattened = dict(flatten_dict(with_fixed_bools))
+        return flattened
+
+    def do(self,
+           method: str,
+           url: str,
+           query: dict = None,
+           headers: dict = None,
+           body: dict = None,
+           raw: bool = False,
+           files=None,
+           data=None,
+           auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
+           response_headers: List[str] = None) -> Union[dict, list, BinaryIO]:
+        if headers is None:
+            headers = {}
+        headers['User-Agent'] = self._user_agent_base
+        retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                            is_retryable=self._is_retryable,
+                            clock=self._clock)
+        response = retryable(self._perform)(method,
+                                            url,
+                                            query=query,
+                                            headers=headers,
+                                            body=body,
+                                            raw=raw,
+                                            files=files,
+                                            data=data,
+                                            auth=auth)
+
+        resp = dict()
+        for header in response_headers if response_headers else []:
+            resp[header] = response.headers.get(Casing.to_header_case(header))
+        if raw:
+            resp["contents"] = _StreamingResponse(response)
+            return resp
+        if not len(response.content):
+            return resp
+
+        json_response = response.json()
+        if json_response is None:
+            return resp
+
+        if isinstance(json_response, list):
+            return json_response
+
+        return {**resp, **json_response}
+
+    @staticmethod
+    def _is_retryable(err: BaseException) -> Optional[str]:
+        # this method is Databricks-specific port of urllib3 retries
+        # (see https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py)
+        # and Databricks SDK for Go retries
+        # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go)
+        from urllib3.exceptions import ProxyError
+        if isinstance(err, ProxyError):
+            err = err.original_error
+        if isinstance(err, requests.ConnectionError):
+            # corresponds to `connection reset by peer` and `connection refused` errors from Go,
+            # which are generally related to the temporary glitches in the networking stack,
+            # also caused by endpoint protection software, like ZScaler, to drop connections while
+            # not yet authenticated.
+            #
+            # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
+            # will bubble up the original exception in case we reach max retries.
+            return 'cannot connect'
+        if isinstance(err, requests.Timeout):
+            # corresponds to `TLS handshake timeout` and `i/o timeout` in Go.
+            #
+            # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
+            # will bubble up the original exception in case we reach max retries.
+            return 'timeout'
+        if isinstance(err, DatabricksError):
+            message = str(err)
+            transient_error_string_matches = [
+                "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException",
+                "does not have any associated worker environments", "There is no worker environment with id",
+                "Unknown worker environment", "ClusterNotReadyException", "Unexpected error",
+                "Please try again later or try a faster operation.",
+                "RPC token bucket limit has been exceeded",
+            ]
+            for substring in transient_error_string_matches:
+                if substring not in message:
+                    continue
+                return f'matched {substring}'
+        return None
+
+    def _perform(self,
+                 method: str,
+                 url: str,
+                 query: dict = None,
+                 headers: dict = None,
+                 body: dict = None,
+                 raw: bool = False,
+                 files=None,
+                 data=None,
+                 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
+        response = self._session.request(method,
+                                         url,
+                                         params=self._fix_query_string(query),
+                                         json=body,
+                                         headers=headers,
+                                         files=files,
+                                         data=data,
+                                         auth=auth,
+                                         stream=raw,
+                                         timeout=self._http_timeout_seconds)
+        self._record_request_log(response, raw=raw or data is not None or files is not None)
+        error = self._error_parser.get_api_error(response)
+        if error is not None:
+            raise error from None
+        return response
+
+    def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
+        if not logger.isEnabledFor(logging.DEBUG):
+            return
+        logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
+
+
+class _StreamingResponse(BinaryIO):
+    _response: requests.Response
+    _buffer: bytes
+    _content: Union[Iterator[bytes], None]
+    _chunk_size: Union[int, None]
+    _closed: bool = False
+
+    def fileno(self) -> int:
+        pass
+
+    def flush(self) -> int:
+        pass
+
+    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+        self._response = response
+        self._buffer = b''
+        self._content = None
+        self._chunk_size = chunk_size
+
+    def _open(self) -> None:
+        if self._closed:
+            raise ValueError("I/O operation on closed file")
+        if not self._content:
+            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+
+    def __enter__(self) -> BinaryIO:
+        self._open()
+        return self
+
+    def set_chunk_size(self, chunk_size: Union[int, None]) -> None:
+        self._chunk_size = chunk_size
+
+    def close(self) -> None:
+        self._response.close()
+        self._closed = True
+
+    def isatty(self) -> bool:
+        return False
+
+    def read(self, n: int = -1) -> bytes:
+        self._open()
+        read_everything = n < 0
+        remaining_bytes = n
+        res = b''
+        while remaining_bytes > 0 or read_everything:
+            if len(self._buffer) == 0:
+                try:
+                    self._buffer = next(self._content)
+                except StopIteration:
+                    break
+            bytes_available = len(self._buffer)
+            to_read = bytes_available if read_everything else min(remaining_bytes, bytes_available)
+            res += self._buffer[:to_read]
+            self._buffer = self._buffer[to_read:]
+            remaining_bytes -= to_read
+        return res
+
+    def readable(self) -> bool:
+        return self._content is not None
+
+    def readline(self, __limit: int = ...) -> bytes:
+        raise NotImplementedError()
+
+    def readlines(self, __hint: int = ...) -> List[bytes]:
+        raise NotImplementedError()
+
+    def seek(self, __offset: int, __whence: int = ...) -> int:
+        raise NotImplementedError()
+
+    def seekable(self) -> bool:
+        return False
+
+    def tell(self) -> int:
+        raise NotImplementedError()
+
+    def truncate(self, __size: Union[int, None] = ...) -> int:
+        raise NotImplementedError()
+
+    def writable(self) -> bool:
+        return False
+
+    def write(self, s: Union[bytes, bytearray]) -> int:
+        raise NotImplementedError()
+
+    def writelines(self, lines: Iterable[bytes]) -> None:
+        raise NotImplementedError()
+
+    def __next__(self) -> bytes:
+        return self.read(1)
+
+    def __iter__(self) -> Iterator[bytes]:
+        return self._content
+
+    def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None],
+                 traceback: Union[TracebackType, None]) -> None:
+        self._content = None
+        self._buffer = b''
+        self.close()
diff --git a/databricks/sdk/core.py b/databricks/sdk/core.py
index 77e8c9aac..eab22cd71 100644
--- a/databricks/sdk/core.py
+++ b/databricks/sdk/core.py
@@ -1,19 +1,13 @@
 import re
-from datetime import timedelta
-from types import TracebackType
-from typing import Any, BinaryIO, Iterator, Type
+from typing import BinaryIO
 from urllib.parse import urlencode
 
-from requests.adapters import HTTPAdapter
-
-from .casing import Casing
+from ._base_client import _BaseClient
 from .config import *
 # To preserve backwards compatibility (as these definitions were previously in this module)
 from .credentials_provider import *
-from .errors import DatabricksError, _ErrorCustomizer, _Parser
-from .logger import RoundTrip
+from .errors import DatabricksError, _ErrorCustomizer
 from .oauth import retrieve_token
-from .retries import retried
 
 __all__ = ['Config', 'DatabricksError']
 
@@ -25,53 +19,19 @@
 
 
 class ApiClient:
-    _cfg: Config
-    _RETRY_AFTER_DEFAULT: int = 1
-
-    def __init__(self, cfg: Config = None):
-
-        if cfg is None:
-            cfg = Config()
 
+    def __init__(self, cfg: Config):
         self._cfg = cfg
-        # See https://github.com/databricks/databricks-sdk-go/blob/main/client/client.go#L34-L35
-        self._debug_truncate_bytes = cfg.debug_truncate_bytes if cfg.debug_truncate_bytes else 96
-        self._retry_timeout_seconds = cfg.retry_timeout_seconds if cfg.retry_timeout_seconds else 300
-        self._user_agent_base = cfg.user_agent
-        self._session = requests.Session()
-        self._session.auth = self._authenticate
-
-        # Number of urllib3 connection pools to cache before discarding the least
-        # recently used pool. Python requests default value is 10.
-        pool_connections = cfg.max_connection_pools
-        if pool_connections is None:
-            pool_connections = 20
-
-        # The maximum number of connections to save in the pool. Improves performance
-        # in multithreaded situations. For now, we're setting it to the same value
-        # as connection_pool_size.
-        pool_maxsize = cfg.max_connections_per_pool
-        if cfg.max_connections_per_pool is None:
-            pool_maxsize = pool_connections
-
-        # If pool_block is False, then more connections will are created,
-        # but not saved after the first use. Blocks when no free connections are available.
-        # urllib3 ensures that no more than pool_maxsize connections are used at a time.
-        # Prevents platform from flooding. By default, requests library doesn't block.
-        pool_block = True
-
-        # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
-        # retry strategy established in the Databricks SDK for Go. See _is_retryable and
-        # @retried for more details.
-        http_adapter = HTTPAdapter(pool_connections=pool_connections,
-                                   pool_maxsize=pool_maxsize,
-                                   pool_block=pool_block)
-        self._session.mount("https://", http_adapter)
-
-        # Default to 60 seconds
-        self._http_timeout_seconds = cfg.http_timeout_seconds if cfg.http_timeout_seconds else 60
-
-        self._error_parser = _Parser(extra_error_customizers=[_AddDebugErrorCustomizer(cfg)])
+        self._api_client = _BaseClient(debug_truncate_bytes=cfg.debug_truncate_bytes,
+                                       retry_timeout_seconds=cfg.retry_timeout_seconds,
+                                       user_agent_base=cfg.user_agent,
+                                       header_factory=cfg.authenticate,
+                                       max_connection_pools=cfg.max_connection_pools,
+                                       max_connections_per_pool=cfg.max_connections_per_pool,
+                                       pool_block=True,
+                                       http_timeout_seconds=cfg.http_timeout_seconds,
+                                       extra_error_customizers=[_AddDebugErrorCustomizer(cfg)],
+                                       clock=cfg.clock)
 
     @property
     def account_id(self) -> str:
@@ -81,40 +41,6 @@ def account_id(self) -> str:
     def is_account_client(self) -> bool:
         return self._cfg.is_account_client
 
-    def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
-        headers = self._cfg.authenticate()
-        for k, v in headers.items():
-            r.headers[k] = v
-        return r
-
-    @staticmethod
-    def _fix_query_string(query: Optional[dict] = None) -> Optional[dict]:
-        # Convert True -> "true" for Databricks APIs to understand booleans.
-        # See: https://github.com/databricks/databricks-sdk-py/issues/142
-        if query is None:
-            return None
-        with_fixed_bools = {k: v if type(v) != bool else ('true' if v else 'false') for k, v in query.items()}
-
-        # Query parameters may be nested, e.g.
-        # {'filter_by': {'user_ids': [123, 456]}}
-        # The HTTP-compatible representation of this is
-        # filter_by.user_ids=123&filter_by.user_ids=456
-        # To achieve this, we convert the above dictionary to
-        # {'filter_by.user_ids': [123, 456]}
-        # See the following for more information:
-        # https://cloud.google.com/endpoints/docs/grpc-service-config/reference/rpc/google.api#google.api.HttpRule
-        def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
-            for k1, v1 in d.items():
-                if isinstance(v1, dict):
-                    v1 = dict(flatten_dict(v1))
-                    for k2, v2 in v1.items():
-                        yield f"{k1}.{k2}", v2
-                else:
-                    yield k1, v1
-
-        flattened = dict(flatten_dict(with_fixed_bools))
-        return flattened
-
     def get_oauth_token(self, auth_details: str) -> Token:
         if not self._cfg.auth_type:
             self._cfg.authenticate()
@@ -142,115 +68,22 @@ def do(self,
            files=None,
            data=None,
            auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None,
-           response_headers: List[str] = None) -> Union[dict, BinaryIO]:
-        if headers is None:
-            headers = {}
+           response_headers: List[str] = None) -> Union[dict, list, BinaryIO]:
         if url is None:
             # Remove extra `/` from path for Files API
             # Once we've fixed the OpenAPI spec, we can remove this
             path = re.sub('^/api/2.0/fs/files//', '/api/2.0/fs/files/', path)
             url = f"{self._cfg.host}{path}"
-        headers['User-Agent'] = self._user_agent_base
-        retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
-                            is_retryable=self._is_retryable,
-                            clock=self._cfg.clock)
-        response = retryable(self._perform)(method,
-                                            url,
-                                            query=query,
-                                            headers=headers,
-                                            body=body,
-                                            raw=raw,
-                                            files=files,
-                                            data=data,
-                                            auth=auth)
-
-        resp = dict()
-        for header in response_headers if response_headers else []:
-            resp[header] = response.headers.get(Casing.to_header_case(header))
-        if raw:
-            resp["contents"] = StreamingResponse(response)
-            return resp
-        if not len(response.content):
-            return resp
-
-        jsonResponse = response.json()
-        if jsonResponse is None:
-            return resp
-
-        if isinstance(jsonResponse, list):
-            return jsonResponse
-
-        return {**resp, **jsonResponse}
-
-    @staticmethod
-    def _is_retryable(err: BaseException) -> Optional[str]:
-        # this method is Databricks-specific port of urllib3 retries
-        # (see https://github.com/urllib3/urllib3/blob/main/src/urllib3/util/retry.py)
-        # and Databricks SDK for Go retries
-        # (see https://github.com/databricks/databricks-sdk-go/blob/main/apierr/errors.go)
-        from urllib3.exceptions import ProxyError
-        if isinstance(err, ProxyError):
-            err = err.original_error
-        if isinstance(err, requests.ConnectionError):
-            # corresponds to `connection reset by peer` and `connection refused` errors from Go,
-            # which are generally related to the temporary glitches in the networking stack,
-            # also caused by endpoint protection software, like ZScaler, to drop connections while
-            # not yet authenticated.
-            #
-            # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
-            # will bubble up the original exception in case we reach max retries.
-            return f'cannot connect'
-        if isinstance(err, requests.Timeout):
-            # corresponds to `TLS handshake timeout` and `i/o timeout` in Go.
-            #
-            # return a simple string for debug log readability, as `raise TimeoutError(...) from err`
-            # will bubble up the original exception in case we reach max retries.
-            return f'timeout'
-        if isinstance(err, DatabricksError):
-            message = str(err)
-            transient_error_string_matches = [
-                "com.databricks.backend.manager.util.UnknownWorkerEnvironmentException",
-                "does not have any associated worker environments", "There is no worker environment with id",
-                "Unknown worker environment", "ClusterNotReadyException", "Unexpected error",
-                "Please try again later or try a faster operation.",
-                "RPC token bucket limit has been exceeded",
-            ]
-            for substring in transient_error_string_matches:
-                if substring not in message:
-                    continue
-                return f'matched {substring}'
-        return None
-
-    def _perform(self,
-                 method: str,
-                 url: str,
-                 query: dict = None,
-                 headers: dict = None,
-                 body: dict = None,
-                 raw: bool = False,
-                 files=None,
-                 data=None,
-                 auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
-        response = self._session.request(method,
-                                         url,
-                                         params=self._fix_query_string(query),
-                                         json=body,
-                                         headers=headers,
-                                         files=files,
-                                         data=data,
-                                         auth=auth,
-                                         stream=raw,
-                                         timeout=self._http_timeout_seconds)
-        self._record_request_log(response, raw=raw or data is not None or files is not None)
-        error = self._error_parser.get_api_error(response)
-        if error is not None:
-            raise error from None
-        return response
-
-    def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
-        if not logger.isEnabledFor(logging.DEBUG):
-            return
-        logger.debug(RoundTrip(response, self._cfg.debug_headers, self._debug_truncate_bytes, raw).generate())
+        return self._api_client.do(method=method,
+                                   url=url,
+                                   query=query,
+                                   headers=headers,
+                                   body=body,
+                                   raw=raw,
+                                   files=files,
+                                   data=data,
+                                   auth=auth,
+                                   response_headers=response_headers)
 
 
 class _AddDebugErrorCustomizer(_ErrorCustomizer):
@@ -264,103 +97,3 @@ def customize_error(self, response: requests.Response, kwargs: dict):
         if response.status_code in (401, 403):
             message = kwargs.get('message', 'request failed')
             kwargs['message'] = self._cfg.wrap_debug_info(message)
-
-
-class StreamingResponse(BinaryIO):
-    _response: requests.Response
-    _buffer: bytes
-    _content: Union[Iterator[bytes], None]
-    _chunk_size: Union[int, None]
-    _closed: bool = False
-
-    def fileno(self) -> int:
-        pass
-
-    def flush(self) -> int:
-        pass
-
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
-        self._response = response
-        self._buffer = b''
-        self._content = None
-        self._chunk_size = chunk_size
-
-    def _open(self) -> None:
-        if self._closed:
-            raise ValueError("I/O operation on closed file")
-        if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
-
-    def __enter__(self) -> BinaryIO:
-        self._open()
-        return self
-
-    def set_chunk_size(self, chunk_size: Union[int, None]) -> None:
-        self._chunk_size = chunk_size
-
-    def close(self) -> None:
-        self._response.close()
-        self._closed = True
-
-    def isatty(self) -> bool:
-        return False
-
-    def read(self, n: int = -1) -> bytes:
-        self._open()
-        read_everything = n < 0
-        remaining_bytes = n
-        res = b''
-        while remaining_bytes > 0 or read_everything:
-            if len(self._buffer) == 0:
-                try:
-                    self._buffer = next(self._content)
-                except StopIteration:
-                    break
-            bytes_available = len(self._buffer)
-            to_read = bytes_available if read_everything else min(remaining_bytes, bytes_available)
-            res += self._buffer[:to_read]
-            self._buffer = self._buffer[to_read:]
-            remaining_bytes -= to_read
-        return res
-
-    def readable(self) -> bool:
-        return self._content is not None
-
-    def readline(self, __limit: int = ...) -> bytes:
-        raise NotImplementedError()
-
-    def readlines(self, __hint: int = ...) -> List[bytes]:
-        raise NotImplementedError()
-
-    def seek(self, __offset: int, __whence: int = ...) -> int:
-        raise NotImplementedError()
-
-    def seekable(self) -> bool:
-        return False
-
-    def tell(self) -> int:
-        raise NotImplementedError()
-
-    def truncate(self, __size: Union[int, None] = ...) -> int:
-        raise NotImplementedError()
-
-    def writable(self) -> bool:
-        return False
-
-    def write(self, s: Union[bytes, bytearray]) -> int:
-        raise NotImplementedError()
-
-    def writelines(self, lines: Iterable[bytes]) -> None:
-        raise NotImplementedError()
-
-    def __next__(self) -> bytes:
-        return self.read(1)
-
-    def __iter__(self) -> Iterator[bytes]:
-        return self._content
-
-    def __exit__(self, t: Union[Type[BaseException], None], value: Union[BaseException, None],
-                 traceback: Union[TracebackType, None]) -> None:
-        self._content = None
-        self._buffer = b''
-        self.close()
diff --git a/tests/fixture_server.py b/tests/fixture_server.py
new file mode 100644
index 000000000..e15f9cf2d
--- /dev/null
+++ b/tests/fixture_server.py
@@ -0,0 +1,31 @@
+import contextlib
+import functools
+import typing
+from http.server import BaseHTTPRequestHandler
+
+
+@contextlib.contextmanager
+def http_fixture_server(handler: typing.Callable[[BaseHTTPRequestHandler], None]):
+    from http.server import HTTPServer
+    from threading import Thread
+
+    class _handler(BaseHTTPRequestHandler):
+
+        def __init__(self, handler: typing.Callable[[BaseHTTPRequestHandler], None], *args):
+            self._handler = handler
+            super().__init__(*args)
+
+        def __getattr__(self, item):
+            if 'do_' != item[0:3]:
+                raise AttributeError(f'method {item} not found')
+            return functools.partial(self._handler, self)
+
+    handler_factory = functools.partial(_handler, handler)
+    srv = HTTPServer(('localhost', 0), handler_factory)
+    t = Thread(target=srv.serve_forever)
+    try:
+        t.daemon = True
+        t.start()
+        yield 'http://{0}:{1}'.format(*srv.server_address)
+    finally:
+        srv.shutdown()
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
new file mode 100644
index 000000000..e9e7324a9
--- /dev/null
+++ b/tests/test_base_client.py
@@ -0,0 +1,278 @@
+from http.server import BaseHTTPRequestHandler
+from typing import Iterator, List
+
+import pytest
+import requests
+
+from databricks.sdk import errors, useragent
+from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk.core import DatabricksError
+
+from .clock import FakeClock
+from .fixture_server import http_fixture_server
+
+
+class DummyResponse(requests.Response):
+    _content: Iterator[bytes]
+    _closed: bool = False
+
+    def __init__(self, content: List[bytes]) -> None:
+        super().__init__()
+        self._content = iter(content)
+
+    def iter_content(self, chunk_size: int = 1, decode_unicode=False) -> Iterator[bytes]:
+        return self._content
+
+    def close(self):
+        self._closed = True
+
+    def isClosed(self):
+        return self._closed
+
+
+def test_streaming_response_read(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content]))
+    assert response.read() == content
+
+
+def test_streaming_response_read_partial(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content]))
+    assert response.read(8) == b"some ini"
+
+
+def test_streaming_response_read_full(config):
+    content = b"some initial binary data: \x00\x01"
+    response = _StreamingResponse(DummyResponse([content, content]))
+    assert response.read() == content + content
+
+
+def test_streaming_response_read_closes(config):
+    content = b"some initial binary data: \x00\x01"
+    dummy_response = DummyResponse([content])
+    with _StreamingResponse(dummy_response) as response:
+        assert response.read() == content
+    assert dummy_response.isClosed()
+
+
+@pytest.mark.parametrize('status_code,headers,body,expected_error', [
+    (400, {}, {
+        "message":
+        "errorMessage",
+        "details": [{
+            "type": DatabricksError._error_info_type,
+            "reason": "error reason",
+            "domain": "error domain",
+            "metadata": {
+                "etag": "error etag"
+            },
+        }, {
+            "type": "wrong type",
+            "reason": "wrong reason",
+            "domain": "wrong domain",
+            "metadata": {
+                "etag": "wrong etag"
+            }
+        }],
+    },
+     errors.BadRequest('errorMessage',
+                       details=[{
+                           'type': DatabricksError._error_info_type,
+                           'reason': 'error reason',
+                           'domain': 'error domain',
+                           'metadata': {
+                               'etag': 'error etag'
+                           },
+                       }])),
+    (401, {}, {
+        'error_code': 'UNAUTHORIZED',
+        'message': 'errorMessage',
+    }, errors.Unauthenticated('errorMessage', error_code='UNAUTHORIZED')),
+    (403, {}, {
+        'error_code': 'FORBIDDEN',
+        'message': 'errorMessage',
+    }, errors.PermissionDenied('errorMessage', error_code='FORBIDDEN')),
+    (429, {}, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=1)),
+    (429, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TOO_MANY_REQUESTS',
+        'message': 'errorMessage',
+    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=100)),
+    (503, {}, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    }, errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                     retry_after_secs=1)),
+    (503, {
+        'Retry-After': '100'
+    }, {
+        'error_code': 'TEMPORARILY_UNAVAILABLE',
+        'message': 'errorMessage',
+    },
+     errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
+                                   retry_after_secs=100)),
+    (404, {}, {
+        'scimType': 'scim type',
+        'detail': 'detail',
+        'status': 'status',
+    }, errors.NotFound('scim type detail', error_code='SCIM_status')),
+])
+def test_error(requests_mock, status_code, headers, body, expected_error):
+    client = _BaseClient(clock=FakeClock())
+    requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
+    with pytest.raises(DatabricksError) as raised:
+        client._perform("GET", "https://localhost/test", headers={"test": "test"})
+    actual = raised.value
+    assert isinstance(actual, type(expected_error))
+    assert str(actual) == str(expected_error)
+    assert actual.error_code == expected_error.error_code
+    assert actual.retry_after_secs == expected_error.retry_after_secs
+    expected_error_infos, actual_error_infos = expected_error.get_error_info(), actual.get_error_info()
+    assert len(expected_error_infos) == len(actual_error_infos)
+    for expected, actual in zip(expected_error_infos, actual_error_infos):
+        assert expected.type == actual.type
+        assert expected.reason == actual.reason
+        assert expected.domain == actual.domain
+        assert expected.metadata == actual.metadata
+
+
+def test_api_client_do_custom_headers(requests_mock):
+    client = _BaseClient()
+    requests_mock.get("/test",
+                      json={"well": "done"},
+                      request_headers={
+                          "test": "test",
+                          "User-Agent": useragent.to_string()
+                      })
+    res = client.do("GET", "https://localhost/test", headers={"test": "test"})
+    assert res == {"well": "done"}
+
+
+@pytest.mark.parametrize('status_code,include_retry_after',
+                         ((429, False), (429, True), (503, False), (503, True)))
+def test_http_retry_after(status_code, include_retry_after):
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(status_code)
+            if include_retry_after:
+                h.send_header('Retry-After', '1')
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+        else:
+            h.send_response(200)
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_retry_after_wrong_format():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(429)
+            h.send_header('Retry-After', '1.58')
+            h.end_headers()
+        else:
+            h.send_response(200)
+            h.send_header('Content-Type', 'application/json')
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_retried_exceed_limit():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        h.send_response(429)
+        h.send_header('Retry-After', '1')
+        h.end_headers()
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(retry_timeout_seconds=1, clock=FakeClock())
+        with pytest.raises(TimeoutError):
+            api_client.do('GET', f'{host}/foo')
+
+    assert len(requests) == 1
+
+
+def test_http_retried_on_match():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(400)
+            h.end_headers()
+            h.wfile.write(b'{"error_code": "abc", "message": "... ClusterNotReadyException ..."}')
+        else:
+            h.send_response(200)
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
+
+
+def test_http_not_retried_on_normal_errors():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) == 0:
+            h.send_response(400)
+            h.end_headers()
+            h.wfile.write(b'{"error_code": "abc", "message": "something not found"}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        with pytest.raises(DatabricksError):
+            api_client.do('GET', f'{host}/foo')
+
+    assert len(requests) == 1
+
+
+def test_http_retried_on_connection_error():
+    requests = []
+
+    def inner(h: BaseHTTPRequestHandler):
+        if len(requests) > 0:
+            h.send_response(200)
+            h.end_headers()
+            h.wfile.write(b'{"foo": 1}')
+        requests.append(h.requestline)
+
+    with http_fixture_server(inner) as host:
+        api_client = _BaseClient(clock=FakeClock())
+        res = api_client.do('GET', f'{host}/foo')
+        assert 'foo' in res
+
+    assert len(requests) == 2
diff --git a/tests/test_core.py b/tests/test_core.py
index d54563d4e..16a4c2ad6 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -1,21 +1,15 @@
-import contextlib
-import functools
 import os
 import pathlib
 import platform
 import random
 import string
-import typing
 from datetime import datetime
 from http.server import BaseHTTPRequestHandler
-from typing import Iterator, List
 
 import pytest
-import requests
 
 from databricks.sdk import WorkspaceClient, errors
-from databricks.sdk.core import (ApiClient, Config, DatabricksError,
-                                 StreamingResponse)
+from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
                                                  CredentialsStrategy,
@@ -28,8 +22,8 @@
 from databricks.sdk.service.iam import AccessControlRequest
 from databricks.sdk.version import __version__
 
-from .clock import FakeClock
 from .conftest import noop_credentials
+from .fixture_server import http_fixture_server
 
 
 def test_parse_dsn():
@@ -80,32 +74,6 @@ def write_small_dummy_executable(path: pathlib.Path):
     return cli
 
 
-def test_streaming_response_read(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content]))
-    assert response.read() == content
-
-
-def test_streaming_response_read_partial(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content]))
-    assert response.read(8) == b"some ini"
-
-
-def test_streaming_response_read_full(config):
-    content = b"some initial binary data: \x00\x01"
-    response = StreamingResponse(DummyResponse([content, content]))
-    assert response.read() == content + content
-
-
-def test_streaming_response_read_closes(config):
-    content = b"some initial binary data: \x00\x01"
-    dummy_response = DummyResponse([content])
-    with StreamingResponse(dummy_response) as response:
-        assert response.read() == content
-    assert dummy_response.isClosed()
-
-
 def write_large_dummy_executable(path: pathlib.Path):
     cli = path.joinpath('databricks')
 
@@ -290,36 +258,6 @@ def test_config_parsing_non_string_env_vars(monkeypatch):
     assert c.debug_truncate_bytes == 100
 
 
-class DummyResponse(requests.Response):
-    _content: Iterator[bytes]
-    _closed: bool = False
-
-    def __init__(self, content: List[bytes]) -> None:
-        super().__init__()
-        self._content = iter(content)
-
-    def iter_content(self, chunk_size: int = 1, decode_unicode=False) -> Iterator[bytes]:
-        return self._content
-
-    def close(self):
-        self._closed = True
-
-    def isClosed(self):
-        return self._closed
-
-
-def test_api_client_do_custom_headers(config, requests_mock):
-    client = ApiClient(config)
-    requests_mock.get("/test",
-                      json={"well": "done"},
-                      request_headers={
-                          "test": "test",
-                          "User-Agent": config.user_agent
-                      })
-    res = client.do("GET", "/test", headers={"test": "test"})
-    assert res == {"well": "done"}
-
-
 def test_access_control_list(config, requests_mock):
     requests_mock.post("http://localhost/api/2.1/jobs/create",
                        request_headers={"User-Agent": config.user_agent})
@@ -359,81 +297,25 @@ def test_deletes(config, requests_mock):
     assert res is None
 
 
-@pytest.mark.parametrize('status_code,headers,body,expected_error', [
-    (400, {}, {
-        "message":
-        "errorMessage",
-        "details": [{
-            "type": DatabricksError._error_info_type,
-            "reason": "error reason",
-            "domain": "error domain",
-            "metadata": {
-                "etag": "error etag"
-            },
-        }, {
-            "type": "wrong type",
-            "reason": "wrong reason",
-            "domain": "wrong domain",
-            "metadata": {
-                "etag": "wrong etag"
-            }
-        }],
-    },
-     errors.BadRequest('errorMessage',
-                       details=[{
-                           'type': DatabricksError._error_info_type,
-                           'reason': 'error reason',
-                           'domain': 'error domain',
-                           'metadata': {
-                               'etag': 'error etag'
-                           },
-                       }])),
-    (401, {}, {
+@pytest.mark.parametrize(
+    'status_code,headers,body,expected_error',
+    [(401, {}, {
         'error_code': 'UNAUTHORIZED',
         'message': 'errorMessage',
     },
-     errors.Unauthenticated('errorMessage. Config: host=http://localhost, auth_type=noop',
-                            error_code='UNAUTHORIZED')),
-    (403, {}, {
-        'error_code': 'FORBIDDEN',
-        'message': 'errorMessage',
-    },
-     errors.PermissionDenied('errorMessage. Config: host=http://localhost, auth_type=noop',
-                             error_code='FORBIDDEN')),
-    (429, {}, {
-        'error_code': 'TOO_MANY_REQUESTS',
-        'message': 'errorMessage',
-    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=1)),
-    (429, {
-        'Retry-After': '100'
-    }, {
-        'error_code': 'TOO_MANY_REQUESTS',
-        'message': 'errorMessage',
-    }, errors.TooManyRequests('errorMessage', error_code='TOO_MANY_REQUESTS', retry_after_secs=100)),
-    (503, {}, {
-        'error_code': 'TEMPORARILY_UNAVAILABLE',
-        'message': 'errorMessage',
-    }, errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
-                                     retry_after_secs=1)),
-    (503, {
-        'Retry-After': '100'
-    }, {
-        'error_code': 'TEMPORARILY_UNAVAILABLE',
-        'message': 'errorMessage',
-    },
-     errors.TemporarilyUnavailable('errorMessage', error_code='TEMPORARILY_UNAVAILABLE',
-                                   retry_after_secs=100)),
-    (404, {}, {
-        'scimType': 'scim type',
-        'detail': 'detail',
-        'status': 'status',
-    }, errors.NotFound('scim type detail', error_code='SCIM_status')),
-])
+      errors.Unauthenticated('errorMessage. Config: host=http://localhost, auth_type=noop',
+                             error_code='UNAUTHORIZED')),
+     (403, {}, {
+         'error_code': 'FORBIDDEN',
+         'message': 'errorMessage',
+     },
+      errors.PermissionDenied('errorMessage. Config: host=http://localhost, auth_type=noop',
+                              error_code='FORBIDDEN')), ])
 def test_error(config, requests_mock, status_code, headers, body, expected_error):
     client = ApiClient(config)
     requests_mock.get("/test", json=body, status_code=status_code, headers=headers)
     with pytest.raises(DatabricksError) as raised:
-        client._perform("GET", "http://localhost/test", headers={"test": "test"})
+        client.do("GET", "/test", headers={"test": "test"})
     actual = raised.value
     assert isinstance(actual, type(expected_error))
     assert str(actual) == str(expected_error)
@@ -448,158 +330,6 @@ def test_error(config, requests_mock, status_code, headers, body, expected_error
         assert expected.metadata == actual.metadata
 
 
-@contextlib.contextmanager
-def http_fixture_server(handler: typing.Callable[[BaseHTTPRequestHandler], None]):
-    from http.server import HTTPServer
-    from threading import Thread
-
-    class _handler(BaseHTTPRequestHandler):
-
-        def __init__(self, handler: typing.Callable[[BaseHTTPRequestHandler], None], *args):
-            self._handler = handler
-            super().__init__(*args)
-
-        def __getattr__(self, item):
-            if 'do_' != item[0:3]:
-                raise AttributeError(f'method {item} not found')
-            return functools.partial(self._handler, self)
-
-    handler_factory = functools.partial(_handler, handler)
-    srv = HTTPServer(('localhost', 0), handler_factory)
-    t = Thread(target=srv.serve_forever)
-    try:
-        t.daemon = True
-        t.start()
-        yield 'http://{0}:{1}'.format(*srv.server_address)
-    finally:
-        srv.shutdown()
-
-
-@pytest.mark.parametrize('status_code,include_retry_after',
-                         ((429, False), (429, True), (503, False), (503, True)))
-def test_http_retry_after(status_code, include_retry_after):
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(status_code)
-            if include_retry_after:
-                h.send_header('Retry-After', '1')
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-        else:
-            h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_retry_after_wrong_format():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(429)
-            h.send_header('Retry-After', '1.58')
-            h.end_headers()
-        else:
-            h.send_response(200)
-            h.send_header('Content-Type', 'application/json')
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_retried_exceed_limit():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        h.send_response(429)
-        h.send_header('Retry-After', '1')
-        h.end_headers()
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', retry_timeout_seconds=1, clock=FakeClock()))
-        with pytest.raises(TimeoutError):
-            api_client.do('GET', '/foo')
-
-    assert len(requests) == 1
-
-
-def test_http_retried_on_match():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(400)
-            h.end_headers()
-            h.wfile.write(b'{"error_code": "abc", "message": "... ClusterNotReadyException ..."}')
-        else:
-            h.send_response(200)
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
-def test_http_not_retried_on_normal_errors():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) == 0:
-            h.send_response(400)
-            h.end_headers()
-            h.wfile.write(b'{"error_code": "abc", "message": "something not found"}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        with pytest.raises(DatabricksError):
-            api_client.do('GET', '/foo')
-
-    assert len(requests) == 1
-
-
-def test_http_retried_on_connection_error():
-    requests = []
-
-    def inner(h: BaseHTTPRequestHandler):
-        if len(requests) > 0:
-            h.send_response(200)
-            h.end_headers()
-            h.wfile.write(b'{"foo": 1}')
-        requests.append(h.requestline)
-
-    with http_fixture_server(inner) as host:
-        api_client = ApiClient(Config(host=host, token='_', clock=FakeClock()))
-        res = api_client.do('GET', '/foo')
-        assert 'foo' in res
-
-    assert len(requests) == 2
-
-
 def test_github_oidc_flow_works_with_azure(monkeypatch):
 
     def inner(h: BaseHTTPRequestHandler):

From 0b8b9acd040110dc008e8aa20ff1e6470e514d29 Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Mon, 7 Oct 2024 09:49:03 -0400
Subject: [PATCH 051/136] [Fix] Include package name for external types when
 deserializing responses (#786)

## Changes
#683 is caused by a small bug in the template used to generate the
Python SDK. When referring to a class defined in a separate API package,
only the module is imported, not the exact class, so the generated code
needs to use the qualified name of the structure.

Fixes #683.
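
For example, `share_permissions` in `databricks/sdk/service/sharing.py` returns a `PermissionsList` that lives in the `catalog` package. A minimal sketch of why the qualified reference is needed (the empty payload is a placeholder, not real API output):

```python
from databricks.sdk.service import catalog

# The old template emitted the bare class name:
#     return PermissionsList.from_dict(res)
# which raises NameError at call time, because sharing.py only imports
# the catalog *module*. The fixed template resolves the class through
# its package:
res = {}  # placeholder response body
permissions = catalog.PermissionsList.from_dict(res)
print(permissions)
```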

## Tests


- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 .codegen/service.py.tmpl          | 2 +-
 databricks/sdk/service/sharing.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl
index b4e6b1dc9..4307e0913 100644
--- a/.codegen/service.py.tmpl
+++ b/.codegen/service.py.tmpl
@@ -350,7 +350,7 @@ class {{.PascalName}}API:{{if .Description}}
           {{- else if .Response.MapValue -}}
             return res
           {{- else -}}
-            return {{.Response.PascalName}}.from_dict(res)
+            return {{template "type" .Response}}.from_dict(res)
           {{- end}}
         {{- end}}
 {{- end}}
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 23a31e774..772bc7aee 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -2496,7 +2496,7 @@ def share_permissions(self,
                            f'/api/2.1/unity-catalog/shares/{name}/permissions',
                            query=query,
                            headers=headers)
-        return PermissionsList.from_dict(res)
+        return catalog.PermissionsList.from_dict(res)
 
     def update(self,
                name: str,

From b0e4192f8969e8fa615bede19b494b97078f9d65 Mon Sep 17 00:00:00 2001
From: Parth Bansal 
Date: Mon, 7 Oct 2024 16:20:32 +0200
Subject: [PATCH 052/136] [Internal] Update to latest OpenAPI spec (#787)

## Changes

Update to latest OpenAPI spec
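
Among other things, the regenerated code adds a `resources` field to the Apps API and a new `disable_legacy_dbfs` workspace setting. A minimal sketch of the new surface, assuming a configured `WorkspaceClient` (the app name and warehouse id below are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import (
    AppResource, AppResourceSqlWarehouse,
    AppResourceSqlWarehouseSqlWarehousePermission)

w = WorkspaceClient()  # credentials come from the environment

# Apps can now declare the resources they depend on.
warehouse = AppResourceSqlWarehouse(
    id="1234567890abcdef",  # placeholder warehouse id
    permission=AppResourceSqlWarehouseSqlWarehousePermission.CAN_USE)
app = w.apps.create_and_wait(
    name="my-app",
    resources=[AppResource(name="warehouse", sql_warehouse=warehouse)])

# The new workspace-level setting is exposed under w.settings.
print(w.settings.disable_legacy_dbfs.get().disable_legacy_dbfs)
```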

## Tests


- [x] `make test` run locally
- [x] `make fmt` applied
- [ ] relevant integration tests applied
---
 .codegen/_openapi_sha                         |   2 +-
 databricks/sdk/__init__.py                    |   1 +
 databricks/sdk/service/apps.py                | 219 +++++++++++++++++-
 databricks/sdk/service/catalog.py             |   7 +-
 databricks/sdk/service/dashboards.py          |   4 +-
 databricks/sdk/service/jobs.py                |   6 +
 databricks/sdk/service/settings.py            | 179 ++++++++++++++
 databricks/sdk/service/sql.py                 |  10 +-
 databricks/sdk/service/workspace.py           |   4 +-
 .../settings/disable_legacy_features.rst      |  60 +++++
 docs/account/settings/index.rst               |   1 +
 docs/account/settings/settings.rst            |   9 +
 docs/dbdataclasses/apps.rst                   | 125 ++++++++--
 docs/dbdataclasses/catalog.rst                |  41 ++++
 docs/dbdataclasses/compute.rst                |   4 +-
 docs/dbdataclasses/dashboards.rst             |   5 +-
 docs/dbdataclasses/jobs.rst                   |  10 +-
 docs/dbdataclasses/serving.rst                |  59 +++++
 docs/dbdataclasses/settings.rst               |  43 ++++
 docs/dbdataclasses/sql.rst                    |  97 --------
 docs/dbdataclasses/workspace.rst              |  34 ++-
 docs/workspace/apps/apps.rst                  |  50 ++--
 docs/workspace/catalog/index.rst              |   1 +
 docs/workspace/catalog/tables.rst             |  12 +-
 .../catalog/temporary_table_credentials.rst   |  36 +++
 docs/workspace/compute/clusters.rst           |  23 +-
 docs/workspace/dashboards/lakeview.rst        |  12 +-
 docs/workspace/jobs/jobs.rst                  |  16 +-
 docs/workspace/pipelines/pipelines.rst        |  14 +-
 docs/workspace/serving/serving_endpoints.rst  |  38 ++-
 .../settings/disable_legacy_access.rst        |  61 +++++
 .../settings/disable_legacy_dbfs.rst          |  57 +++++
 docs/workspace/settings/index.rst             |   2 +
 docs/workspace/settings/settings.rst          |  16 ++
 docs/workspace/sql/statement_execution.rst    |   4 +-
 docs/workspace/sql/warehouses.rst             |   3 +-
 docs/workspace/workspace/git_credentials.rst  |  26 +--
 docs/workspace/workspace/repos.rst            |  31 +--
 38 files changed, 1104 insertions(+), 218 deletions(-)
 create mode 100644 docs/account/settings/disable_legacy_features.rst
 create mode 100644 docs/workspace/catalog/temporary_table_credentials.rst
 create mode 100644 docs/workspace/settings/disable_legacy_access.rst
 create mode 100644 docs/workspace/settings/disable_legacy_dbfs.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index e9f9e0a0e..7f9f41bb8 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-248f4ad9668661da9d0bf4a7b0119a2d44fd1e75
\ No newline at end of file
+bc17b474818138f19b78a7bea0675707dead2b87
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 617f2cee2..848272198 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -71,6 +71,7 @@
                                              CspEnablementAccountAPI,
                                              DefaultNamespaceAPI,
                                              DisableLegacyAccessAPI,
+                                             DisableLegacyDbfsAPI,
                                              DisableLegacyFeaturesAPI,
                                              EnhancedSecurityMonitoringAPI,
                                              EsmEnablementAccountAPI,
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 9cafe235e..63bc981ba 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -25,7 +25,8 @@ class App:
     It must be unique within the workspace."""
 
     active_deployment: Optional[AppDeployment] = None
-    """The active deployment of the app."""
+    """The active deployment of the app. A deployment is considered active when it has been deployed to
+    the app compute."""
 
     app_status: Optional[ApplicationStatus] = None
 
@@ -37,11 +38,19 @@ class App:
     creator: Optional[str] = None
     """The email of the user that created the app."""
 
+    default_source_code_path: Optional[str] = None
+    """The default workspace file system path of the source code from which app deployment are created.
+    This field tracks the workspace source code path of the last active deployment."""
+
     description: Optional[str] = None
     """The description of the app."""
 
     pending_deployment: Optional[AppDeployment] = None
-    """The pending deployment of the app."""
+    """The pending deployment of the app. A deployment is considered pending when it is being prepared
+    for deployment to the app compute."""
+
+    resources: Optional[List[AppResource]] = None
+    """Resources for the app."""
 
     service_principal_id: Optional[int] = None
 
@@ -64,9 +73,12 @@ def as_dict(self) -> dict:
         if self.compute_status: body['compute_status'] = self.compute_status.as_dict()
         if self.create_time is not None: body['create_time'] = self.create_time
         if self.creator is not None: body['creator'] = self.creator
+        if self.default_source_code_path is not None:
+            body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
+        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
         if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
         if self.service_principal_name is not None:
             body['service_principal_name'] = self.service_principal_name
@@ -83,9 +95,11 @@ def from_dict(cls, d: Dict[str, any]) -> App:
                    compute_status=_from_dict(d, 'compute_status', ComputeStatus),
                    create_time=d.get('create_time', None),
                    creator=d.get('creator', None),
+                   default_source_code_path=d.get('default_source_code_path', None),
                    description=d.get('description', None),
                    name=d.get('name', None),
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
+                   resources=_repeated_dict(d, 'resources', AppResource),
                    service_principal_id=d.get('service_principal_id', None),
                    service_principal_name=d.get('service_principal_name', None),
                    update_time=d.get('update_time', None),
@@ -372,6 +386,170 @@ def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
                    app_name=d.get('app_name', None))
 
 
+@dataclass
+class AppResource:
+    name: str
+    """Name of the App Resource."""
+
+    description: Optional[str] = None
+    """Description of the App Resource."""
+
+    job: Optional[AppResourceJob] = None
+
+    secret: Optional[AppResourceSecret] = None
+
+    serving_endpoint: Optional[AppResourceServingEndpoint] = None
+
+    sql_warehouse: Optional[AppResourceSqlWarehouse] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AppResource into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.job: body['job'] = self.job.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.secret: body['secret'] = self.secret.as_dict()
+        if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint.as_dict()
+        if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppResource:
+        """Deserializes the AppResource from a dictionary."""
+        return cls(description=d.get('description', None),
+                   job=_from_dict(d, 'job', AppResourceJob),
+                   name=d.get('name', None),
+                   secret=_from_dict(d, 'secret', AppResourceSecret),
+                   serving_endpoint=_from_dict(d, 'serving_endpoint', AppResourceServingEndpoint),
+                   sql_warehouse=_from_dict(d, 'sql_warehouse', AppResourceSqlWarehouse))
+
+
+@dataclass
+class AppResourceJob:
+    id: str
+    """Id of the job to grant permission on."""
+
+    permission: AppResourceJobJobPermission
+    """Permissions to grant on the Job. Supported permissions are: "CAN_MANAGE", "IS_OWNER",
+    "CAN_MANAGE_RUN", "CAN_VIEW"."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppResourceJob into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppResourceJob:
+        """Deserializes the AppResourceJob from a dictionary."""
+        return cls(id=d.get('id', None), permission=_enum(d, 'permission', AppResourceJobJobPermission))
+
+
+class AppResourceJobJobPermission(Enum):
+
+    CAN_MANAGE = 'CAN_MANAGE'
+    CAN_MANAGE_RUN = 'CAN_MANAGE_RUN'
+    CAN_VIEW = 'CAN_VIEW'
+    IS_OWNER = 'IS_OWNER'
+
+
+@dataclass
+class AppResourceSecret:
+    scope: str
+    """Scope of the secret to grant permission on."""
+
+    key: str
+    """Key of the secret to grant permission on."""
+
+    permission: AppResourceSecretSecretPermission
+    """Permission to grant on the secret scope. For secrets, only one permission is allowed. Permission
+    must be one of: "READ", "WRITE", "MANAGE"."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppResourceSecret into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.permission is not None: body['permission'] = self.permission.value
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret:
+        """Deserializes the AppResourceSecret from a dictionary."""
+        return cls(key=d.get('key', None),
+                   permission=_enum(d, 'permission', AppResourceSecretSecretPermission),
+                   scope=d.get('scope', None))
+
+
+class AppResourceSecretSecretPermission(Enum):
+    """Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE"."""
+
+    MANAGE = 'MANAGE'
+    READ = 'READ'
+    WRITE = 'WRITE'
+
+
+@dataclass
+class AppResourceServingEndpoint:
+    name: str
+    """Name of the serving endpoint to grant permission on."""
+
+    permission: AppResourceServingEndpointServingEndpointPermission
+    """Permission to grant on the serving endpoint. Supported permissions are: "CAN_MANAGE",
+    "CAN_QUERY", "CAN_VIEW"."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppResourceServingEndpoint into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.permission is not None: body['permission'] = self.permission.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint:
+        """Deserializes the AppResourceServingEndpoint from a dictionary."""
+        return cls(name=d.get('name', None),
+                   permission=_enum(d, 'permission', AppResourceServingEndpointServingEndpointPermission))
+
+
+class AppResourceServingEndpointServingEndpointPermission(Enum):
+
+    CAN_MANAGE = 'CAN_MANAGE'
+    CAN_QUERY = 'CAN_QUERY'
+    CAN_VIEW = 'CAN_VIEW'
+
+
+@dataclass
+class AppResourceSqlWarehouse:
+    id: str
+    """Id of the SQL warehouse to grant permission on."""
+
+    permission: AppResourceSqlWarehouseSqlWarehousePermission
+    """Permission to grant on the SQL warehouse. Supported permissions are: "CAN_MANAGE", "CAN_USE",
+    "IS_OWNER"."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AppResourceSqlWarehouse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse:
+        """Deserializes the AppResourceSqlWarehouse from a dictionary."""
+        return cls(id=d.get('id', None),
+                   permission=_enum(d, 'permission', AppResourceSqlWarehouseSqlWarehousePermission))
+
+
+class AppResourceSqlWarehouseSqlWarehousePermission(Enum):
+
+    CAN_MANAGE = 'CAN_MANAGE'
+    CAN_USE = 'CAN_USE'
+    IS_OWNER = 'IS_OWNER'
+
+
 class ApplicationState(Enum):
 
     CRASHED = 'CRASHED'
@@ -478,17 +656,23 @@ class CreateAppRequest:
     description: Optional[str] = None
     """The description of the app."""
 
+    resources: Optional[List[AppResource]] = None
+    """Resources for the app."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.description is not None: body['description'] = self.description
         if self.name is not None: body['name'] = self.name
+        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
         """Deserializes the CreateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None), name=d.get('name', None))
+        return cls(description=d.get('description', None),
+                   name=d.get('name', None),
+                   resources=_repeated_dict(d, 'resources', AppResource))
 
 
 @dataclass
@@ -571,17 +755,23 @@ class UpdateAppRequest:
     description: Optional[str] = None
     """The description of the app."""
 
+    resources: Optional[List[AppResource]] = None
+    """Resources for the app."""
+
     def as_dict(self) -> dict:
         """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.description is not None: body['description'] = self.description
         if self.name is not None: body['name'] = self.name
+        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
         """Deserializes the UpdateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None), name=d.get('name', None))
+        return cls(description=d.get('description', None),
+                   name=d.get('name', None),
+                   resources=_repeated_dict(d, 'resources', AppResource))
 
 
 class AppsAPI:
@@ -689,7 +879,11 @@ def wait_get_deployment_app_succeeded(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]:
+    def create(self,
+               name: str,
+               *,
+               description: Optional[str] = None,
+               resources: Optional[List[AppResource]] = None) -> Wait[App]:
         """Create an app.
         
         Creates a new app.
@@ -699,6 +893,8 @@ def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]:
           must be unique within the workspace.
         :param description: str (optional)
           The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
         
         :returns:
           Long-running operation waiter for :class:`App`.
@@ -707,6 +903,7 @@ def create(self, name: str, *, description: Optional[str] = None) -> Wait[App]:
         body = {}
         if description is not None: body['description'] = description
         if name is not None: body['name'] = name
+        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
@@ -716,8 +913,9 @@ def create_and_wait(self,
                         name: str,
                         *,
                         description: Optional[str] = None,
+                        resources: Optional[List[AppResource]] = None,
                         timeout=timedelta(minutes=20)) -> App:
-        return self.create(description=description, name=name).result(timeout=timeout)
+        return self.create(description=description, name=name, resources=resources).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
@@ -981,7 +1179,11 @@ def stop(self, name: str) -> Wait[App]:
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)
 
-    def update(self, name: str, *, description: Optional[str] = None) -> App:
+    def update(self,
+               name: str,
+               *,
+               description: Optional[str] = None,
+               resources: Optional[List[AppResource]] = None) -> App:
         """Update an app.
         
         Updates the app with the supplied name.
@@ -991,11 +1193,14 @@ def update(self, name: str, *, description: Optional[str] = None) -> App:
           must be unique within the workspace.
         :param description: str (optional)
           The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
         
         :returns: :class:`App`
         """
         body = {}
         if description is not None: body['description'] = description
+        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 9c795dc2a..2ccff4217 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -909,6 +909,7 @@ class ConnectionInfoSecurableKind(Enum):
     CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS'
     CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE'
     CONNECTION_GLUE = 'CONNECTION_GLUE'
+    CONNECTION_HTTP_BEARER = 'CONNECTION_HTTP_BEARER'
     CONNECTION_MYSQL = 'CONNECTION_MYSQL'
     CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG'
     CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL'
@@ -925,6 +926,7 @@ class ConnectionType(Enum):
     DATABRICKS = 'DATABRICKS'
     GLUE = 'GLUE'
     HIVE_METASTORE = 'HIVE_METASTORE'
+    HTTP = 'HTTP'
     MYSQL = 'MYSQL'
     POSTGRESQL = 'POSTGRESQL'
     REDSHIFT = 'REDSHIFT'
@@ -1676,6 +1678,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
 class CredentialType(Enum):
     """The type of credential."""
 
+    BEARER_TOKEN = 'BEARER_TOKEN'
     USERNAME_PASSWORD = 'USERNAME_PASSWORD'
 
 
@@ -2547,8 +2550,8 @@ class GenerateTemporaryTableCredentialResponse:
     https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
 
     expiration_time: Optional[int] = None
-    """Server time when the credential will expire, in unix epoch milliseconds since January 1, 1970 at
-    00:00:00 UTC. The API client is advised to cache the credential given this expiration time."""
+    """Server time when the credential will expire, in epoch milliseconds. The API client is advised to
+    cache the credential given this expiration time."""
 
     gcp_oauth_token: Optional[GcpOauthToken] = None
     """GCP temporary credentials for API authentication. Read more at
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 6e85cf45c..27117d43a 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -169,8 +169,8 @@ class Dashboard:
     trailing slash. This field is excluded in List Dashboards responses."""
 
     path: Optional[str] = None
-    """The workspace path of the dashboard asset, including the file name. This field is excluded in
-    List Dashboards responses."""
+    """The workspace path of the dashboard asset, including the file name. Exported dashboards always
+    have the file extension `.lvdash.json`. This field is excluded in List Dashboards responses."""
 
     serialized_dashboard: Optional[str] = None
     """The contents of the dashboard in serialized string form. This field is excluded in List
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index e7fbddb48..b3c723f37 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -2478,6 +2478,7 @@ class RepairRun:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3181,6 +3182,7 @@ class RunJobTask:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3340,6 +3342,7 @@ class RunNow:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -3549,6 +3552,7 @@ class RunParameters:
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
     pipeline_params: Optional[PipelineParams] = None
+    """Controls whether the pipeline should perform a full refresh"""
 
     python_named_params: Optional[Dict[str, str]] = None
 
@@ -6087,6 +6091,7 @@ def repair_run(self,
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -6276,6 +6281,7 @@ def run_now(self,
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index fd7ed5dd8..a6a235158 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -720,6 +720,30 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
         return cls(etag=d.get('etag', None))
 
 
+@dataclass
+class DeleteDisableLegacyDbfsResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyDbfsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
+        """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteDisableLegacyFeaturesResponse:
     """The etag is returned."""
@@ -863,6 +887,40 @@ def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class DisableLegacyDbfs:
+    disable_legacy_dbfs: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DisableLegacyDbfs into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
+        """Deserializes the DisableLegacyDbfs from a dictionary."""
+        return cls(disable_legacy_dbfs=_from_dict(d, 'disable_legacy_dbfs', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class DisableLegacyFeatures:
     disable_legacy_features: BooleanMessage
@@ -2534,6 +2592,36 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
                    setting=_from_dict(d, 'setting', DisableLegacyAccess))
 
 
+@dataclass
+class UpdateDisableLegacyDbfsRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: DisableLegacyDbfs
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as single string. To
+    specify multiple fields in the field mask, use comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
+        """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', DisableLegacyDbfs))
+
+
 @dataclass
 class UpdateDisableLegacyFeaturesRequest:
     """Details required to update a setting."""
@@ -3447,6 +3535,91 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess,
         return DisableLegacyAccess.from_dict(res)
 
 
+class DisableLegacyDbfsAPI:
+    """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+    mounts). When the setting is off, all DBFS functionality is enabled"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteDisableLegacyDbfsResponse:
+        """Delete the disable legacy DBFS setting.
+        
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteDisableLegacyDbfsResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> DisableLegacyDbfs:
+        """Get the disable legacy DBFS setting.
+        
+        Gets the disable legacy DBFS setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           query=query,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs:
+        """Update the disable legacy DBFS setting.
+        
+        Updates the disable legacy DBFS setting for the workspace.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyDbfs`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/disable_legacy_dbfs/names/default',
+                           body=body,
+                           headers=headers)
+        return DisableLegacyDbfs.from_dict(res)
+
+
 class DisableLegacyFeaturesAPI:
     """Disable legacy features for new Databricks workspaces.
     
@@ -4411,6 +4584,7 @@ def __init__(self, api_client):
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
         self._disable_legacy_access = DisableLegacyAccessAPI(self._api)
+        self._disable_legacy_dbfs = DisableLegacyDbfsAPI(self._api)
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
@@ -4434,6 +4608,11 @@ def disable_legacy_access(self) -> DisableLegacyAccessAPI:
         """'Disabling legacy access' has the following impacts: 1."""
         return self._disable_legacy_access
 
+    @property
+    def disable_legacy_dbfs(self) -> DisableLegacyDbfsAPI:
+        """When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new mounts)."""
+        return self._disable_legacy_dbfs
+
     @property
     def enhanced_security_monitoring(self) -> EnhancedSecurityMonitoringAPI:
         """Controls whether enhanced security monitoring is enabled for the current workspace."""
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 348a27123..4f0e49c77 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -454,6 +454,9 @@ def from_dict(cls, d: Dict[str, any]) -> CancelExecutionResponse:
 
 @dataclass
 class Channel:
+    """Configures the channel name and DBSQL version of the warehouse. CHANNEL_NAME_CUSTOM should be
+    chosen only when `dbsql_version` is specified."""
+
     dbsql_version: Optional[str] = None
 
     name: Optional[ChannelName] = None
@@ -499,7 +502,6 @@ class ChannelName(Enum):
     CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
     CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
     CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW'
-    CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
     CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
 
 
@@ -827,7 +829,8 @@ class CreateWarehouseRequest:
     """The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries)
     before it is automatically stopped.
     
-    Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+    Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+    non-serverless warehouses - 0 indicates no autostop.
     
     Defaults to 120 mins"""
 
@@ -6866,7 +6869,8 @@ def create(
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
           
-          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+          non-serverless warehouses - 0 indicates no autostop.
           
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 6c12c6039..7c8bfbd5e 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -1862,8 +1862,8 @@ def list(self,
              path_prefix: Optional[str] = None) -> Iterator[RepoInfo]:
         """Get repos.
         
-        Returns repos that the calling user has Manage permissions on. Results are paginated with each page
-        containing twenty repos.
+        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+        through additional pages.
         
         :param next_page_token: str (optional)
           Token used to get the next page of results. If not specified, returns the first page of results as
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
new file mode 100644
index 000000000..d7f1db9d3
--- /dev/null
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -0,0 +1,60 @@
+``a.settings.disable_legacy_features``: Disable Legacy Features
+===============================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyFeaturesAPI
+
+    Disable legacy features for new Databricks workspaces.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3LTS.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse
+
+        Delete the disable legacy features setting.
+        
+        Deletes the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures
+
+        Get the disable legacy features setting.
+        
+        Gets the value of the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures
+
+        Update the disable legacy features setting.
+        
+        Updates the value of the disable legacy features setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyFeatures`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
\ No newline at end of file
diff --git a/docs/account/settings/index.rst b/docs/account/settings/index.rst
index 2c53b1afa..abf97c6a0 100644
--- a/docs/account/settings/index.rst
+++ b/docs/account/settings/index.rst
@@ -11,5 +11,6 @@ Manage security settings for Accounts and Workspaces
    network_connectivity
    settings
    csp_enablement_account
+   disable_legacy_features
    esm_enablement_account
    personal_compute
\ No newline at end of file
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index 9ef26a1ee..3df647279 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -16,6 +16,15 @@
         This settings can be disabled so that new workspaces do not have compliance security profile enabled by
         default.
 
+    .. py:property:: disable_legacy_features
+        :type: DisableLegacyFeaturesAPI
+
+        Disable legacy features for new Databricks workspaces.
+        
+        For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+        provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+        prior to 13.3 LTS.
+
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst
index 827a563b8..2d522c625 100644
--- a/docs/dbdataclasses/apps.rst
+++ b/docs/dbdataclasses/apps.rst
@@ -34,15 +34,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AppDeploymentState
 
+   .. py:attribute:: CANCELLED
+      :value: "CANCELLED"
+
    .. py:attribute:: FAILED
       :value: "FAILED"
 
    .. py:attribute:: IN_PROGRESS
       :value: "IN_PROGRESS"
 
-   .. py:attribute:: STOPPED
-      :value: "STOPPED"
-
    .. py:attribute:: SUCCEEDED
       :value: "SUCCEEDED"
 
@@ -76,30 +76,117 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: AppState
+.. autoclass:: AppResource
+   :members:
+   :undoc-members:
 
-   .. py:attribute:: CREATING
-      :value: "CREATING"
+.. autoclass:: AppResourceJob
+   :members:
+   :undoc-members:
 
-   .. py:attribute:: DELETED
-      :value: "DELETED"
+.. py:class:: AppResourceJobJobPermission
 
-   .. py:attribute:: DELETING
-      :value: "DELETING"
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
 
-   .. py:attribute:: ERROR
-      :value: "ERROR"
+   .. py:attribute:: CAN_MANAGE_RUN
+      :value: "CAN_MANAGE_RUN"
 
-   .. py:attribute:: IDLE
-      :value: "IDLE"
+   .. py:attribute:: CAN_VIEW
+      :value: "CAN_VIEW"
+
+   .. py:attribute:: IS_OWNER
+      :value: "IS_OWNER"
+
+.. autoclass:: AppResourceSecret
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceSecretSecretPermission
+
+   Permission to grant on the secret scope. Supported permissions are: "READ", "WRITE", "MANAGE".
+
+   .. py:attribute:: MANAGE
+      :value: "MANAGE"
+
+   .. py:attribute:: READ
+      :value: "READ"
+
+   .. py:attribute:: WRITE
+      :value: "WRITE"
+
+.. autoclass:: AppResourceServingEndpoint
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceServingEndpointServingEndpointPermission
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_QUERY
+      :value: "CAN_QUERY"
+
+   .. py:attribute:: CAN_VIEW
+      :value: "CAN_VIEW"
+
+.. autoclass:: AppResourceSqlWarehouse
+   :members:
+   :undoc-members:
+
+.. py:class:: AppResourceSqlWarehouseSqlWarehousePermission
+
+   .. py:attribute:: CAN_MANAGE
+      :value: "CAN_MANAGE"
+
+   .. py:attribute:: CAN_USE
+      :value: "CAN_USE"
+
+   .. py:attribute:: IS_OWNER
+      :value: "IS_OWNER"
+
+.. py:class:: ApplicationState
+
+   .. py:attribute:: CRASHED
+      :value: "CRASHED"
+
+   .. py:attribute:: DEPLOYING
+      :value: "DEPLOYING"
 
    .. py:attribute:: RUNNING
       :value: "RUNNING"
 
+   .. py:attribute:: UNAVAILABLE
+      :value: "UNAVAILABLE"
+
+.. autoclass:: ApplicationStatus
+   :members:
+   :undoc-members:
+
+.. py:class:: ComputeState
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: DELETING
+      :value: "DELETING"
+
+   .. py:attribute:: ERROR
+      :value: "ERROR"
+
    .. py:attribute:: STARTING
       :value: "STARTING"
 
-.. autoclass:: AppStatus
+   .. py:attribute:: STOPPED
+      :value: "STOPPED"
+
+   .. py:attribute:: STOPPING
+      :value: "STOPPING"
+
+   .. py:attribute:: UPDATING
+      :value: "UPDATING"
+
+.. autoclass:: ComputeStatus
    :members:
    :undoc-members:
 
@@ -111,10 +198,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: DeleteResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: GetAppPermissionLevelsResponse
    :members:
    :undoc-members:
@@ -135,10 +218,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: StopAppResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: UpdateAppRequest
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 4f9c651d2..b0f4f838e 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -65,6 +65,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AwsCredentials
+   :members:
+   :undoc-members:
+
 .. autoclass:: AwsIamRoleRequest
    :members:
    :undoc-members:
@@ -85,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AzureUserDelegationSas
+   :members:
+   :undoc-members:
+
 .. autoclass:: CancelRefreshResponse
    :members:
    :undoc-members:
@@ -261,6 +269,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CONNECTION_GLUE
       :value: "CONNECTION_GLUE"
 
+   .. py:attribute:: CONNECTION_HTTP_BEARER
+      :value: "CONNECTION_HTTP_BEARER"
+
    .. py:attribute:: CONNECTION_MYSQL
       :value: "CONNECTION_MYSQL"
 
@@ -298,6 +309,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: HIVE_METASTORE
       :value: "HIVE_METASTORE"
 
+   .. py:attribute:: HTTP
+      :value: "HTTP"
+
    .. py:attribute:: MYSQL
       :value: "MYSQL"
 
@@ -421,6 +435,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of credential.
 
+   .. py:attribute:: BEARER_TOKEN
+      :value: "BEARER_TOKEN"
+
    .. py:attribute:: USERNAME_PASSWORD
       :value: "USERNAME_PASSWORD"
 
@@ -662,6 +679,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PARAM
       :value: "PARAM"
 
+.. autoclass:: GcpOauthToken
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryTableCredentialRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryTableCredentialResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: GetBindingsSecurableType
 
    .. py:attribute:: CATALOG
@@ -1176,6 +1205,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: R2Credentials
+   :members:
+   :undoc-members:
+
 .. autoclass:: RegenerateDashboardRequest
    :members:
    :undoc-members:
@@ -1304,6 +1337,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: TableOperation
+
+   .. py:attribute:: READ
+      :value: "READ"
+
+   .. py:attribute:: READ_WRITE
+      :value: "READ_WRITE"
+
 .. autoclass:: TableRowFilter
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index f4e175920..0066f0374 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -1022,7 +1022,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RuntimeEngine
 
-   Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine is inferred from spark_version.
+   Determines the cluster's runtime engine, either standard or Photon.
+   This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+   If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains `-photon-`, in which case Photon will be used.
 
    .. py:attribute:: NULL
       :value: "NULL"
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 8765ee695..192095548 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -207,7 +207,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: MessageStatus
 
-   MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
+   MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
 
    .. py:attribute:: ASKING_AI
       :value: "ASKING_AI"
@@ -227,6 +227,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: FETCHING_METADATA
       :value: "FETCHING_METADATA"
 
+   .. py:attribute:: FILTERING_CONTEXT
+      :value: "FILTERING_CONTEXT"
+
    .. py:attribute:: QUERY_RESULT_EXPIRED
       :value: "QUERY_RESULT_EXPIRED"
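Editor's note: since `EXECUTING_QUERY` persists until the client fetches the result, a polling sketch may help; the space, conversation and message ids are placeholders, and the Genie accessor names are assumptions based on the current SDK:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.dashboards import MessageStatus

    w = WorkspaceClient()

    # Check the message status and fetch the SQL result once the query runs;
    # the status only advances past EXECUTING_QUERY after this call is made.
    msg = w.genie.get_message('<space-id>', '<conversation-id>', '<message-id>')
    if msg.status in (MessageStatus.EXECUTING_QUERY, MessageStatus.COMPLETED):
        result = w.genie.get_message_query_result(
            '<space-id>', '<conversation-id>', '<message-id>')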
 
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index b1b05ec18..3aa0db043 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -601,11 +601,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RunResultState
 
-   A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped after reaching the timeout. * `CANCELED`: The run was canceled at user request. * `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled.
+   A value indicating the run's result. The possible values are: * `SUCCESS`: The task completed successfully. * `FAILED`: The task completed with an error. * `TIMEDOUT`: The run was stopped after reaching the timeout. * `CANCELED`: The run was canceled at user request. * `MAXIMUM_CONCURRENT_RUNS_REACHED`: The run was skipped because the maximum concurrent runs were reached. * `EXCLUDED`: The run was skipped because the necessary conditions were not met. * `SUCCESS_WITH_FAILURES`: The job run completed successfully with some failures; leaf tasks were successful. * `UPSTREAM_FAILED`: The run was skipped because of an upstream failure. * `UPSTREAM_CANCELED`: The run was skipped because an upstream task was canceled. * `DISABLED`: The run was skipped because it was disabled explicitly by the user.
 
    .. py:attribute:: CANCELED
       :value: "CANCELED"
 
+   .. py:attribute:: DISABLED
+      :value: "DISABLED"
+
    .. py:attribute:: EXCLUDED
       :value: "EXCLUDED"
 
@@ -796,7 +799,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: TerminationCodeCode
 
-   The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit.
+   The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was successfully canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: The run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests has exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit.
    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
 
    .. py:attribute:: CANCELED
@@ -859,6 +862,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: UNAUTHORIZED_ERROR
       :value: "UNAUTHORIZED_ERROR"
 
+   .. py:attribute:: USER_CANCELED
+      :value: "USER_CANCELED"
+
    .. py:attribute:: WORKSPACE_RUN_LIMIT_EXCEEDED
       :value: "WORKSPACE_RUN_LIMIT_EXCEEDED"
 
diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst
index 23ef3c257..3deefc873 100644
--- a/docs/dbdataclasses/serving.rst
+++ b/docs/dbdataclasses/serving.rst
@@ -8,6 +8,61 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AiGatewayConfig
+   :members:
+   :undoc-members:
+
+.. autoclass:: AiGatewayGuardrailParameters
+   :members:
+   :undoc-members:
+
+.. autoclass:: AiGatewayGuardrailPiiBehavior
+   :members:
+   :undoc-members:
+
+.. py:class:: AiGatewayGuardrailPiiBehaviorBehavior
+
+   Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and a 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and a 400 status code is returned.
+
+   .. py:attribute:: BLOCK
+      :value: "BLOCK"
+
+   .. py:attribute:: NONE
+      :value: "NONE"
+
+.. autoclass:: AiGatewayGuardrails
+   :members:
+   :undoc-members:
+
+.. autoclass:: AiGatewayInferenceTableConfig
+   :members:
+   :undoc-members:
+
+.. autoclass:: AiGatewayRateLimit
+   :members:
+   :undoc-members:
+
+.. py:class:: AiGatewayRateLimitKey
+
+   Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
+
+   .. py:attribute:: ENDPOINT
+      :value: "ENDPOINT"
+
+   .. py:attribute:: USER
+      :value: "USER"
+
+.. py:class:: AiGatewayRateLimitRenewalPeriod
+
+   Renewal period field for a rate limit. Currently, only 'minute' is supported.
+
+   .. py:attribute:: MINUTE
+      :value: "MINUTE"
+
+.. autoclass:: AiGatewayUsageTrackingConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: AmazonBedrockConfig
    :members:
    :undoc-members:
@@ -226,6 +281,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PutAiGatewayResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index 0031512e7..12043e3c5 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -8,6 +8,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: BooleanMessage
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterAutoRestartMessage
    :members:
    :undoc-members:
@@ -188,6 +192,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteDisableLegacyAccessResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteDisableLegacyDbfsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteDisableLegacyFeaturesResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteNetworkConnectivityConfigurationResponse
    :members:
    :undoc-members:
@@ -221,6 +237,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: WEBHOOK
       :value: "WEBHOOK"
 
+.. autoclass:: DisableLegacyAccess
+   :members:
+   :undoc-members:
+
+.. autoclass:: DisableLegacyDbfs
+   :members:
+   :undoc-members:
+
+.. autoclass:: DisableLegacyFeatures
+   :members:
+   :undoc-members:
+
 .. autoclass:: EmailConfig
    :members:
    :undoc-members:
@@ -509,6 +537,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported.
 
+   .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN
+      :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
+
    .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN
       :value: "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
@@ -528,6 +559,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateDisableLegacyAccessRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateDisableLegacyDbfsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateDisableLegacyFeaturesRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateEnhancedSecurityMonitoringSettingRequest
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst
index 255123067..1657146c3 100644
--- a/docs/dbdataclasses/sql.rst
+++ b/docs/dbdataclasses/sql.rst
@@ -114,16 +114,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CHANNEL_NAME_PREVIEW
       :value: "CHANNEL_NAME_PREVIEW"
 
-   .. py:attribute:: CHANNEL_NAME_PREVIOUS
-      :value: "CHANNEL_NAME_PREVIOUS"
-
    .. py:attribute:: CHANNEL_NAME_UNSPECIFIED
       :value: "CHANNEL_NAME_UNSPECIFIED"
 
-.. autoclass:: ClientCallContext
-   :members:
-   :undoc-members:
-
 .. autoclass:: ColumnInfo
    :members:
    :undoc-members:
@@ -391,20 +384,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: EncodedText
-   :members:
-   :undoc-members:
-
-.. py:class:: EncodedTextEncoding
-
-   Carry text data in different form.
-
-   .. py:attribute:: BASE64
-      :value: "BASE64"
-
-   .. py:attribute:: PLAIN
-      :value: "PLAIN"
-
 .. autoclass:: EndpointConfPair
    :members:
    :undoc-members:
@@ -744,78 +723,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: QuerySource
-   :members:
-   :undoc-members:
-
-.. autoclass:: QuerySourceDriverInfo
-   :members:
-   :undoc-members:
-
-.. py:class:: QuerySourceEntryPoint
-
-   Spark service that received and processed the query
-
-   .. py:attribute:: DLT
-      :value: "DLT"
-
-   .. py:attribute:: SPARK_CONNECT
-      :value: "SPARK_CONNECT"
-
-   .. py:attribute:: THRIFT_SERVER
-      :value: "THRIFT_SERVER"
-
-.. py:class:: QuerySourceJobManager
-
-   Copied from elastic-spark-common/api/messages/manager.proto with enum values changed by 1 to accommodate JOB_MANAGER_UNSPECIFIED
-
-   .. py:attribute:: APP_SYSTEM_TABLE
-      :value: "APP_SYSTEM_TABLE"
-
-   .. py:attribute:: AUTOML
-      :value: "AUTOML"
-
-   .. py:attribute:: AUTO_MAINTENANCE
-      :value: "AUTO_MAINTENANCE"
-
-   .. py:attribute:: CLEAN_ROOMS
-      :value: "CLEAN_ROOMS"
-
-   .. py:attribute:: DATA_MONITORING
-      :value: "DATA_MONITORING"
-
-   .. py:attribute:: DATA_SHARING
-      :value: "DATA_SHARING"
-
-   .. py:attribute:: ENCRYPTION
-      :value: "ENCRYPTION"
-
-   .. py:attribute:: FABRIC_CRAWLER
-      :value: "FABRIC_CRAWLER"
-
-   .. py:attribute:: JOBS
-      :value: "JOBS"
-
-   .. py:attribute:: LAKEVIEW
-      :value: "LAKEVIEW"
-
-   .. py:attribute:: MANAGED_RAG
-      :value: "MANAGED_RAG"
-
-   .. py:attribute:: SCHEDULED_MV_REFRESH
-      :value: "SCHEDULED_MV_REFRESH"
-
-   .. py:attribute:: TESTING
-      :value: "TESTING"
-
-.. py:class:: QuerySourceTrigger
-
-   .. py:attribute:: MANUAL
-      :value: "MANUAL"
-
-   .. py:attribute:: SCHEDULED
-      :value: "SCHEDULED"
-
 .. py:class:: QueryStatementType
 
    .. py:attribute:: ALTER
@@ -950,10 +857,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: VIEWER
       :value: "VIEWER"
 
-.. autoclass:: ServerlessChannelInfo
-   :members:
-   :undoc-members:
-
 .. autoclass:: ServiceError
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst
index eaf70f9e0..9ff3eb66b 100644
--- a/docs/dbdataclasses/workspace.rst
+++ b/docs/dbdataclasses/workspace.rst
@@ -23,7 +23,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateCredentials
+.. autoclass:: CreateCredentialsRequest
    :members:
    :undoc-members:
 
@@ -31,7 +31,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateRepo
+.. autoclass:: CreateRepoRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateRepoResponse
    :members:
    :undoc-members:
 
@@ -59,6 +63,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteCredentialsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteRepoResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -111,6 +123,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetRepoResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetSecretResponse
    :members:
    :undoc-members:
@@ -171,6 +187,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListCredentialsResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListReposResponse
    :members:
    :undoc-members:
@@ -306,15 +326,19 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateCredentials
+.. autoclass:: UpdateCredentialsRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateCredentialsResponse
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateRepo
+.. autoclass:: UpdateRepoRequest
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateResponse
+.. autoclass:: UpdateRepoResponse
    :members:
    :undoc-members:
 
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index 455bb81cc..774e75b8b 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -7,7 +7,7 @@
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on.
 
-    .. py:method:: create(name: str [, description: Optional[str]]) -> Wait[App]
+    .. py:method:: create(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> Wait[App]
 
         Create an app.
         
@@ -18,16 +18,18 @@
           must be unique within the workspace.
         :param description: str (optional)
           The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
         
         :returns:
           Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_idle for more details.
+          See :method:wait_get_app_active for more details.
         
 
-    .. py:method:: create_and_wait(name: str [, description: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> App
+    .. py:method:: create_and_wait(name: str [, description: Optional[str], resources: Optional[List[AppResource]], timeout: datetime.timedelta = 0:20:00]) -> App
 
 
-    .. py:method:: delete(name: str)
+    .. py:method:: delete(name: str) -> App
 
         Delete an app.
         
@@ -36,10 +38,10 @@
         :param name: str
           The name of the app.
         
-        
+        :returns: :class:`App`
         
 
-    .. py:method:: deploy(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode]]) -> Wait[AppDeployment]
+    .. py:method:: deploy(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str]]) -> Wait[AppDeployment]
 
         Create an app deployment.
         
@@ -47,21 +49,23 @@
         
         :param app_name: str
           The name of the app.
-        :param source_code_path: str
+        :param deployment_id: str (optional)
+          The unique id of the deployment.
+        :param mode: :class:`AppDeploymentMode` (optional)
+          The mode in which the deployment will manage the source code.
+        :param source_code_path: str (optional)
           The workspace file system path of the source code used to create the app deployment. This is
           different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
           The former refers to the original source code location of the app in the workspace during deployment
           creation, whereas the latter provides a system generated stable snapshotted source code path used by
           the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         
 
-    .. py:method:: deploy_and_wait(app_name: str, source_code_path: str [, mode: Optional[AppDeploymentMode], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
+    .. py:method:: deploy_and_wait(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
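Editor's note: to illustrate the new ``resources`` parameter, a sketch that grants the app CAN_USE on a SQL warehouse; the warehouse id is a placeholder, and the ``AppResource`` field names follow the dataclasses added earlier in this patch:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.apps import (
        AppResource, AppResourceSqlWarehouse,
        AppResourceSqlWarehouseSqlWarehousePermission)

    w = WorkspaceClient()

    # Create the app with an attached SQL warehouse resource and block until
    # it reaches the ACTIVE state (see wait_get_app_active below).
    app = w.apps.create_and_wait(
        name='my-app',
        resources=[
            AppResource(
                name='warehouse',
                sql_warehouse=AppResourceSqlWarehouse(
                    id='1234567890abcdef',
                    permission=AppResourceSqlWarehouseSqlWarehousePermission.CAN_USE)),
        ])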
 
 
     .. py:method:: get(name: str) -> App
@@ -157,7 +161,7 @@
         :returns: :class:`AppPermissions`
         
 
-    .. py:method:: start(name: str) -> Wait[AppDeployment]
+    .. py:method:: start(name: str) -> Wait[App]
 
         Start an app.
         
@@ -167,14 +171,14 @@
           The name of the app.
         
         :returns:
-          Long-running operation waiter for :class:`AppDeployment`.
-          See :method:wait_get_deployment_app_succeeded for more details.
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
         
 
-    .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> AppDeployment
+    .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
 
-    .. py:method:: stop(name: str)
+    .. py:method:: stop(name: str) -> Wait[App]
 
         Stop an app.
         
@@ -183,10 +187,15 @@
         :param name: str
           The name of the app.
         
-        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_stopped for more details.
         
 
-    .. py:method:: update(name: str [, description: Optional[str]]) -> App
+    .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
+
+
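Editor's note: with ``stop`` now returning a waiter as well, the app lifecycle round-trip can be sketched as follows ('my-app' is a placeholder name):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Stop the app and wait for STOPPED, then start it and wait for ACTIVE;
    # both helpers default to a 20-minute timeout.
    w.apps.stop_and_wait('my-app')
    app = w.apps.start_and_wait('my-app')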
+    .. py:method:: update(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> App
 
         Update an app.
         
@@ -197,6 +206,8 @@
           must be unique within the workspace.
         :param description: str (optional)
           The description of the app.
+        :param resources: List[:class:`AppResource`] (optional)
+          Resources for the app.
         
         :returns: :class:`App`
         
@@ -214,7 +225,10 @@
         :returns: :class:`AppPermissions`
         
 
-    .. py:method:: wait_get_app_idle(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
+    .. py:method:: wait_get_app_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
+
+
+    .. py:method:: wait_get_app_stopped(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
 
 
     .. py:method:: wait_get_deployment_app_succeeded(app_name: str, deployment_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[AppDeployment], None]]) -> AppDeployment
diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst
index 3bf2522d8..1372ca5a1 100644
--- a/docs/workspace/catalog/index.rst
+++ b/docs/workspace/catalog/index.rst
@@ -24,5 +24,6 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas,
    system_schemas
    table_constraints
    tables
+   temporary_table_credentials
    volumes
    workspace_bindings
\ No newline at end of file
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 6249f0da1..4cb458b46 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -45,7 +45,7 @@
         :returns: :class:`TableExistsResponse`
         
 
-    .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool]]) -> TableInfo
+    .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool]]) -> TableInfo
 
 
         Usage:
@@ -94,11 +94,13 @@
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         
         :returns: :class:`TableInfo`
         
 
-    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
+    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
 
         Usage:
@@ -138,6 +140,8 @@
           for
         :param include_delta_metadata: bool (optional)
           Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
           when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -153,7 +157,7 @@
         :returns: Iterator over :class:`TableInfo`
         
 
-    .. py:method:: list_summaries(catalog_name: str [, max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
+    .. py:method:: list_summaries(catalog_name: str [, include_manifest_capabilities: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
 
 
         Usage:
@@ -192,6 +196,8 @@
         
         :param catalog_name: str
           Name of parent catalog for tables of interest.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
         :param max_results: int (optional)
           Maximum number of summaries for tables to return. If not set, the page length is set to a server
           configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
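Editor's note: a sketch of the new flag on the listing calls, using the signature documented above:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # List table summaries in the 'main' catalog, asking the server to also
    # include each table's capability manifest in the response.
    for summary in w.tables.list_summaries(catalog_name='main',
                                           include_manifest_capabilities=True):
        print(summary.full_name)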
diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst
new file mode 100644
index 000000000..1acd462b7
--- /dev/null
+++ b/docs/workspace/catalog/temporary_table_credentials.rst
@@ -0,0 +1,36 @@
+``w.temporary_table_credentials``: Temporary Table Credentials
+==============================================================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: TemporaryTableCredentialsAPI
+
+    Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
+    locations where table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud
+    provider has its own type of credentials: AWS uses temporary session tokens via AWS Security Token
+    Service (STS), Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google
+    Cloud supports temporary credentials through OAuth 2.0. Temporary table credentials ensure that data
+    access is limited in scope and duration, reducing the risk of unauthorized access or misuse. To use the
+    temporary table credentials API, a metastore admin needs to enable the external_access_enabled flag (off
+    by default) at the metastore level, and the user needs to be granted the EXTERNAL USE SCHEMA permission
+    at the schema level by the catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission
+    that can only be granted by the catalog admin explicitly and is not included in schema ownership or ALL
+    PRIVILEGES on the schema for security reasons.
+
+    .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse
+
+        Generate a temporary table credential.
+        
+        Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
+        must have the external_access_enabled flag set to true (default false). The caller must have the
+        EXTERNAL_USE_SCHEMA privilege on the parent schema, and this privilege can only be granted by catalog
+        owners.
+        
+        :param operation: :class:`TableOperation` (optional)
+          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+          specified, the credentials returned will have write permissions; otherwise, they will be read-only.
+        :param table_id: str (optional)
+          UUID of the table to read or write.
+        
+        :returns: :class:`GenerateTemporaryTableCredentialResponse`
+        
\ No newline at end of file
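Editor's note: a sketch of the end-to-end flow, assuming the prerequisites above are met; the table name is a placeholder, and the printed field is an assumption about ``GenerateTemporaryTableCredentialResponse``:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import TableOperation

    w = WorkspaceClient()

    # Resolve the table's UUID, then request short-lived read-only credentials
    # for the cloud storage location backing it.
    table = w.tables.get('main.default.trips')
    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
        operation=TableOperation.READ, table_id=table.table_id)
    print(creds.expiration_time)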
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index 601b55812..ac52edecb 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -107,6 +107,11 @@
         If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
         Otherwise the cluster will terminate with an informative error message.
         
+        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+        the [create compute UI] and then copying the generated JSON definition from the UI.
+        
+        [create compute UI]: https://docs.databricks.com/compute/configure.html
+        
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -202,8 +207,13 @@
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
@@ -425,8 +435,13 @@
         :param policy_id: str (optional)
           The ID of the cluster policy used to create the cluster if applicable.
         :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
-          is inferred from spark_version.
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
         :param single_user_name: str (optional)
           Single user name if data_security_mode is `SINGLE_USER`
         :param spark_conf: Dict[str,str] (optional)
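Editor's note: a sketch of the recommended pattern, selecting Photon through ``runtime_engine`` rather than a legacy ``-photon-`` Spark version; the node type and version strings are placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import RuntimeEngine

    w = WorkspaceClient()

    # Create a Photon cluster; note the spark_version carries no '-photon-'.
    cluster = w.clusters.create_and_wait(
        cluster_name='photon-demo',
        spark_version='15.4.x-scala2.12',
        node_type_id='i3.xlarge',
        num_workers=1,
        runtime_engine=RuntimeEngine.PHOTON,
    )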
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index 92aa8c0e3..fe358063c 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -20,7 +20,11 @@
           slash. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
           The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses.
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
         
@@ -261,7 +265,11 @@
           not been modified since the last read. This field is excluded in List Dashboards responses.
         :param serialized_dashboard: str (optional)
           The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses.
+          responses. Use the [get dashboard API] to retrieve an example response, which includes the
+          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
+          the dashboard's layout and components.
+          
+          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
         :param warehouse_id: str (optional)
           The warehouse ID used to run the dashboard.
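Editor's note: one way to obtain a valid ``serialized_dashboard`` value, as the updated description suggests, is to copy it from an existing dashboard; the dashboard id is a placeholder:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fetch an existing dashboard to inspect its serialized JSON layout, then
    # create a new dashboard from (a possibly edited copy of) that string.
    src = w.lakeview.get('<dashboard-id>')
    copy = w.lakeview.create(display_name='Copy of dashboard',
                             serialized_dashboard=src.serialized_dashboard,
                             warehouse_id=src.warehouse_id)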
         
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index c07c8e28e..3c6e0f2e4 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -174,7 +174,10 @@
           An optional set of email addresses that is notified when runs of this job begin or complete as well
           as when this job is deleted.
         :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by tasks of this job.
+          A list of task execution environment specifications that can be referenced by serverless tasks of
+          this job. An environment is required to be present for serverless tasks. For serverless notebook
+          tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
+          the task environment is required to be specified using environment_key in the task settings.
         :param format: :class:`Format` (optional)
           Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
           using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
@@ -211,12 +214,11 @@
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting, available only in Create/Update/Reset and Submit calls. Specifies the user or
-          service principal that the job runs as. If not specified, the job runs as the user who created the
-          job.
+          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
+          not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
+          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
+          error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
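Editor's note: a sketch of the broadened ``run_as`` setting; the service principal id, notebook path, and cluster id are placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Run the job as a service principal; per the updated docs, exactly one of
    # user_name, service_principal_name or group_name may be set on JobRunAs.
    job = w.jobs.create(
        name='nightly-etl',
        run_as=jobs.JobRunAs(
            service_principal_name='00000000-0000-0000-0000-000000000000'),
        tasks=[
            jobs.Task(task_key='main',
                      existing_cluster_id='<cluster-id>',
                      notebook_task=jobs.NotebookTask(notebook_path='/Shared/etl')),
        ])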
@@ -679,6 +681,7 @@
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh.
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
@@ -868,6 +871,7 @@
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
         :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh.
         :param python_named_params: Dict[str,str] (optional)
         :param python_params: List[str] (optional)
           A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
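Editor's note: the newly documented ``pipeline_params`` knob maps to a single flag; a sketch (the job id is a placeholder):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Trigger a run of a pipeline-task job, forcing a full refresh of all
    # tables rather than an incremental update.
    run = w.jobs.run_now(job_id=123,
                         pipeline_params=jobs.PipelineParams(full_refresh=True))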
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index ce98ac5d4..9801a200e 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -15,7 +15,7 @@
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
     data quality and specify how to handle records that fail those expectations.
 
-    .. py:method:: create( [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
+    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
 
         Usage:
@@ -55,6 +55,8 @@
         
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name conflicts with that of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -91,6 +93,9 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
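Editor's note: a sketch combining the new ``budget_policy_id`` and ``schema`` parameters; the notebook path and policy id are placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import pipelines

    w = WorkspaceClient()

    # Direct publishing mode: setting `schema` makes tables land in
    # main.reports without a pipeline-wide `target` schema.
    created = w.pipelines.create(
        name='reports',
        catalog='main',
        schema='reports',
        serverless=True,
        budget_policy_id='<policy-id>',
        libraries=[pipelines.PipelineLibrary(
            notebook=pipelines.NotebookLibrary(path='/Shared/reports'))],
    )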
@@ -371,7 +376,7 @@
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
 
-    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
 
 
         Usage:
@@ -425,6 +430,8 @@
           Unique identifier for this pipeline.
         :param allow_duplicate_names: bool (optional)
           If false, deployment will fail if name has changed and conflicts the name of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
         :param catalog: str (optional)
           A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
           in this pipeline are published to a `target` schema inside `catalog` (for example,
@@ -463,6 +470,9 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
         :param serverless: bool (optional)
           Whether serverless compute is enabled for this pipeline.
         :param storage: str (optional)
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 9244f333a..8e21197a1 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -29,7 +29,7 @@
         :returns: :class:`BuildLogsResponse`
         
 
-    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
+    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
         
@@ -38,9 +38,12 @@
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
-          Rate limits to be applied to the serving endpoint. NOTE: only external and foundation model
-          endpoints are supported as of now.
+          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+          Gateway to manage rate limits.
         :param route_optimized: bool (optional)
           Enable route optimization for the serving endpoint.
         :param tags: List[:class:`EndpointTag`] (optional)
@@ -51,7 +54,7 @@
           See :method:wait_get_serving_endpoint_not_updating for more details.
         
 
-    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
+    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
 
     .. py:method:: delete(name: str)
@@ -168,8 +171,8 @@
 
         Update rate limits of a serving endpoint.
         
-        Used to update the rate limits of a serving endpoint. NOTE: only external and foundation model
-        endpoints are supported as of now.
+        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+        currently supported. For external models, use AI Gateway to manage rate limits.
         
         :param name: str
           The name of the serving endpoint whose rate limits are being updated. This field is required.
@@ -179,6 +182,29 @@
         :returns: :class:`PutResponse`
         
 
+    .. py:method:: put_ai_gateway(name: str [, guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse
+
+        Update AI Gateway of a serving endpoint.
+        
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
+        supported.
+        
+        :param name: str
+          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+        :param guardrails: :class:`AiGatewayGuardrails` (optional)
+          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+          being sent to and received from model APIs and to improve model quality.
+        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+          Configuration for rate limits which can be set to limit endpoint traffic.
+        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+          Configuration to enable usage tracking using system tables. These tables allow you to monitor
+          operational usage on endpoints and their associated costs.
+        
+        :returns: :class:`PutAiGatewayResponse`
+        
+
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
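
The `put_ai_gateway` method documented above pairs naturally with the generated gateway dataclasses. A hedged sketch, assuming the `AiGatewayRateLimit` and `AiGatewayUsageTrackingConfig` classes and their enum members in `databricks.sdk.service.serving` (the endpoint name is a placeholder):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (AiGatewayRateLimit,
                                            AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod,
                                            AiGatewayUsageTrackingConfig)

w = WorkspaceClient()

# Only external model endpoints support AI Gateway as of this change.
w.serving_endpoints.put_ai_gateway(
    name="my-external-model-endpoint",  # placeholder endpoint name
    rate_limits=[
        AiGatewayRateLimit(calls=100,
                           key=AiGatewayRateLimitKey.ENDPOINT,
                           renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE)
    ],
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
)
```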
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
new file mode 100644
index 000000000..c8baba3a7
--- /dev/null
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -0,0 +1,61 @@
+``w.settings.disable_legacy_access``: Disable Legacy Access
+===========================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyAccessAPI
+
+    'Disabling legacy access' has the following impacts:
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access the Hive Metastore
+       through HMS Federation.
+    2. Disables Fallback Mode (docs link) on any External Location access from the workspace.
+    3. Alters DBFS path access to use External Location permissions in place of legacy credentials.
+    4. Enforces Unity Catalog access on all path-based access.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse
+
+        Delete Legacy Access Disablement Status.
+        
+        Deletes legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyAccessResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess
+
+        Retrieve Legacy Access Disablement Status.
+        
+        Retrieves legacy access disablement Status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyAccess`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess
+
+        Update Legacy Access Disablement Status.
+        
+        Updates legacy access disablement status.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyAccess`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyAccess`
+        
\ No newline at end of file
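
The etag contract described above is easiest to see as a read-then-delete pair. A minimal sketch, assuming the returned setting exposes its `etag` attribute as other generated settings APIs do:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Read first to obtain a fresh etag, then pass it to the delete so a
# concurrent write is detected rather than silently overwritten.
current = w.settings.disable_legacy_access.get()
w.settings.disable_legacy_access.delete(etag=current.etag)
```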
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
new file mode 100644
index 000000000..ad11fa606
--- /dev/null
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -0,0 +1,57 @@
+``w.settings.disable_legacy_dbfs``: Disable Legacy DBFS
+=======================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: DisableLegacyDbfsAPI
+
+    When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+    mounts). When the setting is off, all DBFS functionality is enabled.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse
+
+        Delete the disable legacy DBFS setting.
+        
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs
+
+        Get the disable legacy DBFS setting.
+        
+        Gets the disable legacy DBFS setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs
+
+        Update the disable legacy DBFS setting.
+        
+        Updates the disable legacy DBFS setting for the workspace.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
\ No newline at end of file
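
For the `update` side, a hedged sketch of turning the setting on; the nested field name used in the mask is an assumption based on the generated dataclass shape:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import BooleanMessage, DisableLegacyDbfs

w = WorkspaceClient()

# The nested field name below is an assumption from the generated
# dataclass; the field mask must name exactly the fields being changed.
setting = DisableLegacyDbfs(disable_legacy_dbfs=BooleanMessage(value=True))
w.settings.disable_legacy_dbfs.update(allow_missing=True,
                                      setting=setting,
                                      field_mask="disable_legacy_dbfs.value")
```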
diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst
index d513ea9fd..22655853b 100644
--- a/docs/workspace/settings/index.rst
+++ b/docs/workspace/settings/index.rst
@@ -14,6 +14,8 @@ Manage security settings for Accounts and Workspaces
    automatic_cluster_update
    compliance_security_profile
    default_namespace
+   disable_legacy_access
+   disable_legacy_dbfs
    enhanced_security_monitoring
    restrict_workspace_admins
    token_management
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index 55f47dae0..588031926 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -34,6 +34,22 @@
         This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
         namespace only applies when using Unity Catalog-enabled compute.
 
+    .. py:property:: disable_legacy_access
+        :type: DisableLegacyAccessAPI
+
+        'Disabling legacy access' has the following impacts:
+        
+        1. Disables direct access to the Hive Metastore. However, you can still access the Hive Metastore
+           through HMS Federation.
+        2. Disables Fallback Mode (docs link) on any External Location access from the workspace.
+        3. Alters DBFS path access to use External Location permissions in place of legacy credentials.
+        4. Enforces Unity Catalog access on all path-based access.
+
+    .. py:property:: disable_legacy_dbfs
+        :type: DisableLegacyDbfsAPI
+
+        When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
+        mounts). When the setting is off, all DBFS functionality is enabled.
+
     .. py:property:: enhanced_security_monitoring
         :type: EnhancedSecurityMonitoringAPI
 
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 4d1337623..716fa4fdc 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -82,7 +82,9 @@
     are approximate, occur server-side, and cannot account for things such as caller delays and network
     latency from caller to service. - The system will auto-close a statement after one hour if the client
     stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this.
+    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
+    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
+    Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
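
To illustrate the lifecycle rule added above (the statement must be created and cancelled through this same API), a minimal sketch with a placeholder warehouse id:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The same API that started the statement is the only one that may
# manage it; the warehouse id below is a placeholder.
stmt = w.statement_execution.execute_statement(statement="SELECT 1",
                                               warehouse_id="abcdef0123456789")
w.statement_execution.cancel_execution(statement_id=stmt.statement_id)
```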
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index 8a5da4302..58b8a3fc0 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -41,7 +41,8 @@
           The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
           is automatically stopped.
           
-          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+          non-serverless warehouses - 0 indicates no autostop.
           
           Defaults to 120 mins
         :param channel: :class:`Channel` (optional)
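
A quick sketch of the relaxed `auto_stop_mins` rule for serverless warehouses; the warehouse name and sizing are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Serverless warehouses may auto-stop after any number of minutes >= 0;
# classic warehouses must use 0 (never stop) or >= 10.
w.warehouses.create(name="etl-warehouse",
                    cluster_size="Small",
                    max_num_clusters=1,
                    auto_stop_mins=5,
                    enable_serverless_compute=True).result()
```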
diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst
index 490cb16ea..34851e84a 100644
--- a/docs/workspace/workspace/git_credentials.rst
+++ b/docs/workspace/workspace/git_credentials.rst
@@ -33,9 +33,9 @@
         existing credentials, or the DELETE endpoint to delete existing credentials.
         
         :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -44,8 +44,7 @@
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
@@ -64,7 +63,7 @@
         
         
 
-    .. py:method:: get(credential_id: int) -> CredentialInfo
+    .. py:method:: get(credential_id: int) -> GetCredentialsResponse
 
 
         Usage:
@@ -89,7 +88,7 @@
         :param credential_id: int
           The ID for the corresponding credential to access.
         
-        :returns: :class:`CredentialInfo`
+        :returns: :class:`GetCredentialsResponse`
         
 
     .. py:method:: list() -> Iterator[CredentialInfo]
@@ -112,7 +111,7 @@
         :returns: Iterator over :class:`CredentialInfo`
         
 
-    .. py:method:: update(credential_id: int [, git_provider: Optional[str], git_username: Optional[str], personal_access_token: Optional[str]])
+    .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]])
 
 
         Usage:
@@ -141,10 +140,10 @@
         
         :param credential_id: int
           The ID for the corresponding credential to access.
-        :param git_provider: str (optional)
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param git_username: str (optional)
           The username or email provided with your Git provider account, depending on which provider you are
           using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
@@ -153,8 +152,7 @@
           Access Token authentication documentation to see what is supported.
         :param personal_access_token: str (optional)
           The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
-          access token used to authenticate to the corresponding Git
+          providers, support may exist for other types of scoped access tokens. [Learn more].
           
           [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
         
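
To ground the provider list above, a minimal sketch of creating a credential; the username and token source are placeholders:

```python
import os

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# git_provider is case-insensitive; the token comes from the environment
# here purely for illustration.
cred = w.git_credentials.create(git_provider="gitHub",
                                git_username="example-user",
                                personal_access_token=os.environ["GITHUB_PAT"])
print(cred.credential_id)
```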
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index a5c602a3a..01b1c875f 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -14,7 +14,7 @@
     Within Repos you can develop code in notebooks or other files and follow data science and engineering code
     development best practices using Git for version control, collaboration, and CI/CD.
 
-    .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> RepoInfo
+    .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> CreateRepoResponse
 
 
         Usage:
@@ -42,17 +42,17 @@
         :param url: str
           URL of the Git repository to be linked.
         :param provider: str
-          Git provider. This field is case-insensitive. The available Git providers are gitHub,
-          bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
-          gitLabEnterpriseEdition and awsCodeCommit.
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
         :param path: str (optional)
           Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-          is created in /Repos, path must be in the format /Repos/{folder}/{repo-name}.
+          is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
         :param sparse_checkout: :class:`SparseCheckout` (optional)
           If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
           sparse checkout after the repo is created.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`CreateRepoResponse`
         
 
     .. py:method:: delete(repo_id: int)
@@ -62,12 +62,12 @@
         Deletes the specified repo.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
         
         
 
-    .. py:method:: get(repo_id: int) -> RepoInfo
+    .. py:method:: get(repo_id: int) -> GetRepoResponse
 
 
         Usage:
@@ -94,9 +94,9 @@
         Returns the repo with the given repo ID.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         
-        :returns: :class:`RepoInfo`
+        :returns: :class:`GetRepoResponse`
         
 
     .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse
@@ -139,15 +139,16 @@
 
         Get repos.
         
-        Returns repos that the calling user has Manage permissions on. Results are paginated with each page
-        containing twenty repos.
+        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+        through additional pages.
         
         :param next_page_token: str (optional)
           Token used to get the next page of results. If not specified, returns the first page of results as
           well as a next page token if there are more results.
         :param path_prefix: str (optional)
-          Filters repos that have paths starting with the given path prefix. If not provided repos from /Repos
-          will be served.
+          Filters repos that have paths starting with the given path prefix. If not provided, or if the
+          prefix is effectively empty (`/` or `/Workspace`), Git folders (repos) from `/Workspace/Repos`
+          will be served.
         
         :returns: Iterator over :class:`RepoInfo`
         
@@ -193,7 +194,7 @@
         branch.
         
         :param repo_id: int
-          The ID for the corresponding repo to access.
+          ID of the Git folder (repo) object in the workspace.
         :param branch: str (optional)
           Branch that the local version of the repo is checked out to.
         :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
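
The pagination note above is handled by the SDK's iterator, as in this minimal sketch (the path prefix is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The SDK iterator follows next_page_token internally, so callers never
# page by hand.
for repo in w.repos.list(path_prefix="/Workspace/Repos/someone@example.com"):
    print(repo.id, repo.path)
```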

From a3794b10d854ed8cb25dd4a4b184f974f08686e6 Mon Sep 17 00:00:00 2001
From: Parth Bansal 
Date: Mon, 7 Oct 2024 17:09:26 +0200
Subject: [PATCH 053/136] [Release] Release v0.34.0 (#788)

### Bug Fixes

* Fix Model Serving Test
([#781](https://github.com/databricks/databricks-sdk-py/pull/781)).
* Include package name for external types when deserializing responses
([#786](https://github.com/databricks/databricks-sdk-py/pull/786)).


### Internal Changes

* Refactor ApiClient into `_BaseClient` and `ApiClient`
([#785](https://github.com/databricks/databricks-sdk-py/pull/785)).
* Update to latest OpenAPI spec
([#787](https://github.com/databricks/databricks-sdk-py/pull/787)).
* revert Support Models in `dbutils.fs` operations
([#750](https://github.com/databricks/databricks-sdk-py/pull/750))
([#778](https://github.com/databricks/databricks-sdk-py/pull/778)).


### API Changes:

* Added
[w.disable_legacy_dbfs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_dbfs.html)
workspace-level service.
* Added `default_source_code_path` and `resources` fields for
`databricks.sdk.service.apps.App`.
* Added `resources` field for
`databricks.sdk.service.apps.CreateAppRequest`.
* Added `resources` field for
`databricks.sdk.service.apps.UpdateAppRequest`.

OpenAPI SHA: bc17b474818138f19b78a7bea0675707dead2b87, Date: 2024-10-07
---
 CHANGELOG.md              | 24 ++++++++++++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 029a437a7..9ffbd9134 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,29 @@
 # Version changelog
 
+## [Release] Release v0.34.0
+
+### Bug Fixes
+
+ * Fix Model Serving Test ([#781](https://github.com/databricks/databricks-sdk-py/pull/781)).
+ * Include package name for external types when deserializing responses ([#786](https://github.com/databricks/databricks-sdk-py/pull/786)).
+
+
+### Internal Changes
+
+ * Refactor ApiClient into `_BaseClient` and `ApiClient` ([#785](https://github.com/databricks/databricks-sdk-py/pull/785)).
+ * Update to latest OpenAPI spec ([#787](https://github.com/databricks/databricks-sdk-py/pull/787)).
+ * revert Support Models in `dbutils.fs` operations ([#750](https://github.com/databricks/databricks-sdk-py/pull/750)) ([#778](https://github.com/databricks/databricks-sdk-py/pull/778)).
+
+
+### API Changes:
+
+ * Added [w.disable_legacy_dbfs](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/disable_legacy_dbfs.html) workspace-level service.
+ * Added `default_source_code_path` and `resources` fields for `databricks.sdk.service.apps.App`.
+ * Added `resources` field for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Added `resources` field for `databricks.sdk.service.apps.UpdateAppRequest`.
+
+OpenAPI SHA: bc17b474818138f19b78a7bea0675707dead2b87, Date: 2024-10-07
+
 ## [Release] Release v0.33.0
 
 ### Internal Changes
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index e3d0b7be1..cac711204 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.33.0'
+__version__ = '0.34.0'

From 1c758153e8869cb11fecf2d5e7fee1b949acbd4f Mon Sep 17 00:00:00 2001
From: Aravind Segu 
Date: Thu, 10 Oct 2024 01:22:40 -0700
Subject: [PATCH 054/136] [Feature] Open AI Client Mixin (#779)

## Changes
Add an OpenAI client mixin to the Serving Endpoints API. The OpenAI client
requires a token for authentication, so we move the creation of the OpenAI
client into the Databricks SDK so that users can easily use it in both
notebook and model serving environments.
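
A minimal sketch of the resulting surface, assuming a configured workspace and the `openai` extra installed (the model name is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
openai_client = w.serving_endpoints.get_open_ai_client()

# Requests go to <host>/serving-endpoints with a freshly minted
# Databricks token injected by the httpx auth hook on every call.
response = openai_client.chat.completions.create(
    model="databricks-meta-llama-3-1-70b-instruct",  # placeholder endpoint name
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
```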

## Tests
Dogfood Test:
https://e2-dogfood.staging.cloud.databricks.com/editor/notebooks/2337940012762945?o=6051921418418893

- [x] `make test` run locally
- [x] `make fmt` applied
- [ ] relevant integration tests applied

---------

Signed-off-by: aravind-segu 
---
 .codegen/__init__.py.tmpl               |  3 +-
 NOTICE                                  | 14 +++++++
 databricks/sdk/__init__.py              |  3 +-
 databricks/sdk/mixins/open_ai_client.py | 52 +++++++++++++++++++++++++
 setup.py                                |  6 ++-
 tests/test_open_ai_mixin.py             | 30 ++++++++++++++
 6 files changed, 104 insertions(+), 4 deletions(-)
 create mode 100644 databricks/sdk/mixins/open_ai_client.py
 create mode 100644 tests/test_open_ai_mixin.py

diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl
index 5ca160685..bc68f5654 100644
--- a/.codegen/__init__.py.tmpl
+++ b/.codegen/__init__.py.tmpl
@@ -5,6 +5,7 @@ from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.files import DbfsExt
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
+from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 {{- range .Services}}
 from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}
 from databricks.sdk.service.provisioning import Workspace
@@ -17,7 +18,7 @@ from typing import Optional
   "google_credentials" "google_service_account" }}
 
 {{- define "api" -}}
-  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" -}}
+  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsExt" "ServingEndpointsApi" -}}
   {{- $genApi := concat .PascalName "API" -}}
   {{- getOrDefault $mixins $genApi $genApi -}}
 {{- end -}}
diff --git a/NOTICE b/NOTICE
index 2a353a6c8..c05cdd318 100644
--- a/NOTICE
+++ b/NOTICE
@@ -12,8 +12,22 @@ googleapis/google-auth-library-python - https://github.com/googleapis/google-aut
 Copyright google-auth-library-python authors
 License - https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE
 
+openai/openai-python - https://github.com/openai/openai-python
+Copyright 2024 OpenAI
+License - https://github.com/openai/openai-python/blob/main/LICENSE
+
 This software contains code from the following open source projects, licensed under the BSD (3-clause) license.
 
 x/oauth2 - https://cs.opensource.google/go/x/oauth2/+/master:oauth2.go
 Copyright 2014 The Go Authors. All rights reserved.
 License - https://cs.opensource.google/go/x/oauth2/+/master:LICENSE
+
+encode/httpx - https://github.com/encode/httpx
+Copyright 2019, Encode OSS Ltd
+License - https://github.com/encode/httpx/blob/master/LICENSE.md
+
+This software contains code from the following open source projects, licensed under the MIT license:
+
+langchain-ai/langchain - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai
+Copyright 2023 LangChain, Inc.
+License - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/LICENSE
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 848272198..a4058ec51 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -6,6 +6,7 @@
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
@@ -175,7 +176,7 @@ def __init__(self,
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
-        serving_endpoints = ServingEndpointsAPI(self._api_client)
+        serving_endpoints = ServingEndpointsExt(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
new file mode 100644
index 000000000..f7a8af02d
--- /dev/null
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -0,0 +1,52 @@
+from databricks.sdk.service.serving import ServingEndpointsAPI
+
+
+class ServingEndpointsExt(ServingEndpointsAPI):
+
+    # Use the HTTP client to inject the Databricks authorization header.
+    # The auth hook runs on every invocation, so when used with model serving it always picks up a refreshed token.
+    def _get_authorized_http_client(self):
+        import httpx
+
+        class BearerAuth(httpx.Auth):
+
+            def __init__(self, get_headers_func):
+                self.get_headers_func = get_headers_func
+
+            def auth_flow(self, request: httpx.Request) -> httpx.Request:
+                auth_headers = self.get_headers_func()
+                request.headers["Authorization"] = auth_headers["Authorization"]
+                yield request
+
+        databricks_token_auth = BearerAuth(self._api._cfg.authenticate)
+
+        # Create an HTTP client with Bearer Token authentication
+        http_client = httpx.Client(auth=databricks_token_auth)
+        return http_client
+
+    def get_open_ai_client(self):
+        try:
+            from openai import OpenAI
+        except Exception:
+            raise ImportError(
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`"
+            )
+
+        return OpenAI(
+            base_url=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
+
+    def get_langchain_chat_open_ai_client(self, model):
+        try:
+            from langchain_openai import ChatOpenAI
+        except Exception:
+            raise ImportError(
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7"
+            )
+
+        return ChatOpenAI(
+            model=model,
+            openai_api_base=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
diff --git a/setup.py b/setup.py
index 9cfe38d09..b756e6d0d 100644
--- a/setup.py
+++ b/setup.py
@@ -17,8 +17,10 @@
       extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock",
                               "yapf", "pycodestyle", "autoflake", "isort", "wheel",
                               "ipython", "ipywidgets", "requests-mock", "pyfakefs",
-                              "databricks-connect", "pytest-rerunfailures"],
-                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"]},
+                              "databricks-connect", "pytest-rerunfailures", "openai", 
+                              'langchain-openai; python_version > "3.7"', "httpx"],
+                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
+                      "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]},
       author="Serge Smertin",
       author_email="serge.smertin@databricks.com",
       description="Databricks SDK for Python (Beta)",
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
new file mode 100644
index 000000000..1858c66cb
--- /dev/null
+++ b/tests/test_open_ai_mixin.py
@@ -0,0 +1,30 @@
+import sys
+
+import pytest
+
+from databricks.sdk.core import Config
+
+
+def test_open_ai_client(monkeypatch):
+    from databricks.sdk import WorkspaceClient
+
+    monkeypatch.setenv('DATABRICKS_HOST', 'test_host')
+    monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token')
+    w = WorkspaceClient(config=Config())
+    client = w.serving_endpoints.get_open_ai_client()
+
+    assert client.base_url == "https://test_host/serving-endpoints/"
+    assert client.api_key == "no-token"
+
+
+@pytest.mark.skipif(sys.version_info < (3, 8), reason="Requires Python > 3.7")
+def test_langchain_open_ai_client(monkeypatch):
+    from databricks.sdk import WorkspaceClient
+
+    monkeypatch.setenv('DATABRICKS_HOST', 'test_host')
+    monkeypatch.setenv('DATABRICKS_TOKEN', 'test_token')
+    w = WorkspaceClient(config=Config())
+    client = w.serving_endpoints.get_langchain_chat_open_ai_client("databricks-meta-llama-3-1-70b-instruct")
+
+    assert client.openai_api_base == "https://test_host/serving-endpoints"
+    assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"

From 58d686ad58a85b385eb3ce3fc3452af1b37ef02c Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Thu, 17 Oct 2024 11:41:41 +0200
Subject: [PATCH 055/136] [Fix] Update Serving Endpoint mixin template and
 docs generation logic (#792)

## Changes
Update Serving Endpoint mixin template and docs generation logic

## Tests


- [X] `make test` run locally
- [X] `make fmt` applied
- [ ] relevant integration tests applied

---------

Co-authored-by: Omer Lachish 
---
 .codegen/__init__.py.tmpl                    |  2 +-
 .codegen/_openapi_sha                        |  2 +-
 databricks/sdk/__init__.py                   |  2 +-
 databricks/sdk/service/apps.py               |  2 +-
 databricks/sdk/service/catalog.py            | 15 ++++--
 databricks/sdk/service/dashboards.py         |  9 +++-
 databricks/sdk/service/jobs.py               | 53 +++++++++++++++++-
 databricks/sdk/service/pipelines.py          | 56 ++++++++++++++++++--
 databricks/sdk/service/sql.py                | 20 +++++++
 docs/dbdataclasses/catalog.rst               |  4 +-
 docs/dbdataclasses/dashboards.rst            |  3 ++
 docs/dbdataclasses/pipelines.rst             |  4 ++
 docs/gen-client-docs.py                      | 11 +++-
 docs/workspace/jobs/jobs.rst                 | 13 +++--
 docs/workspace/serving/serving_endpoints.rst |  8 ++-
 15 files changed, 184 insertions(+), 20 deletions(-)

diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl
index bc68f5654..d54e9dfff 100644
--- a/.codegen/__init__.py.tmpl
+++ b/.codegen/__init__.py.tmpl
@@ -18,7 +18,7 @@ from typing import Optional
   "google_credentials" "google_service_account" }}
 
 {{- define "api" -}}
-  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsExt" "ServingEndpointsApi" -}}
+  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsAPI" "ServingEndpointsExt" -}}
   {{- $genApi := concat .PascalName "API" -}}
   {{- getOrDefault $mixins $genApi $genApi -}}
 {{- end -}}
diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 7f9f41bb8..2d9cb6d86 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-bc17b474818138f19b78a7bea0675707dead2b87
\ No newline at end of file
+cf9c61453990df0f9453670f2fe68e1b128647a2
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index a4058ec51..159946461 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -638,7 +638,7 @@ def service_principals(self) -> ServicePrincipalsAPI:
         return self._service_principals
 
     @property
-    def serving_endpoints(self) -> ServingEndpointsAPI:
+    def serving_endpoints(self) -> ServingEndpointsExt:
         """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
         return self._serving_endpoints
 
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 63bc981ba..52796d0e8 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -787,7 +787,7 @@ def wait_get_app_active(self,
                             callback: Optional[Callable[[App], None]] = None) -> App:
         deadline = time.time() + timeout.total_seconds()
         target_states = (ComputeState.ACTIVE, )
-        failure_states = (ComputeState.ERROR, )
+        failure_states = (ComputeState.ERROR, ComputeState.STOPPED, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 2ccff4217..b149dbbaa 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -3865,11 +3865,16 @@ class OnlineTable:
     """Specification of the online table."""
 
     status: Optional[OnlineTableStatus] = None
-    """Online Table status"""
+    """Online Table data synchronization status"""
 
     table_serving_url: Optional[str] = None
     """Data serving REST API URL for this table"""
 
+    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
+    """The provisioning state of the online table entity in Unity Catalog. This is distinct from the
+    state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+    may be in "PROVISIONING" as it runs asynchronously)."""
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -3877,6 +3882,8 @@ def as_dict(self) -> dict:
         if self.spec: body['spec'] = self.spec.as_dict()
         if self.status: body['status'] = self.status.as_dict()
         if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
         return body
 
     @classmethod
@@ -3885,7 +3892,9 @@ def from_dict(cls, d: Dict[str, any]) -> OnlineTable:
         return cls(name=d.get('name', None),
                    spec=_from_dict(d, 'spec', OnlineTableSpec),
                    status=_from_dict(d, 'status', OnlineTableStatus),
-                   table_serving_url=d.get('table_serving_url', None))
+                   table_serving_url=d.get('table_serving_url', None),
+                   unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state',
+                                                          ProvisioningInfoState))
 
 
 @dataclass
@@ -4244,7 +4253,7 @@ class ProvisioningInfoState(Enum):
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
-    STATE_UNSPECIFIED = 'STATE_UNSPECIFIED'
+    UPDATING = 'UPDATING'
 
 
 @dataclass
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 27117d43a..4a4c640e6 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -607,6 +607,7 @@ class MessageErrorType(Enum):
     LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
     MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
     MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
     NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
     RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
     RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION'
@@ -784,6 +785,9 @@ def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
 
 @dataclass
 class Result:
+    is_truncated: Optional[bool] = None
+    """If result is truncated"""
+
     row_count: Optional[int] = None
     """Row count of the result"""
 
@@ -794,6 +798,7 @@ class Result:
     def as_dict(self) -> dict:
         """Serializes the Result into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
         if self.row_count is not None: body['row_count'] = self.row_count
         if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body
@@ -801,7 +806,9 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Result:
         """Deserializes the Result from a dictionary."""
-        return cls(row_count=d.get('row_count', None), statement_id=d.get('statement_id', None))
+        return cls(is_truncated=d.get('is_truncated', None),
+                   row_count=d.get('row_count', None),
+                   statement_id=d.get('statement_id', None))
 
 
 @dataclass
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index b3c723f37..a4f138d6b 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -29,6 +29,12 @@ class BaseJob:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -41,6 +47,8 @@ def as_dict(self) -> dict:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -50,6 +58,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         """Deserializes the BaseJob from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
@@ -484,6 +493,11 @@ class CreateJob:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -591,6 +605,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -619,6 +634,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
@@ -1261,6 +1277,12 @@ class Job:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""
 
+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -1282,6 +1304,8 @@ def as_dict(self) -> dict:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
@@ -1292,6 +1316,7 @@ def from_dict(cls, d: Dict[str, any]) -> Job:
         """Deserializes the Job from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
@@ -1755,6 +1780,11 @@ def from_dict(cls, d: Dict[str, any]) -> JobRunAs:
 
 @dataclass
 class JobSettings:
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -1860,6 +1890,7 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -1887,7 +1918,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(continuous=_from_dict(d, 'continuous', Continuous),
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
                    edit_mode=_enum(d, 'edit_mode', JobEditMode),
@@ -4507,6 +4539,10 @@ class SubmitRun:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""
 
+    budget_policy_id: Optional[str] = None
+    """The user specified id of the budget policy to use for this one-time run. If not specified, the
+    run will not be attributed to any budget policy."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""
 
@@ -4567,6 +4603,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -4585,6 +4622,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -5619,6 +5657,7 @@ def cancel_run_and_wait(self, run_id: int, timeout=timedelta(minutes=20)) -> Run
     def create(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                continuous: Optional[Continuous] = None,
                deployment: Optional[JobDeployment] = None,
                description: Optional[str] = None,
@@ -5647,6 +5686,10 @@ def create(self,
         
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
@@ -5731,6 +5774,7 @@ def create(self,
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if continuous is not None: body['continuous'] = continuous.as_dict()
         if deployment is not None: body['deployment'] = deployment.as_dict()
         if description is not None: body['description'] = description
@@ -6398,6 +6442,7 @@ def set_permissions(
     def submit(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                email_notifications: Optional[JobEmailNotifications] = None,
                environments: Optional[List[JobEnvironment]] = None,
                git_source: Optional[GitSource] = None,
@@ -6418,6 +6463,9 @@ def submit(self,
         
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user specified id of the budget policy to use for this one-time run. If not specified, the run
+          will not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
         :param environments: List[:class:`JobEnvironment`] (optional)
@@ -6469,6 +6517,7 @@ def submit(self,
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
         if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
@@ -6492,6 +6541,7 @@ def submit_and_wait(
         self,
         *,
         access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
         environments: Optional[List[JobEnvironment]] = None,
         git_source: Optional[GitSource] = None,
@@ -6506,6 +6556,7 @@ def submit_and_wait(
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20)) -> Run:
         return self.submit(access_control_list=access_control_list,
+                           budget_policy_id=budget_policy_id,
                            email_notifications=email_notifications,
                            environments=environments,
                            git_source=git_source,
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index f102bdc9d..9c12f8788 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -587,6 +587,9 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
 
 @dataclass
 class IngestionConfig:
+    report: Optional[ReportSpec] = None
+    """Select tables from a specific source report."""
+
     schema: Optional[SchemaSpec] = None
     """Select tables from a specific source schema."""
 
@@ -596,6 +599,7 @@ class IngestionConfig:
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.report: body['report'] = self.report.as_dict()
         if self.schema: body['schema'] = self.schema.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         return body
@@ -603,7 +607,9 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(schema=_from_dict(d, 'schema', SchemaSpec), table=_from_dict(d, 'table', TableSpec))
+        return cls(report=_from_dict(d, 'report', ReportSpec),
+                   schema=_from_dict(d, 'schema', SchemaSpec),
+                   table=_from_dict(d, 'table', TableSpec))
 
 
 @dataclass
@@ -1624,6 +1630,44 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger:
         return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))
 
 
+@dataclass
+class ReportSpec:
+    destination_catalog: Optional[str] = None
+    """Required. Destination catalog to store table."""
+
+    destination_schema: Optional[str] = None
+    """Required. Destination schema to store table."""
+
+    destination_table: Optional[str] = None
+    """Required. Destination table name. The pipeline fails if a table with that name already exists."""
+
+    source_url: Optional[str] = None
+    """Required. Report URL in the source system."""
+
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings override the
+    table_configuration defined in the IngestionPipelineDefinition object."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
+        """Deserializes the ReportSpec from a dictionary."""
+        return cls(destination_catalog=d.get('destination_catalog', None),
+                   destination_schema=d.get('destination_schema', None),
+                   destination_table=d.get('destination_table', None),
+                   source_url=d.get('source_url', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
@@ -1841,7 +1885,7 @@ class TableSpec:
     """Required. Destination schema to store table."""
 
     destination_table: Optional[str] = None
-    """Optional. Destination table name. The pipeline fails If a table with that name already exists.
+    """Optional. Destination table name. The pipeline fails if a table with that name already exists.
     If not set, the source table name is used."""
 
     source_catalog: Optional[str] = None
@@ -1893,6 +1937,10 @@ class TableSpecificConfig:
     scd_type: Optional[TableSpecificConfigScdType] = None
     """The SCD type to use to ingest the table."""
 
+    sequence_by: Optional[List[str]] = None
+    """The column names specifying the logical order of events in the source data. Delta Live Tables
+    uses this sequencing to handle change events that arrive out of order."""
+
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1900,6 +1948,7 @@ def as_dict(self) -> dict:
         if self.salesforce_include_formula_fields is not None:
             body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
         if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body
 
     @classmethod
@@ -1907,7 +1956,8 @@ def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
         return cls(primary_keys=d.get('primary_keys', None),
                    salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
-                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType))
+                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType),
+                   sequence_by=d.get('sequence_by', None))
 
 
 class TableSpecificConfigScdType(Enum):
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 4f0e49c77..7a224feeb 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -72,6 +72,9 @@ class Alert:
     lifecycle_state: Optional[LifecycleState] = None
     """The workspace state of the alert. Used for tracking trashed status."""
 
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
 
@@ -105,6 +108,7 @@ def as_dict(self) -> dict:
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
         if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.query_id is not None: body['query_id'] = self.query_id
@@ -124,6 +128,7 @@ def from_dict(cls, d: Dict[str, any]) -> Alert:
                    display_name=d.get('display_name', None),
                    id=d.get('id', None),
                    lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    parent_path=d.get('parent_path', None),
                    query_id=d.get('query_id', None),
@@ -652,6 +657,9 @@ class CreateAlertRequestAlert:
     display_name: Optional[str] = None
     """The display name of the alert."""
 
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     parent_path: Optional[str] = None
     """The workspace path of the folder containing the alert."""
 
@@ -669,6 +677,7 @@ def as_dict(self) -> dict:
         if self.custom_body is not None: body['custom_body'] = self.custom_body
         if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
         if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -681,6 +690,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
                    custom_body=d.get('custom_body', None),
                    custom_subject=d.get('custom_subject', None),
                    display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    parent_path=d.get('parent_path', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None))
@@ -2696,6 +2706,9 @@ class ListAlertsResponseAlert:
     lifecycle_state: Optional[LifecycleState] = None
     """The workspace state of the alert. Used for tracking trashed status."""
 
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
 
@@ -2726,6 +2739,7 @@ def as_dict(self) -> dict:
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
         if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -2744,6 +2758,7 @@ def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
                    display_name=d.get('display_name', None),
                    id=d.get('id', None),
                    lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None),
@@ -4561,6 +4576,9 @@ class UpdateAlertRequestAlert:
     display_name: Optional[str] = None
     """The display name of the alert."""
 
+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""
 
@@ -4578,6 +4596,7 @@ def as_dict(self) -> dict:
         if self.custom_body is not None: body['custom_body'] = self.custom_body
         if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
         if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -4590,6 +4609,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
                    custom_body=d.get('custom_body', None),
                    custom_subject=d.get('custom_subject', None),
                    display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None))
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index b0f4f838e..cb6399348 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -1194,8 +1194,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PROVISIONING
       :value: "PROVISIONING"
 
-   .. py:attribute:: STATE_UNSPECIFIED
-      :value: "STATE_UNSPECIFIED"
+   .. py:attribute:: UPDATING
+      :value: "UPDATING"
 
 .. autoclass:: ProvisioningStatus
    :members:
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 192095548..91de6ccb2 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -166,6 +166,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION
       :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
 
+   .. py:attribute:: NO_QUERY_TO_VISUALIZE_EXCEPTION
+      :value: "NO_QUERY_TO_VISUALIZE_EXCEPTION"
+
    .. py:attribute:: NO_TABLES_TO_QUERY_EXCEPTION
       :value: "NO_TABLES_TO_QUERY_EXCEPTION"
 
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index 9d3d9c8a7..9f419f160 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -265,6 +265,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ReportSpec
+   :members:
+   :undoc-members:
+
 .. autoclass:: SchemaSpec
    :members:
    :undoc-members:
diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py
index 4b52d817d..5c32beffe 100644
--- a/docs/gen-client-docs.py
+++ b/docs/gen-client-docs.py
@@ -259,7 +259,7 @@ def _openapi_spec(self) -> str:
                 return f.read()
         with open(f'{__dir__}/../.codegen/_openapi_sha') as f:
             sha = f.read().strip()
-        return subprocess.check_output(['deco', 'openapi', 'get', sha]).decode('utf-8')
+        return subprocess.check_output(['genkit', 'get', sha]).decode('utf-8')
 
     def _load_mapping(self) -> dict[str, Tag]:
         mapping = {}
@@ -342,8 +342,15 @@ def service_docs(self, client_inst, client_prefix: str) -> list[ServiceDoc]:
                 continue
             if service_name in ignore_client_fields:
                 continue
-            class_doc = service_inst.__doc__
+
             class_name = service_inst.__class__.__name__
+
+            # Use original class docstring for mixin classes
+            if class_name.endswith('Ext'):
+                class_doc = service_inst.__class__.__base__.__doc__
+            else:
+                class_doc = service_inst.__doc__
+
             print(f'Processing service {client_prefix}.{service_name}')
             all += self.service_docs(service_inst, client_prefix + "." + service_name)
 
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 3c6e0f2e4..b097c94c8 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -120,7 +120,7 @@
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
 
-    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
+    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
 
 
         Usage:
@@ -158,6 +158,10 @@
         
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user-specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
@@ -931,7 +935,7 @@
         :returns: :class:`JobPermissions`
         
 
-    .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
+    .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
 
 
         Usage:
@@ -969,6 +973,9 @@
         
         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user-specified id of the budget policy to use for this one-time run. If not specified, the run
+          will not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
         :param environments: List[:class:`JobEnvironment`] (optional)
@@ -1018,7 +1025,7 @@
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         
 
-    .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
+    .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
 
 
     .. py:method:: update(job_id: int [, fields_to_remove: Optional[List[str]], new_settings: Optional[JobSettings]])
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 8e21197a1..cbcbca964 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -2,7 +2,7 @@
 ==========================================
 .. currentmodule:: databricks.sdk.service.serving
 
-.. py:class:: ServingEndpointsAPI
+.. py:class:: ServingEndpointsExt
 
     The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
     
@@ -92,6 +92,12 @@
         :returns: :class:`ServingEndpointDetailed`
         
 
+    .. py:method:: get_langchain_chat_open_ai_client(model)
+
+
+    .. py:method:: get_open_ai_client()
+
+
     .. py:method:: get_open_api(name: str)
 
         Get the schema for a serving endpoint.

From 15257ebc9640e40ebe9db12e0be289d83c0da93c Mon Sep 17 00:00:00 2001
From: Omer Lachish <289488+rauchy@users.noreply.github.com>
Date: Thu, 17 Oct 2024 15:36:50 +0200
Subject: [PATCH 056/136] [Release] Release v0.35.0 (#793)

### New Features and Improvements

* Open AI Client Mixin
([#779](https://github.com/databricks/databricks-sdk-py/pull/779)).
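
A minimal sketch of the mixin in use (the served model name below is an
assumption for illustration, not part of this change):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# OpenAI-compatible client backed by the workspace's serving endpoints.
openai_client = w.serving_endpoints.get_open_ai_client()

completion = openai_client.chat.completions.create(
    model="databricks-meta-llama-3-1-70b-instruct",  # assumed endpoint name
    messages=[{"role": "user", "content": "What is Databricks?"}],
)
print(completion.choices[0].message.content)
```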


### Bug Fixes

* Update Serving Endpoint mixin template and docs generation logic
([#792](https://github.com/databricks/databricks-sdk-py/pull/792)).


### API Changes:

* Added `databricks.sdk.service.pipelines.ReportSpec` dataclass.
* Added `unity_catalog_provisioning_state` field for
`databricks.sdk.service.catalog.OnlineTable`.
* Added `is_truncated` field for
`databricks.sdk.service.dashboards.Result`.
* Added `effective_budget_policy_id` field for
`databricks.sdk.service.jobs.BaseJob`.
* Added `budget_policy_id` field for
`databricks.sdk.service.jobs.CreateJob`.
* Added `effective_budget_policy_id` field for
`databricks.sdk.service.jobs.Job`.
* Added `budget_policy_id` field for
`databricks.sdk.service.jobs.JobSettings`.
* Added `budget_policy_id` field for
`databricks.sdk.service.jobs.SubmitRun`.
* Added `report` field for
`databricks.sdk.service.pipelines.IngestionConfig`.
* Added `sequence_by` field for
`databricks.sdk.service.pipelines.TableSpecificConfig`.
* Added `notify_on_ok` field for `databricks.sdk.service.sql.Alert`.
* Added `notify_on_ok` field for
`databricks.sdk.service.sql.CreateAlertRequestAlert`.
* Added `notify_on_ok` field for
`databricks.sdk.service.sql.ListAlertsResponseAlert`.
* Added `notify_on_ok` field for
`databricks.sdk.service.sql.UpdateAlertRequestAlert`.

OpenAPI SHA: cf9c61453990df0f9453670f2fe68e1b128647a2, Date: 2024-10-14
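
A minimal sketch of two of the new fields in use (all IDs, names, and paths
below are placeholders, not values from this change):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs, sql

w = WorkspaceClient()

# Attribute a one-time run to a budget policy (placeholder policy ID).
run = w.jobs.submit(
    run_name="one-time-run",
    budget_policy_id="0123456789abcdef",
    tasks=[
        jobs.SubmitTask(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Shared/example"),
            existing_cluster_id="1234-567890-abcde123",
        )
    ],
)

# Notify subscribers when the alert returns to normal (placeholder query ID;
# the alert condition is omitted for brevity).
w.alerts.create(alert=sql.CreateAlertRequestAlert(
    display_name="row-count alert",
    query_id="<query-id>",
    notify_on_ok=True,
))
```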

Co-authored-by: Omer Lachish 
---
 CHANGELOG.md              | 31 +++++++++++++++++++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 32 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9ffbd9134..344e975d9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,36 @@
 # Version changelog
 
+## [Release] Release v0.35.0
+
+### New Features and Improvements
+
+ * Open AI Client Mixin ([#779](https://github.com/databricks/databricks-sdk-py/pull/779)).
+
+
+### Bug Fixes
+
+ * Update Serving Endpoint mixin template and docs generation logic ([#792](https://github.com/databricks/databricks-sdk-py/pull/792)).
+
+
+### API Changes:
+
+ * Added `databricks.sdk.service.pipelines.ReportSpec` dataclass.
+ * Added `unity_catalog_provisioning_state` field for `databricks.sdk.service.catalog.OnlineTable`.
+ * Added `is_truncated` field for `databricks.sdk.service.dashboards.Result`.
+ * Added `effective_budget_policy_id` field for `databricks.sdk.service.jobs.BaseJob`.
+ * Added `budget_policy_id` field for `databricks.sdk.service.jobs.CreateJob`.
+ * Added `effective_budget_policy_id` field for `databricks.sdk.service.jobs.Job`.
+ * Added `budget_policy_id` field for `databricks.sdk.service.jobs.JobSettings`.
+ * Added `budget_policy_id` field for `databricks.sdk.service.jobs.SubmitRun`.
+ * Added `report` field for `databricks.sdk.service.pipelines.IngestionConfig`.
+ * Added `sequence_by` field for `databricks.sdk.service.pipelines.TableSpecificConfig`.
+ * Added `notify_on_ok` field for `databricks.sdk.service.sql.Alert`.
+ * Added `notify_on_ok` field for `databricks.sdk.service.sql.CreateAlertRequestAlert`.
+ * Added `notify_on_ok` field for `databricks.sdk.service.sql.ListAlertsResponseAlert`.
+ * Added `notify_on_ok` field for `databricks.sdk.service.sql.UpdateAlertRequestAlert`.
+
+OpenAPI SHA: cf9c61453990df0f9453670f2fe68e1b128647a2, Date: 2024-10-14
+
 ## [Release] Release v0.34.0
 
 ### Bug Fixes
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index cac711204..2670d0523 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.34.0'
+__version__ = '0.35.0'

From 32ba2214547e7c551689a4a0fc1d9ecbc2cc34ca Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Mon, 21 Oct 2024 09:47:42 +0200
Subject: [PATCH 057/136] [Fix] Decouple OAuth functionality from `Config`
 (#784)

## Changes
### OAuth Refactoring
Currently, OAuthClient uses Config internally to resolve the OIDC
endpoints by passing the client ID and host to an internal Config
instance and calling its `oidc_endpoints` method. This has a few
drawbacks:
1. There is nearly a cyclical dependency: `Config` depends on methods in
`oauth.py`, and `OAuthClient` depends on `Config`. This currently
doesn't break because the `Config` import is done at runtime in the
`OAuthClient` constructor.
2. Databricks supports both in-house OAuth and Azure Entra ID OAuth.
Currently, the choice between these options depends on whether a user
specifies the azure_client_id or client_id parameter in the Config.
Because Config is used within OAuthClient, this means that OAuthClient
needs to expose a parameter to configure either client_id or
azure_client_id.

Rather than having these classes deeply coupled to one another, we can
allow users to fetch the OIDC endpoints for a given account/workspace as
a top-level functionality and provide this to `OAuthClient`. This breaks
the cyclic dependency and doesn't require `OAuthClient` to expose any
unnecessary parameters.

Further, I've also tried to remove the coupling of the other classes in
`oauth.py` to `OAuthClient`. Currently, `OAuthClient` serves both as the
mechanism to initialize OAuth and as a kind of configuration object,
capturing OAuth endpoint URLs, client ID/secret, redirect URL, and
scopes. Now, the parameters for each of these classes are explicit,
removing all unnecessarily coupling between them. One nice advantage is
that the Consent can be serialized/deserialized without any reference to
the `OAuthClient` anymore.

There is definitely more work to be done to simplify and clean up the
OAuth implementation, but this should at least unblock users who need to
use Azure Entra ID U2M OAuth in the SDK.
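
As a sketch of the decoupled flow (host, client ID, and redirect URL below are
placeholders): the OIDC endpoints are now resolved up front and passed to
`OAuthClient` explicitly.

```python
from databricks.sdk.oauth import OAuthClient, get_workspace_endpoints

# Resolve the OIDC endpoints once, as a top-level call; no Config involved.
oidc_endpoints = get_workspace_endpoints("https://my-workspace.cloud.databricks.com")

oauth_client = OAuthClient(
    oidc_endpoints=oidc_endpoints,
    client_id="my-oauth-app",
    redirect_url="http://localhost:8020",
)

# U2M flow: open a browser for consent, then exchange the code for a token.
consent = oauth_client.initiate_consent()
credentials = consent.launch_external_browser()
```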

## Tests
The new OIDC endpoint methods are tested, and those tests also verify
that those endpoints are retried in case of rate limiting.

I ran the flask app example against an AWS workspace, and I ran the
external-browser demo example against AWS, Azure and GCP workspaces with
the default client ID and with a newly created OAuth app with and
without credentials.

- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 databricks/sdk/_base_client.py         |  20 +++
 databricks/sdk/config.py               |  44 ++---
 databricks/sdk/credentials_provider.py |  31 ++--
 databricks/sdk/oauth.py                | 230 +++++++++++++++++++------
 examples/external_browser_auth.py      |  72 ++++++++
 examples/flask_app_with_oauth.py       |  64 +++----
 tests/test_oauth.py                    | 155 +++++++++++++----
 7 files changed, 459 insertions(+), 157 deletions(-)
 create mode 100644 examples/external_browser_auth.py

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 62c2974ec..95ce39cbe 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -1,4 +1,5 @@
 import logging
+import urllib.parse
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -17,6 +18,25 @@
 logger = logging.getLogger('databricks.sdk')
 
 
+def _fix_host_if_needed(host: Optional[str]) -> Optional[str]:
+    if not host:
+        return host
+
+    # Add a default scheme if it's missing
+    if '://' not in host:
+        host = 'https://' + host
+
+    o = urllib.parse.urlparse(host)
+    # remove trailing slash
+    path = o.path.rstrip('/')
+    # remove port if 443
+    netloc = o.netloc
+    if o.port == 443:
+        netloc = netloc.split(':')[0]
+
+    return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))
+
+
 class _BaseClient:
 
     def __init__(self,
diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index 5cae1b2b4..b4efdf603 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -10,11 +10,14 @@
 import requests
 
 from . import useragent
+from ._base_client import _fix_host_if_needed
 from .clock import Clock, RealClock
 from .credentials_provider import CredentialsStrategy, DefaultCredentials
 from .environments import (ALL_ENVS, AzureEnvironment, Cloud,
                            DatabricksEnvironment, get_environment_for_hostname)
-from .oauth import OidcEndpoints, Token
+from .oauth import (OidcEndpoints, Token, get_account_endpoints,
+                    get_azure_entra_id_workspace_endpoints,
+                    get_workspace_endpoints)
 
 logger = logging.getLogger('databricks.sdk')
 
@@ -254,24 +257,10 @@ def oidc_endpoints(self) -> Optional[OidcEndpoints]:
         if not self.host:
             return None
         if self.is_azure and self.azure_client_id:
-            # Retrieve authorize endpoint to retrieve token endpoint after
-            res = requests.get(f'{self.host}/oidc/oauth2/v2.0/authorize', allow_redirects=False)
-            real_auth_url = res.headers.get('location')
-            if not real_auth_url:
-                return None
-            return OidcEndpoints(authorization_endpoint=real_auth_url,
-                                 token_endpoint=real_auth_url.replace('/authorize', '/token'))
+            return get_azure_entra_id_workspace_endpoints(self.host)
         if self.is_account_client and self.account_id:
-            prefix = f'{self.host}/oidc/accounts/{self.account_id}'
-            return OidcEndpoints(authorization_endpoint=f'{prefix}/v1/authorize',
-                                 token_endpoint=f'{prefix}/v1/token')
-        oidc = f'{self.host}/oidc/.well-known/oauth-authorization-server'
-        res = requests.get(oidc)
-        if res.status_code != 200:
-            return None
-        auth_metadata = res.json()
-        return OidcEndpoints(authorization_endpoint=auth_metadata.get('authorization_endpoint'),
-                             token_endpoint=auth_metadata.get('token_endpoint'))
+            return get_account_endpoints(self.host, self.account_id)
+        return get_workspace_endpoints(self.host)
 
     def debug_string(self) -> str:
         """ Returns log-friendly representation of configured attributes """
@@ -346,22 +335,9 @@ def attributes(cls) -> Iterable[ConfigAttribute]:
         return cls._attributes
 
     def _fix_host_if_needed(self):
-        if not self.host:
-            return
-
-        # Add a default scheme if it's missing
-        if '://' not in self.host:
-            self.host = 'https://' + self.host
-
-        o = urllib.parse.urlparse(self.host)
-        # remove trailing slash
-        path = o.path.rstrip('/')
-        # remove port if 443
-        netloc = o.netloc
-        if o.port == 443:
-            netloc = netloc.split(':')[0]
-
-        self.host = urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment))
+        updated_host = _fix_host_if_needed(self.host)
+        if updated_host:
+            self.host = updated_host
 
     def load_azure_tenant_id(self):
         """[Internal] Load the Azure tenant ID from the Azure Databricks login page.
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 232465dab..a79151b5a 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -187,30 +187,35 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+    client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
-    elif cfg.is_aws:
+        client_secret = cfg.client_secret
+    elif cfg.azure_client_id:
+        client_id = cfg.azure_client_id
+        client_secret = cfg.azure_client_secret
+
+    if not client_id:
         client_id = 'databricks-cli'
-    elif cfg.is_azure:
-        # Use Azure AD app for cases when Azure CLI is not available on the machine.
-        # App has to be registered as Single-page multi-tenant to support PKCE
-        # TODO: temporary app ID, change it later.
-        client_id = '6128a518-99a9-425b-8333-4cc94f04cacd'
-    else:
-        raise ValueError(f'local browser SSO is not supported')
-    oauth_client = OAuthClient(host=cfg.host,
-                               client_id=client_id,
-                               redirect_url='http://localhost:8020',
-                               client_secret=cfg.client_secret)
 
     # Load cached credentials from disk if they exist.
     # Note that these are local to the Python SDK and not reused by other SDKs.
-    token_cache = TokenCache(oauth_client)
+    oidc_endpoints = cfg.oidc_endpoints
+    redirect_url = 'http://localhost:8020'
+    token_cache = TokenCache(host=cfg.host,
+                             oidc_endpoints=oidc_endpoints,
+                             client_id=client_id,
+                             client_secret=client_secret,
+                             redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
         # Force a refresh in case the loaded credentials are expired.
         credentials.token()
     else:
+        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                                   client_id=client_id,
+                                   redirect_url=redirect_url,
+                                   client_secret=client_secret)
         consent = oauth_client.initiate_consent()
         if not consent:
             return None
diff --git a/databricks/sdk/oauth.py b/databricks/sdk/oauth.py
index e9a3afb90..6cac45afc 100644
--- a/databricks/sdk/oauth.py
+++ b/databricks/sdk/oauth.py
@@ -17,6 +17,8 @@
 import requests
 import requests.auth
 
+from ._base_client import _BaseClient, _fix_host_if_needed
+
 # Error code for PKCE flow in Azure Active Directory, that gets additional retry.
 # See https://stackoverflow.com/a/75466778/277035 for more info
 NO_ORIGIN_FOR_SPA_CLIENT_ERROR = 'AADSTS9002327'
@@ -46,8 +48,24 @@ def __call__(self, r):
 
 @dataclass
 class OidcEndpoints:
+    """
+    The endpoints used for OAuth-based authentication in Databricks.
+    """
+
     authorization_endpoint: str # ../v1/authorize
+    """The authorization endpoint for the OAuth flow. The user-agent should be directed to this endpoint in order for
+    the user to login and authorize the client for user-to-machine (U2M) flows."""
+
     token_endpoint: str # ../v1/token
+    """The token endpoint for the OAuth flow."""
+
+    @staticmethod
+    def from_dict(d: dict) -> 'OidcEndpoints':
+        return OidcEndpoints(authorization_endpoint=d.get('authorization_endpoint'),
+                             token_endpoint=d.get('token_endpoint'))
+
+    def as_dict(self) -> dict:
+        return {'authorization_endpoint': self.authorization_endpoint, 'token_endpoint': self.token_endpoint}
 
 
 @dataclass
@@ -220,18 +238,76 @@ def do_GET(self):
         self.wfile.write(b'You can close this tab.')
 
 
+def get_account_endpoints(host: str, account_id: str, client: _BaseClient = _BaseClient()) -> OidcEndpoints:
+    """
+    Get the OIDC endpoints for a given account.
+    :param host: The Databricks account host.
+    :param account_id: The account ID.
+    :return: The account's OIDC endpoints.
+    """
+    host = _fix_host_if_needed(host)
+    oidc = f'{host}/oidc/accounts/{account_id}/.well-known/oauth-authorization-server'
+    resp = client.do('GET', oidc)
+    return OidcEndpoints.from_dict(resp)
+
+
+def get_workspace_endpoints(host: str, client: _BaseClient = _BaseClient()) -> OidcEndpoints:
+    """
+    Get the OIDC endpoints for a given workspace.
+    :param host: The Databricks workspace host.
+    :return: The workspace's OIDC endpoints.
+    """
+    host = _fix_host_if_needed(host)
+    oidc = f'{host}/oidc/.well-known/oauth-authorization-server'
+    resp = client.do('GET', oidc)
+    return OidcEndpoints.from_dict(resp)
+
+
+def get_azure_entra_id_workspace_endpoints(host: str) -> Optional[OidcEndpoints]:
+    """
+    Get the Azure Entra ID endpoints for a given workspace. Can only be used when authenticating to Azure Databricks
+    using an application registered in Azure Entra ID.
+    :param host: The Databricks workspace host.
+    :return: The OIDC endpoints for the workspace's Azure Entra ID tenant.
+    """
+    # In Azure, this workspace endpoint redirects to the Entra ID authorization endpoint
+    host = _fix_host_if_needed(host)
+    res = requests.get(f'{host}/oidc/oauth2/v2.0/authorize', allow_redirects=False)
+    real_auth_url = res.headers.get('location')
+    if not real_auth_url:
+        return None
+    return OidcEndpoints(authorization_endpoint=real_auth_url,
+                         token_endpoint=real_auth_url.replace('/authorize', '/token'))
+
+
 class SessionCredentials(Refreshable):
 
-    def __init__(self, client: 'OAuthClient', token: Token):
-        self._client = client
+    def __init__(self,
+                 token: Token,
+                 token_endpoint: str,
+                 client_id: str,
+                 client_secret: str = None,
+                 redirect_url: str = None):
+        self._token_endpoint = token_endpoint
+        self._client_id = client_id
+        self._client_secret = client_secret
+        self._redirect_url = redirect_url
         super().__init__(token)
 
     def as_dict(self) -> dict:
         return {'token': self._token.as_dict()}
 
     @staticmethod
-    def from_dict(client: 'OAuthClient', raw: dict) -> 'SessionCredentials':
-        return SessionCredentials(client=client, token=Token.from_dict(raw['token']))
+    def from_dict(raw: dict,
+                  token_endpoint: str,
+                  client_id: str,
+                  client_secret: str = None,
+                  redirect_url: str = None) -> 'SessionCredentials':
+        return SessionCredentials(token=Token.from_dict(raw['token']),
+                                  token_endpoint=token_endpoint,
+                                  client_id=client_id,
+                                  client_secret=client_secret,
+                                  redirect_url=redirect_url)
 
     def auth_type(self):
         """Implementing CredentialsProvider protocol"""
@@ -252,13 +328,13 @@ def refresh(self) -> Token:
             raise ValueError('oauth2: token expired and refresh token is not set')
         params = {'grant_type': 'refresh_token', 'refresh_token': refresh_token}
         headers = {}
-        if 'microsoft' in self._client.token_url:
+        if 'microsoft' in self._token_endpoint:
             # Tokens issued for the 'Single-Page Application' client-type may
             # only be redeemed via cross-origin requests
-            headers = {'Origin': self._client.redirect_url}
-        return retrieve_token(client_id=self._client.client_id,
-                              client_secret=self._client.client_secret,
-                              token_url=self._client.token_url,
+            headers = {'Origin': self._redirect_url}
+        return retrieve_token(client_id=self._client_id,
+                              client_secret=self._client_secret,
+                              token_url=self._token_endpoint,
                               params=params,
                               use_params=True,
                               headers=headers)
@@ -266,27 +342,53 @@ def refresh(self) -> Token:
 
 class Consent:
 
-    def __init__(self, client: 'OAuthClient', state: str, verifier: str, auth_url: str = None) -> None:
-        self.auth_url = auth_url
-
+    def __init__(self,
+                 state: str,
+                 verifier: str,
+                 authorization_url: str,
+                 redirect_url: str,
+                 token_endpoint: str,
+                 client_id: str,
+                 client_secret: str = None) -> None:
         self._verifier = verifier
         self._state = state
-        self._client = client
+        self._authorization_url = authorization_url
+        self._redirect_url = redirect_url
+        self._token_endpoint = token_endpoint
+        self._client_id = client_id
+        self._client_secret = client_secret
 
     def as_dict(self) -> dict:
-        return {'state': self._state, 'verifier': self._verifier}
+        return {
+            'state': self._state,
+            'verifier': self._verifier,
+            'authorization_url': self._authorization_url,
+            'redirect_url': self._redirect_url,
+            'token_endpoint': self._token_endpoint,
+            'client_id': self._client_id,
+        }
+
+    @property
+    def authorization_url(self) -> str:
+        return self._authorization_url
 
     @staticmethod
-    def from_dict(client: 'OAuthClient', raw: dict) -> 'Consent':
-        return Consent(client, raw['state'], raw['verifier'])
+    def from_dict(raw: dict, client_secret: str = None) -> 'Consent':
+        return Consent(raw['state'],
+                       raw['verifier'],
+                       authorization_url=raw['authorization_url'],
+                       redirect_url=raw['redirect_url'],
+                       token_endpoint=raw['token_endpoint'],
+                       client_id=raw['client_id'],
+                       client_secret=client_secret)
 
     def launch_external_browser(self) -> SessionCredentials:
-        redirect_url = urllib.parse.urlparse(self._client.redirect_url)
+        redirect_url = urllib.parse.urlparse(self._redirect_url)
         if redirect_url.hostname not in ('localhost', '127.0.0.1'):
             raise ValueError(f'cannot listen on {redirect_url.hostname}')
         feedback = []
-        logger.info(f'Opening {self.auth_url} in a browser')
-        webbrowser.open_new(self.auth_url)
+        logger.info(f'Opening {self._authorization_url} in a browser')
+        webbrowser.open_new(self._authorization_url)
         port = redirect_url.port
         handler_factory = functools.partial(_OAuthCallback, feedback)
         with HTTPServer(("localhost", port), handler_factory) as httpd:
@@ -308,7 +410,7 @@ def exchange(self, code: str, state: str) -> SessionCredentials:
         if self._state != state:
             raise ValueError('state mismatch')
         params = {
-            'redirect_uri': self._client.redirect_url,
+            'redirect_uri': self._redirect_url,
             'grant_type': 'authorization_code',
             'code_verifier': self._verifier,
             'code': code
@@ -316,19 +418,20 @@ def exchange(self, code: str, state: str) -> SessionCredentials:
         headers = {}
         while True:
             try:
-                token = retrieve_token(client_id=self._client.client_id,
-                                       client_secret=self._client.client_secret,
-                                       token_url=self._client.token_url,
+                token = retrieve_token(client_id=self._client_id,
+                                       client_secret=self._client_secret,
+                                       token_url=self._token_endpoint,
                                        params=params,
                                        headers=headers,
                                        use_params=True)
-                return SessionCredentials(self._client, token)
+                return SessionCredentials(token, self._token_endpoint, self._client_id, self._client_secret,
+                                          self._redirect_url)
             except ValueError as e:
                 if NO_ORIGIN_FOR_SPA_CLIENT_ERROR in str(e):
                     # Retry in cases of 'Single-Page Application' client-type with
                     # 'Origin' header equal to client's redirect URL.
-                    headers['Origin'] = self._client.redirect_url
-                    msg = f'Retrying OAuth token exchange with {self._client.redirect_url} origin'
+                    headers['Origin'] = self._redirect_url
+                    msg = f'Retrying OAuth token exchange with {self._redirect_url} origin'
                     logger.debug(msg)
                     continue
                 raise e
@@ -354,13 +457,28 @@ class OAuthClient:
     """
 
     def __init__(self,
-                 host: str,
-                 client_id: str,
+                 oidc_endpoints: OidcEndpoints,
                  redirect_url: str,
-                 *,
+                 client_id: str,
                  scopes: List[str] = None,
                  client_secret: str = None):
-        # TODO: is it a circular dependency?..
+
+        if not scopes:
+            scopes = ['all-apis']
+
+        self.redirect_url = redirect_url
+        self._client_id = client_id
+        self._client_secret = client_secret
+        self._oidc_endpoints = oidc_endpoints
+        self._scopes = scopes
+
+    @staticmethod
+    def from_host(host: str,
+                  client_id: str,
+                  redirect_url: str,
+                  *,
+                  scopes: List[str] = None,
+                  client_secret: str = None) -> 'OAuthClient':
         from .core import Config
         from .credentials_provider import credentials_strategy
 
@@ -374,18 +492,7 @@ def noop_credentials(_: any):
         oidc = config.oidc_endpoints
         if not oidc:
             raise ValueError(f'{host} does not support OAuth')
-
-        self.host = host
-        self.redirect_url = redirect_url
-        self.client_id = client_id
-        self.client_secret = client_secret
-        self.token_url = oidc.token_endpoint
-        self.is_aws = config.is_aws
-        self.is_azure = config.is_azure
-        self.is_gcp = config.is_gcp
-
-        self._auth_url = oidc.authorization_endpoint
-        self._scopes = scopes
+        return OAuthClient(oidc, redirect_url, client_id, scopes, client_secret)
 
     def initiate_consent(self) -> Consent:
         state = secrets.token_urlsafe(16)
@@ -397,18 +504,24 @@ def initiate_consent(self) -> Consent:
 
         params = {
             'response_type': 'code',
-            'client_id': self.client_id,
+            'client_id': self._client_id,
             'redirect_uri': self.redirect_url,
             'scope': ' '.join(self._scopes),
             'state': state,
             'code_challenge': challenge,
             'code_challenge_method': 'S256'
         }
-        url = f'{self._auth_url}?{urllib.parse.urlencode(params)}'
-        return Consent(self, state, verifier, auth_url=url)
+        auth_url = f'{self._oidc_endpoints.authorization_endpoint}?{urllib.parse.urlencode(params)}'
+        return Consent(state,
+                       verifier,
+                       authorization_url=auth_url,
+                       redirect_url=self.redirect_url,
+                       token_endpoint=self._oidc_endpoints.token_endpoint,
+                       client_id=self._client_id,
+                       client_secret=self._client_secret)
 
     def __repr__(self) -> str:
-        return f''
+        return f''
 
 
 @dataclass
@@ -448,17 +561,28 @@ def refresh(self) -> Token:
                               use_header=self.use_header)
 
 
-class TokenCache():
+class TokenCache:
     BASE_PATH = "~/.config/databricks-sdk-py/oauth"
 
-    def __init__(self, client: OAuthClient) -> None:
-        self.client = client
+    def __init__(self,
+                 host: str,
+                 oidc_endpoints: OidcEndpoints,
+                 client_id: str,
+                 redirect_url: str = None,
+                 client_secret: str = None,
+                 scopes: List[str] = None) -> None:
+        self._host = host
+        self._client_id = client_id
+        self._oidc_endpoints = oidc_endpoints
+        self._redirect_url = redirect_url
+        self._client_secret = client_secret
+        self._scopes = scopes or []
 
     @property
     def filename(self) -> str:
         # Include host, client_id, and scopes in the cache filename to make it unique.
         hash = hashlib.sha256()
-        for chunk in [self.client.host, self.client.client_id, ",".join(self.client._scopes), ]:
+        for chunk in [self._host, self._client_id, ",".join(self._scopes), ]:
             hash.update(chunk.encode('utf-8'))
         return os.path.expanduser(os.path.join(self.__class__.BASE_PATH, hash.hexdigest() + ".json"))
 
@@ -472,7 +596,11 @@ def load(self) -> Optional[SessionCredentials]:
         try:
             with open(self.filename, 'r') as f:
                 raw = json.load(f)
-                return SessionCredentials.from_dict(self.client, raw)
+                return SessionCredentials.from_dict(raw,
+                                                    token_endpoint=self._oidc_endpoints.token_endpoint,
+                                                    client_id=self._client_id,
+                                                    client_secret=self._client_secret,
+                                                    redirect_url=self._redirect_url)
         except Exception:
             return None
 
diff --git a/examples/external_browser_auth.py b/examples/external_browser_auth.py
new file mode 100644
index 000000000..061ff60c7
--- /dev/null
+++ b/examples/external_browser_auth.py
@@ -0,0 +1,72 @@
+from databricks.sdk import WorkspaceClient
+import argparse
+import logging
+
+logging.basicConfig(level=logging.DEBUG)
+
+
+def register_custom_app(confidential: bool) -> tuple[str, str]:
+    """Creates new Custom OAuth App in Databricks Account"""
+    logging.info("No OAuth custom app client/secret provided, creating new app")
+
+    from databricks.sdk import AccountClient
+
+    account_client = AccountClient()
+
+    custom_app = account_client.custom_app_integration.create(
+        name="external-browser-demo",
+        redirect_urls=[
+            f"http://localhost:8020",
+        ],
+        confidential=confidential,
+        scopes=["all-apis"],
+    )
+    logging.info(f"Created new custom app: "
+                 f"--client_id {custom_app.client_id} "
+                 f"{'--client_secret ' + custom_app.client_secret if confidential else ''}")
+
+    return custom_app.client_id, custom_app.client_secret
+
+
+def delete_custom_app(client_id: str):
+    """Creates new Custom OAuth App in Databricks Account"""
+    logging.info(f"Deleting custom app {client_id}")
+    from databricks.sdk import AccountClient
+    account_client = AccountClient()
+    account_client.custom_app_integration.delete(client_id)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--host", help="Databricks host", required=True)
+    parser.add_argument("--client_id", help="Databricks client_id", default=None)
+    parser.add_argument("--azure_client_id", help="Databricks azure_client_id", default=None)
+    parser.add_argument("--client_secret", help="Databricks client_secret", default=None)
+    parser.add_argument("--azure_client_secret", help="Databricks azure_client_secret", default=None)
+    parser.add_argument("--register-custom-app", action="store_true", help="Register a new custom app")
+    parser.add_argument("--register-custom-app-confidential", action="store_true", help="Register a new custom app")
+    namespace = parser.parse_args()
+    if namespace.register_custom_app and (namespace.client_id is not None or namespace.azure_client_id is not None):
+        raise ValueError("Cannot register custom app and provide --client_id/--azure_client_id at the same time")
+    if not namespace.register_custom_app and namespace.client_id is None and namespace.azure_client_id is None:
+        raise ValueError("Must provide --client_id/--azure_client_id or register a custom app")
+    if namespace.register_custom_app:
+        client_id, client_secret = register_custom_app(namespace.register_custom_app_confidential)
+    else:
+        client_id, client_secret = namespace.client_id, namespace.client_secret
+
+    w = WorkspaceClient(
+        host=namespace.host,
+        client_id=client_id,
+        client_secret=client_secret,
+        azure_client_id=namespace.azure_client_id,
+        azure_client_secret=namespace.azure_client_secret,
+        auth_type="external-browser",
+    )
+    me = w.current_user.me()
+    print(me)
+
+    if namespace.register_custom_app:
+        delete_custom_app(client_id)
+
+
diff --git a/examples/flask_app_with_oauth.py b/examples/flask_app_with_oauth.py
index 4128de5ca..7c18eadc7 100755
--- a/examples/flask_app_with_oauth.py
+++ b/examples/flask_app_with_oauth.py
@@ -31,20 +31,21 @@
 import logging
 import sys
 
-from databricks.sdk.oauth import OAuthClient
+from databricks.sdk.oauth import OAuthClient, get_workspace_endpoints
+from databricks.sdk.service.compute import ListClustersFilterBy, State
 
 APP_NAME = "flask-demo"
 all_clusters_template = """"""
 
 
-def create_flask_app(oauth_client: OAuthClient):
+def create_flask_app(workspace_host: str, client_id: str, client_secret: str):
     """The create_flask_app function creates a Flask app that is enabled with OAuth.
 
     It initializes the app and web session secret keys with a randomly generated token. It defines two routes for
@@ -64,7 +65,7 @@ def callback():
         the callback parameters, and redirects the user to the index page."""
         from databricks.sdk.oauth import Consent
 
-        consent = Consent.from_dict(oauth_client, session["consent"])
+        consent = Consent.from_dict(session["consent"], client_secret=client_secret)
         session["creds"] = consent.exchange_callback_parameters(request.args).as_dict()
         return redirect(url_for("index"))
 
@@ -72,21 +73,34 @@ def callback():
     def index():
         """The index page checks if the user has already authenticated and retrieves the user's credentials using
         the Databricks SDK WorkspaceClient. It then renders the template with the clusters' list."""
+        oidc_endpoints = get_workspace_endpoints(workspace_host)
+        port = request.environ.get("SERVER_PORT")
+        redirect_url=f"http://localhost:{port}/callback"
         if "creds" not in session:
+            oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                                       client_id=client_id,
+                                       client_secret=client_secret,
+                                       redirect_url=redirect_url)
             consent = oauth_client.initiate_consent()
             session["consent"] = consent.as_dict()
-            return redirect(consent.auth_url)
+            return redirect(consent.authorization_url)
 
         from databricks.sdk import WorkspaceClient
         from databricks.sdk.oauth import SessionCredentials
 
-        credentials_provider = SessionCredentials.from_dict(oauth_client, session["creds"])
-        workspace_client = WorkspaceClient(host=oauth_client.host,
+        credentials_strategy = SessionCredentials.from_dict(session["creds"],
+                                                            token_endpoint=oidc_endpoints.token_endpoint,
+                                                            client_id=client_id,
+                                                            client_secret=client_secret,
+                                                            redirect_url=redirect_url)
+        workspace_client = WorkspaceClient(host=workspace_host,
                                            product=APP_NAME,
-                                           credentials_provider=credentials_provider,
+                                           credentials_strategy=credentials_strategy,
                                            )
-
-        return render_template_string(all_clusters_template, w=workspace_client)
+        clusters = workspace_client.clusters.list(
+            filter_by=ListClustersFilterBy(cluster_states=[State.RUNNING, State.PENDING])
+        )
+        return render_template_string(all_clusters_template, workspace_host=workspace_host, clusters=clusters)
 
     return app
 
@@ -100,7 +114,11 @@ def register_custom_app(args: argparse.Namespace) -> tuple[str, str]:
     account_client = AccountClient(profile=args.profile)
 
     custom_app = account_client.custom_app_integration.create(
-        name=APP_NAME, redirect_urls=[f"http://localhost:{args.port}/callback"], confidential=True,
+        name=APP_NAME,
+        redirect_urls=[
+            f"http://localhost:{args.port}/callback",
+        ],
+        confidential=True,
         scopes=["all-apis"],
     )
     logging.info(f"Created new custom app: "
@@ -110,22 +128,6 @@ def register_custom_app(args: argparse.Namespace) -> tuple[str, str]:
     return custom_app.client_id, custom_app.client_secret
 
 
-def init_oauth_config(args) -> OAuthClient:
-    """Creates Databricks SDK configuration for OAuth"""
-    oauth_client = OAuthClient(host=args.host,
-                               client_id=args.client_id,
-                               client_secret=args.client_secret,
-                               redirect_url=f"http://localhost:{args.port}/callback",
-                               scopes=["all-apis"],
-                               )
-    if not oauth_client.client_id:
-        client_id, client_secret = register_custom_app(args)
-        oauth_client.client_id = client_id
-        oauth_client.client_secret = client_secret
-
-    return oauth_client
-
-
 def parse_arguments() -> argparse.Namespace:
     """Parses arguments for this demo"""
     parser = argparse.ArgumentParser(prog=APP_NAME, description=__doc__.strip())
@@ -145,8 +147,10 @@ def parse_arguments() -> argparse.Namespace:
     logging.getLogger("databricks.sdk").setLevel(logging.DEBUG)
 
     args = parse_arguments()
-    oauth_cfg = init_oauth_config(args)
-    app = create_flask_app(oauth_cfg)
+    client_id, client_secret = args.client_id, args.client_secret
+    if not client_id:
+        client_id, client_secret = register_custom_app(args)
+    app = create_flask_app(args.host, client_id, client_secret)
 
     app.run(
         host="localhost",
diff --git a/tests/test_oauth.py b/tests/test_oauth.py
index ce2d514ff..a637a5508 100644
--- a/tests/test_oauth.py
+++ b/tests/test_oauth.py
@@ -1,29 +1,126 @@
-from databricks.sdk.core import Config
-from databricks.sdk.oauth import OAuthClient, OidcEndpoints, TokenCache
-
-
-def test_token_cache_unique_filename_by_host(mocker):
-    mocker.patch.object(Config, "oidc_endpoints",
-                        OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    common_args = dict(client_id="abc", redirect_url="http://localhost:8020")
-    c1 = OAuthClient(host="http://localhost:", **common_args)
-    c2 = OAuthClient(host="https://bar.cloud.databricks.com", **common_args)
-    assert TokenCache(c1).filename != TokenCache(c2).filename
-
-
-def test_token_cache_unique_filename_by_client_id(mocker):
-    mocker.patch.object(Config, "oidc_endpoints",
-                        OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    common_args = dict(host="http://localhost:", redirect_url="http://localhost:8020")
-    c1 = OAuthClient(client_id="abc", **common_args)
-    c2 = OAuthClient(client_id="def", **common_args)
-    assert TokenCache(c1).filename != TokenCache(c2).filename
-
-
-def test_token_cache_unique_filename_by_scopes(mocker):
-    mocker.patch.object(Config, "oidc_endpoints",
-                        OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
-    common_args = dict(host="http://localhost:", client_id="abc", redirect_url="http://localhost:8020")
-    c1 = OAuthClient(scopes=["foo"], **common_args)
-    c2 = OAuthClient(scopes=["bar"], **common_args)
-    assert TokenCache(c1).filename != TokenCache(c2).filename
+from databricks.sdk._base_client import _BaseClient
+from databricks.sdk.oauth import (OidcEndpoints, TokenCache,
+                                  get_account_endpoints,
+                                  get_workspace_endpoints)
+
+from .clock import FakeClock
+
+
+def test_token_cache_unique_filename_by_host():
+    common_args = dict(client_id="abc",
+                       redirect_url="http://localhost:8020",
+                       oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
+    assert TokenCache(host="http://localhost:",
+                      **common_args).filename != TokenCache("https://bar.cloud.databricks.com",
+                                                            **common_args).filename
+
+
+def test_token_cache_unique_filename_by_client_id():
+    common_args = dict(host="http://localhost:",
+                       redirect_url="http://localhost:8020",
+                       oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
+    assert TokenCache(client_id="abc", **common_args).filename != TokenCache(client_id="def",
+                                                                             **common_args).filename
+
+
+def test_token_cache_unique_filename_by_scopes():
+    common_args = dict(host="http://localhost:",
+                       client_id="abc",
+                       redirect_url="http://localhost:8020",
+                       oidc_endpoints=OidcEndpoints("http://localhost:1234", "http://localhost:1234"))
+    assert TokenCache(scopes=["foo"], **common_args).filename != TokenCache(scopes=["bar"],
+                                                                            **common_args).filename
+
+
+def test_account_oidc_endpoints(requests_mock):
+    requests_mock.get(
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server",
+        json={
+            "authorization_endpoint":
+            "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize",
+            "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token"
+        })
+    client = _BaseClient(clock=FakeClock())
+    endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client)
+    assert endpoints == OidcEndpoints(
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize",
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token")
+
+
+def test_account_oidc_endpoints_retry_on_429(requests_mock):
+    # It doesn't seem possible to use requests_mock to return different responses for the same request, e.g. when
+    # simulating a transient failure. Instead, the nth_request matcher increments a test-wide counter and only matches
+    # the nth request.
+    request_count = 0
+
+    def nth_request(n):
+
+        def observe_request(_request):
+            nonlocal request_count
+            is_match = request_count == n
+            if is_match:
+                request_count += 1
+            return is_match
+
+        return observe_request
+
+    requests_mock.get(
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server",
+        additional_matcher=nth_request(0),
+        status_code=429)
+    requests_mock.get(
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/.well-known/oauth-authorization-server",
+        additional_matcher=nth_request(1),
+        json={
+            "authorization_endpoint":
+            "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize",
+            "token_endpoint": "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token"
+        })
+    client = _BaseClient(clock=FakeClock())
+    endpoints = get_account_endpoints("accounts.cloud.databricks.com", "abc-123", client=client)
+    assert endpoints == OidcEndpoints(
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/authorize",
+        "https://accounts.cloud.databricks.com/oidc/accounts/abc-123/oauth/token")
+
+
+def test_workspace_oidc_endpoints(requests_mock):
+    requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server",
+                      json={
+                          "authorization_endpoint":
+                          "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize",
+                          "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token"
+                      })
+    client = _BaseClient(clock=FakeClock())
+    endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client)
+    assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize",
+                                      "https://my-workspace.cloud.databricks.com/oidc/oauth/token")
+
+
+def test_workspace_oidc_endpoints_retry_on_429(requests_mock):
+    request_count = 0
+
+    def nth_request(n):
+
+        def observe_request(_request):
+            nonlocal request_count
+            is_match = request_count == n
+            if is_match:
+                request_count += 1
+            return is_match
+
+        return observe_request
+
+    requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server",
+                      additional_matcher=nth_request(0),
+                      status_code=429)
+    requests_mock.get("https://my-workspace.cloud.databricks.com/oidc/.well-known/oauth-authorization-server",
+                      additional_matcher=nth_request(1),
+                      json={
+                          "authorization_endpoint":
+                          "https://my-workspace.cloud.databricks.com/oidc/oauth/authorize",
+                          "token_endpoint": "https://my-workspace.cloud.databricks.com/oidc/oauth/token"
+                      })
+    client = _BaseClient(clock=FakeClock())
+    endpoints = get_workspace_endpoints("my-workspace.cloud.databricks.com", client=client)
+    assert endpoints == OidcEndpoints("https://my-workspace.cloud.databricks.com/oidc/oauth/authorize",
+                                      "https://my-workspace.cloud.databricks.com/oidc/oauth/token")

From d3b85cb867137657a875ceb18192e06456e39952 Mon Sep 17 00:00:00 2001
From: Omer Lachish <289488+rauchy@users.noreply.github.com>
Date: Tue, 22 Oct 2024 15:33:14 +0200
Subject: [PATCH 058/136] [Release] Release v0.36.0 (#798)

### Breaking Changes
* `external_browser` now uses the `databricks-cli` app instead of the
third-party "6128a518-99a9-425b-8333-4cc94f04cacd" application when
performing the U2M login flow for Azure workspaces when a client ID is
not otherwise specified. This matches the AWS behavior.
* The signatures of several OAuth-related constructors have changed to
support U2M OAuth with Azure Entra ID application registrations. See
https://github.com/databricks/databricks-sdk-py/blob/main/examples/flask_app_with_oauth.py
for examples of how to use these classes; a minimal sketch also follows this list.
  * `OAuthClient()`: renamed to `OAuthClient.from_host()`
* `SessionCredentials()` and `SessionCredentials.from_dict()`: now
accept `token_endpoint`, `client_id`, `client_secret`, and
`refresh_url` as parameters, rather than accepting the `OAuthClient`.
* `TokenCache()`: now accepts `host`, `token_endpoint`, `client_id`,
`client_secret`, and `refresh_url` as parameters, rather than accepting
the `OAuthClient`.
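
For quick reference, a minimal sketch of the new shapes, assembled from the
updated `examples/flask_app_with_oauth.py` and `tests/test_oauth.py` in this
series; the host, client ID, secret, and stored-credentials values are
placeholders:

```python
from databricks.sdk.oauth import (SessionCredentials, TokenCache,
                                  get_workspace_endpoints)

# Endpoint discovery is now decoupled from Config/OAuthClient.
oidc = get_workspace_endpoints("my-workspace.cloud.databricks.com")

# SessionCredentials.from_dict() takes the token endpoint and client details
# directly instead of an OAuthClient instance. `stored_creds` stands in for a
# dict previously produced by SessionCredentials.as_dict().
creds = SessionCredentials.from_dict(stored_creds,
                                     token_endpoint=oidc.token_endpoint,
                                     client_id="my-client-id",
                                     client_secret="my-client-secret",
                                     redirect_url="http://localhost:8020/callback")

# TokenCache() likewise takes the host and client details as parameters.
cache = TokenCache(host="https://my-workspace.cloud.databricks.com",
                   client_id="my-client-id",
                   redirect_url="http://localhost:8020/callback",
                   oidc_endpoints=oidc)
```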

### Bug Fixes

* Decouple OAuth functionality from `Config`
([#784](https://github.com/databricks/databricks-sdk-py/pull/784)).


### Release

* Release v0.35.0
([#793](https://github.com/databricks/databricks-sdk-py/pull/793)).

Co-authored-by: Omer Lachish 
---
 CHANGELOG.md              | 20 ++++++++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 21 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 344e975d9..458921ee0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,25 @@
 # Version changelog
 
+## [Release] Release v0.36.0
+
+### Breaking Changes
+* `external_browser` now uses the `databricks-cli` app instead of the third-party "6128a518-99a9-425b-8333-4cc94f04cacd" application when performing the U2M login flow for Azure workspaces when a client ID is not otherwise specified. This matches the AWS behavior.
+* The signatures of several OAuth-related constructors have changed to support U2M OAuth with Azure Entra ID application registrations. See https://github.com/databricks/databricks-sdk-py/blob/main/examples/flask_app_with_oauth.py for examples of how to use these classes.
+  * `OAuthClient()`: renamed to `OAuthClient.from_host()`
+  * `SessionCredentials()` and `SessionCredentials.from_dict()`: now accept `token_endpoint`, `client_id`, `client_secret`, and `refresh_url` as parameters, rather than accepting the `OAuthClient`.
+  * `TokenCache()`: now accepts `host`, `token_endpoint`, `client_id`, `client_secret`, and `refresh_url` as parameters, rather than accepting the `OAuthClient`.
+
+### Bug Fixes
+
+ * Decouple OAuth functionality from `Config` ([#784](https://github.com/databricks/databricks-sdk-py/pull/784)).
+
+
+### Release
+
+ * Release v0.35.0 ([#793](https://github.com/databricks/databricks-sdk-py/pull/793)).
+
+
+
 ## [Release] Release v0.35.0
 
 ### New Features and Improvements
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 2670d0523..aae5aca67 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.35.0'
+__version__ = '0.36.0'

From a1bfd690ccfc6d036a39c60c8b5e5ca36c06dae2 Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Thu, 24 Oct 2024 09:00:12 +0200
Subject: [PATCH 059/136] [Internal] Automatically trigger integration tests on
 PR (#800)

## Changes
Automatically trigger integration tests when a PR is opened or updated

## Tests
Updated workflow triggered the tests. On success, the tests marked the
check as successful.

- [ ] `make test` passing
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 .github/workflows/integration-tests.yml | 59 +++++++++++++++++++++++++
 1 file changed, 59 insertions(+)
 create mode 100644 .github/workflows/integration-tests.yml

diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
new file mode 100644
index 000000000..4f91ccabe
--- /dev/null
+++ b/.github/workflows/integration-tests.yml
@@ -0,0 +1,59 @@
+name: Integration Tests
+
+on:
+
+  pull_request:
+    types: [opened, synchronize]
+
+  merge_group:
+  
+
+jobs:
+  trigger-tests:
+    if: github.event_name == 'pull_request'
+    name: Trigger Tests
+    runs-on: ubuntu-latest
+    environment: "test-trigger-is"
+    
+    steps:
+    - uses: actions/checkout@v3
+
+    - name: Generate GitHub App Token
+      id: generate-token
+      uses: actions/create-github-app-token@v1
+      with:
+        app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
+        private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
+        owner: ${{ secrets.ORG_NAME }}
+        repositories: ${{secrets.REPO_NAME}}
+    
+    - name: Trigger Workflow in Another Repo
+      env:
+        GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+      run: |
+        gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
+        --ref main \
+        -f pull_request_number=${{ github.event.pull_request.number }} \
+        -f commit_sha=${{ github.event.pull_request.head.sha }} 
+
+  # Statuses and checks apply to specific commits (by hash). 
+  # Enforcement of required checks is done both at the PR level and the merge queue level.
+  # In case of multiple commits in a single PR, the hash of the squashed commit 
+  # will not match the one for the latest (approved) commit in the PR.
+  # We auto approve the check for the merge queue for two reasons:
+  # * Queue times out due to duration of tests.
+  # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
+  auto-approve:
+    if: github.event_name == 'merge_group'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Mark Check
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        shell: bash
+        run: |
+            gh api -X POST -H "Accept: application/vnd.github+json" \
+              -H "X-GitHub-Api-Version: 2022-11-28" \
+              /repos/${{ github.repository }}/statuses/${{ github.sha }} \
+              -f 'state=success' \
+              -f 'context=Integration Tests Check'
\ No newline at end of file

From 7aaba2db2334339c43311b7656680d48037e10a8 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Mon, 28 Oct 2024 22:25:03 +0100
Subject: [PATCH 060/136] [Internal] Better isolate ML serving auth unit tests
 (#803)

## Changes

This PR better isolates the ML serving auth tests by explicitly ignoring
any existing config file. This solves an issue for users running the tests in
an environment (e.g. their local environment) that already has a
`.databrickscfg` file.

Note: the solution is a little hacky and we should think about a better
way to communicate that the config file should be ignored.

## Tests

Verified that the unit tests succeed in an environment that has a
`.databrickscfg`.

- [x] `make test` run locally
- [x] `make fmt` applied
- [x] relevant integration tests applied
---
 tests/test_model_serving_auth.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index 092a3bf16..e0e368fae 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -56,6 +56,12 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
 ])
 @raises(default_auth_base_error_message)
 def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
+    # Guarantee that the tests default to env variables rather than the config file.
+    #
+    # TODO: this is hacky and we should find a better way to tell the config
+    # that it should not read from the config file.
+    monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x')
+
     for (env_name, env_value) in env_values:
         monkeypatch.setenv(env_name, env_value)
     monkeypatch.setattr(

From 52a295574b50b5e12e67eb2323c37e018fbeefce Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Wed, 30 Oct 2024 10:15:22 +0100
Subject: [PATCH 061/136] [Internal] Add test instructions for external
 contributors (#804)

## Changes
Add test instructions for external contributors

## Tests
See Go Changes
https://github.com/databricks/databricks-sdk-go/pull/1073
---
 .github/workflows/external-message.yml  | 114 ++++++++++++++++++++++++
 .github/workflows/integration-tests.yml |  20 ++++-
 2 files changed, 133 insertions(+), 1 deletion(-)
 create mode 100644 .github/workflows/external-message.yml

diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml
new file mode 100644
index 000000000..3392fc8e0
--- /dev/null
+++ b/.github/workflows/external-message.yml
@@ -0,0 +1,114 @@
+name: PR Comment
+
+# WARNING:
+# THIS WORKFLOW ALWAYS RUNS FOR EXTERNAL CONTRIBUTORS WITHOUT ANY APPROVAL.
+# THIS WORKFLOW RUNS FROM MAIN BRANCH, NOT FROM THE PR BRANCH.
+# DO NOT PULL THE PR OR EXECUTE ANY CODE FROM THE PR.
+
+on:
+  pull_request_target:
+    types: [opened, reopened, synchronize]
+    branches:
+      - main
+
+
+jobs:
+  comment-on-pr:
+    runs-on: ubuntu-latest
+    permissions:
+      pull-requests: write
+
+    steps:
+      # NOTE: The following checks may not be accurate depending on Org or Repo settings. 
+      - name: Check user and potential secret access
+        id: check-secrets-access
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          USER_LOGIN="${{ github.event.pull_request.user.login }}"
+          REPO_OWNER="${{ github.repository_owner }}"
+          REPO_NAME="${{ github.event.repository.name }}"
+          
+          echo "Pull request opened by: $USER_LOGIN"
+          
+          # Check if PR is from a fork
+          IS_FORK=$([[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]] && echo "true" || echo "false")
+          
+          HAS_ACCESS="false"
+          
+          # Check user's permission level on the repository
+          USER_PERMISSION=$(gh api repos/$REPO_OWNER/$REPO_NAME/collaborators/$USER_LOGIN/permission --jq '.permission')
+          
+          if [[ "$USER_PERMISSION" == "admin" || "$USER_PERMISSION" == "write" ]]; then
+            HAS_ACCESS="true"
+          elif [[ "$USER_PERMISSION" == "read" ]]; then
+            # For read access, we need to check if the user has been explicitly granted secret access
+            # This information is not directly available via API, so we'll make an assumption
+            # that read access does not imply secret access
+            HAS_ACCESS="false"
+          fi
+          
+          # Check if repo owner is an organization
+          IS_ORG=$(gh api users/$REPO_OWNER --jq '.type == "Organization"')
+          
+          if [[ "$IS_ORG" == "true" && "$HAS_ACCESS" == "false" ]]; then
+            # Check if user is a member of any team with write or admin access to the repo
+            TEAMS_WITH_ACCESS=$(gh api repos/$REPO_OWNER/$REPO_NAME/teams --jq '.[] | select(.permission == "push" or .permission == "admin") | .slug')
+            for team in $TEAMS_WITH_ACCESS; do
+              IS_TEAM_MEMBER=$(gh api orgs/$REPO_OWNER/teams/$team/memberships/$USER_LOGIN --silent && echo "true" || echo "false")
+              if [[ "$IS_TEAM_MEMBER" == "true" ]]; then
+                HAS_ACCESS="true"
+                break
+              fi
+            done
+          fi
+          
+          # If it's a fork, set HAS_ACCESS to false regardless of other checks
+          if [[ "$IS_FORK" == "true" ]]; then
+            HAS_ACCESS="false"
+          fi
+          
+          echo "has_secrets_access=$HAS_ACCESS" >> $GITHUB_OUTPUT
+          if [[ "$HAS_ACCESS" == "true" ]]; then
+            echo "User $USER_LOGIN likely has access to secrets"
+          else
+            echo "User $USER_LOGIN likely does not have access to secrets"
+          fi
+
+
+      - uses: actions/checkout@v4
+
+      - name: Delete old comments
+        if: steps.check-secrets-access.outputs.has_secrets_access != 'true'
+        env:
+           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+            # Delete previous comment if it exists
+            previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
+              --jq '.[] | select(.body | startswith("")) | .id')
+            echo "Previous comment IDs: $previous_comment_ids"
+            # Iterate over each comment ID and delete the comment
+            if [ ! -z "$previous_comment_ids" ]; then
+              echo "$previous_comment_ids" | while read -r comment_id; do
+                echo "Deleting comment with ID: $comment_id"
+                gh api "repos/${{ github.repository }}/issues/comments/$comment_id" -X DELETE
+              done
+            fi
+
+      - name: Comment on PR
+        if: steps.check-secrets-access.outputs.has_secrets_access != 'true'
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
+        run: |
+          gh pr comment ${{ github.event.pull_request.number }} --body \
+          "
+          Run integration tests manually:
+          [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
+
+          Inputs:
+          * PR number: ${{github.event.pull_request.number}}
+          * Commit SHA: \`${{ env.COMMIT_SHA }}\`
+          
+          Checks will be approved automatically on success.
+          "
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 4f91ccabe..88d3e865a 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -9,10 +9,28 @@ on:
   
 
 jobs:
+  check-token:
+    name: Check secrets access
+    runs-on: ubuntu-latest
+    outputs:
+      has_token: ${{ steps.set-token-status.outputs.has_token }}
+    steps:
+      - name: Check if GITHUB_TOKEN is set
+        id: set-token-status
+        run: |
+          if [ -z "${{ secrets.GITHUB_TOKEN }}" ]; then
+            echo "GITHUB_TOKEN is empty. User has no access to tokens."
+            echo "::set-output name=has_token::false"
+          else
+            echo "GITHUB_TOKEN is set. User has no access to tokens."
+            echo "::set-output name=has_token::true"
+          fi
+
   trigger-tests:
-    if: github.event_name == 'pull_request'
     name: Trigger Tests
     runs-on: ubuntu-latest
+    needs: check-token
+    if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true'
     environment: "test-trigger-is"
     
     steps:

From 3db35699bb937a2fccd11fc4c5cc4e71d2267a4c Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Wed, 30 Oct 2024 16:00:13 +0100
Subject: [PATCH 062/136] [Fix] Correctly generate classes with nested body
 fields (#808)

## Changes
Correctly generate classes with nested body fields.

### Backwards incompatible changes
The next time code is generated, the following backward incompatible
changes will happen:
* Removal of the following classes: `apps.CreateAppDeploymentRequest`,
`apps.CreateAppRequest`, `apps.UpdateAppRequest`,
`catalog.CreateOnlineTableRequest`, `dashboards.CreateDashboardRequest`,
`dashboards.CreateScheduleRequest`,
`dashboards.CreateSubscriptionRequest`,
`dashboards.UpdateDashboardRequest` and
`dashboards.UpdateScheduleRequest`
* Change of signature for the following methods: `AppsAPI.create`,
`AppsAPI.create_and_wait`, `AppsAPI.deploy`, `AppsAPI.deploy_and_wait`,
`AppsAPI.update`, `OnlineTablesAPI.create`,
`OnlineTablesAPI.create_and_wait`, `LakeviewAPI.create`,
`LakeviewAPI.create_schedule`, `LakeviewAPI.create_subscription`,
`LakeviewAPI.update` and `LakeviewAPI.update_schedule`. Each of those
methods now takes an object instead of a list of parameters (see the sketch below).
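
For illustration, a hedged sketch of the new call shape for `AppsAPI.create`;
the `app` parameter name and `App` field names are assumptions for
illustration, not taken from this patch:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App  # assumed dataclass name

w = WorkspaceClient()

# Before: flat keyword parameters mirroring the request body, e.g.
#   w.apps.create(name="my-app", description="A demo app")

# After: the nested body is passed as a single object.
w.apps.create(app=App(name="my-app", description="A demo app"))
```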

## Tests
Preview

https://github.com/databricks/databricks-sdk-py/commit/66ce4505690ed7ca452a651df614ceb99bba24b1

- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 .codegen/service.py.tmpl | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl
index 4307e0913..a062e4d10 100644
--- a/.codegen/service.py.tmpl
+++ b/.codegen/service.py.tmpl
@@ -240,6 +240,9 @@ class {{.PascalName}}API:{{if .Description}}
 {{- end}}
 
 {{define "method-serialize" -}}
+        {{if and .Request.HasJsonField .RequestBodyField -}}
+        body = {{template "safe-snake-name" .RequestBodyField}}
+        {{- else -}}
         {{if or .Request.HasJsonField .Request.HasQueryField -}}
         {{if .Request.HasJsonField}}body = {}{{end}}{{if .Request.HasQueryField}}
         query = {}{{end}}
@@ -251,6 +254,7 @@ class {{.PascalName}}API:{{if .Description}}
         {{- end}}
         {{- end}}
         {{- end}}
+        {{- end}}
 {{- end}}
 
 {{ define "method-headers" -}}

From f2b858c4c6d3e6c425114ce40e4063866539819c Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Wed, 30 Oct 2024 17:33:32 +0100
Subject: [PATCH 063/136] [Internal] Add `cleanrooms` package (#806)

## Changes
The cleanrooms package is being introduced into the SDK. This ensures
that docs will be generated for this package when ready.

## Tests


- [ ] `make test` run locally
- [ ] `make fmt` applied
- [ ] relevant integration tests applied
---
 docs/gen-client-docs.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py
index 5c32beffe..ac18406b7 100644
--- a/docs/gen-client-docs.py
+++ b/docs/gen-client-docs.py
@@ -248,6 +248,7 @@ class Generator:
         Package("dashboards", "Dashboards", "Manage Lakeview dashboards"),
         Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"),
         Package("apps", "Apps", "Build custom applications on Databricks"),
+        Package("cleanrooms", "Clean Rooms", "Manage clean rooms and their assets and task runs"),
     ]
 
     def __init__(self):
@@ -375,13 +376,19 @@ def _make_folder_if_not_exists(folder):
 
     def write_dataclass_docs(self):
         self._make_folder_if_not_exists(f'{__dir__}/dbdataclasses')
+        all_packages = []
         for pkg in self.packages:
-            module = importlib.import_module(f'databricks.sdk.service.{pkg.name}')
+            try:
+                module = importlib.import_module(f'databricks.sdk.service.{pkg.name}')
+            except ModuleNotFoundError:
+                print(f'No module found for {pkg.name}, continuing')
+                continue
+            all_packages.append(pkg.name)
             all_members = [name for name, _ in inspect.getmembers(module, predicate=self._should_document)]
             doc = DataclassesDoc(package=pkg, dataclasses=sorted(all_members))
             with open(f'{__dir__}/dbdataclasses/{pkg.name}.rst', 'w') as f:
                 f.write(doc.as_rst())
-        all = "\n   ".join(sorted([p.name for p in self.packages]))
+        all = "\n   ".join(sorted(all_packages))
         with open(f'{__dir__}/dbdataclasses/index.rst', 'w') as f:
             f.write(f'''
 Dataclasses

From f5697f071131b8bbb1725da9c8b1a12d648648d6 Mon Sep 17 00:00:00 2001
From: Parth Bansal 
Date: Thu, 31 Oct 2024 12:38:07 +0100
Subject: [PATCH 064/136] [Internal] Move templates in the code generator
 (#809)

## Changes

Move templates in the code generator.

## Tests

The generator is working correctly with the new configuration.
---
 .codegen.json                    |  17 +-
 .codegen/__init__.py.tmpl        | 194 --------------
 .codegen/error_mapping.py.tmpl   |  20 --
 .codegen/error_overrides.py.tmpl |  20 --
 .codegen/example.py.tmpl         | 112 --------
 .codegen/lib.tmpl                |  12 -
 .codegen/service.py.tmpl         | 423 -------------------------------
 7 files changed, 2 insertions(+), 796 deletions(-)
 delete mode 100644 .codegen/__init__.py.tmpl
 delete mode 100644 .codegen/error_mapping.py.tmpl
 delete mode 100644 .codegen/error_overrides.py.tmpl
 delete mode 100644 .codegen/example.py.tmpl
 delete mode 100644 .codegen/lib.tmpl
 delete mode 100644 .codegen/service.py.tmpl

diff --git a/.codegen.json b/.codegen.json
index a1886bd80..3a880d1a9 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -1,20 +1,6 @@
 {
-  "formatter": "yapf -pri $FILENAMES && autoflake -i $FILENAMES && isort $FILENAMES",
+  "mode": "py_v0",
   "changelog_config": ".codegen/changelog_config.yml",
-  "template_libraries": [
-    ".codegen/lib.tmpl"
-  ],
-  "packages": {
-    ".codegen/service.py.tmpl": "databricks/sdk/service/{{.Name}}.py"
-  },
-  "batch": {
-    ".codegen/__init__.py.tmpl": "databricks/sdk/__init__.py",
-    ".codegen/error_mapping.py.tmpl": "databricks/sdk/errors/platform.py",
-    ".codegen/error_overrides.py.tmpl": "databricks/sdk/errors/overrides.py"
-  },
-  "samples": {
-    ".codegen/example.py.tmpl": "examples/{{if .IsAccount}}account{{else}}workspace{{end}}/{{.Service.SnakeName}}/{{.Method.SnakeName}}_{{.SnakeName}}.py"
-  },
   "version": {
     "databricks/sdk/version.py": "__version__ = '$VERSION'"
   },
@@ -28,6 +14,7 @@
       "pip install '.[dev]'"
     ],
     "post_generate": [
+      "make fmt",
       "pytest -m 'not integration' --cov=databricks --cov-report html tests",
       "pip install .",
       "python docs/gen-client-docs.py"
diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl
deleted file mode 100644
index d54e9dfff..000000000
--- a/.codegen/__init__.py.tmpl
+++ /dev/null
@@ -1,194 +0,0 @@
-import databricks.sdk.core as client
-import databricks.sdk.dbutils as dbutils
-from databricks.sdk.credentials_provider import CredentialsStrategy
-
-from databricks.sdk.mixins.files import DbfsExt
-from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.workspace import WorkspaceExt
-from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
-{{- range .Services}}
-from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}
-from databricks.sdk.service.provisioning import Workspace
-from databricks.sdk import azure
-from typing import Optional
-
-{{$args := list "host" "account_id" "username" "password" "client_id" "client_secret"
-  "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret"
-  "azure_client_id" "azure_tenant_id" "azure_environment" "auth_type" "cluster_id"
-  "google_credentials" "google_service_account" }}
-
-{{- define "api" -}}
-  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsAPI" "ServingEndpointsExt" -}}
-  {{- $genApi := concat .PascalName "API" -}}
-  {{- getOrDefault $mixins $genApi $genApi -}}
-{{- end -}}
-
-def _make_dbutils(config: client.Config):
-    # We try to directly check if we are in runtime, instead of
-    # trying to import from databricks.sdk.runtime. This is to prevent
-    # remote dbutils from being created without the config, which is both
-    # expensive (will need to check all credential providers) and can
-    # throw errors (when no env vars are set).
-    try:
-        from dbruntime import UserNamespaceInitializer
-    except ImportError:
-        return dbutils.RemoteDbUtils(config)
-
-    # We are in runtime, so we can use the runtime dbutils
-    from databricks.sdk.runtime import dbutils as runtime_dbutils
-    return runtime_dbutils
-
-
-class WorkspaceClient:
-    """
-    The WorkspaceClient is a client for the workspace-level Databricks REST API.
-    """
-    def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}},
-                 debug_truncate_bytes: Optional[int] = None,
-                 debug_headers: Optional[bool] = None,
-                 product="unknown",
-                 product_version="0.0.0",
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 config: Optional[client.Config] = None):
-        if not config:
-          config = client.Config({{range $args}}{{.}}={{.}}, {{end}}
-            credentials_strategy=credentials_strategy,
-            credentials_provider=credentials_provider,
-            debug_truncate_bytes=debug_truncate_bytes,
-            debug_headers=debug_headers,
-            product=product,
-            product_version=product_version)
-        self._config = config.copy()
-        self._dbutils = _make_dbutils(self._config)
-        self._api_client = client.ApiClient(self._config)
-
-        {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}}
-        {{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}}
-
-        {{- range .Services}}
-        {{- if and (not .IsAccounts) (not .HasParent)}}
-        {{- if .IsDataPlane}}
-        self._{{.SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}})
-        {{- else if .HasDataPlaneAPI}}
-        self._{{.SnakeName}} = {{.SnakeName}}
-        {{- else}}
-        self._{{.SnakeName}} = {{template "api" .}}(self._api_client)
-        {{- end -}}
-        {{- end -}}
-        {{end}}
-
-    @property
-    def config(self) -> client.Config:
-        return self._config
-
-    @property
-    def api_client(self) -> client.ApiClient:
-        return self._api_client
-
-    @property
-    def dbutils(self) -> dbutils.RemoteDbUtils:
-        return self._dbutils
-
-    {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}
-    @property
-    def {{.SnakeName}}(self) -> {{template "api" .}}:
-        {{if .Description}}"""{{.Summary}}"""{{end}}
-        return self._{{.SnakeName}}
-    {{end -}}{{end}}
-
-    def get_workspace_id(self) -> int:
-        """Get the workspace ID of the workspace that this client is connected to."""
-        response = self._api_client.do("GET",
-                                       "/api/2.0/preview/scim/v2/Me",
-                                       response_headers=['X-Databricks-Org-Id'])
-        return int(response["X-Databricks-Org-Id"])
-
-    def __repr__(self):
-        return f"WorkspaceClient(host='{self._config.host}', auth_type='{self._config.auth_type}', ...)"
-
-class AccountClient:
-    """
-    The AccountClient is a client for the account-level Databricks REST API.
-    """
-
-    def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}},
-                 debug_truncate_bytes: Optional[int] = None,
-                 debug_headers: Optional[bool] = None,
-                 product="unknown",
-                 product_version="0.0.0",
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 config: Optional[client.Config] = None):
-        if not config:
-          config = client.Config({{range $args}}{{.}}={{.}}, {{end}}
-            credentials_strategy=credentials_strategy,
-            credentials_provider=credentials_provider,
-            debug_truncate_bytes=debug_truncate_bytes,
-            debug_headers=debug_headers,
-            product=product,
-            product_version=product_version)
-        self._config = config.copy()
-        self._api_client = client.ApiClient(self._config)
-
-        {{- range .Services}}{{if and .IsAccounts (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}}
-        {{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}}
-
-        {{- range .Services}}
-        {{- if and .IsAccounts (not .HasParent)}}
-        {{- if .IsDataPlane}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}})
-        {{- else if .HasDataPlaneAPI}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{(.TrimPrefix "account").SnakeName}}
-        {{- else}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client)
-        {{- end -}}
-        {{- end -}}
-        {{end}}
-
-    @property
-    def config(self) -> client.Config:
-        return self._config
-
-    @property
-    def api_client(self) -> client.ApiClient:
-        return self._api_client
-
-    {{- range .Services}}{{if and .IsAccounts (not .HasParent)}}
-    @property
-    def {{(.TrimPrefix "account").SnakeName}}(self) -> {{template "api" .}}:{{if .Description}}
-        """{{.Summary}}"""{{end}}
-        return self._{{(.TrimPrefix "account").SnakeName}}
-    {{end -}}{{end}}
-
-    def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
-        """Constructs a ``WorkspaceClient`` for the given workspace.
-
-        Returns a ``WorkspaceClient`` that is configured to use the same
-        credentials as this ``AccountClient``. The underlying config is
-        copied from this ``AccountClient``, but the ``host`` and
-        ``azure_workspace_resource_id`` are overridden to match the
-        given workspace, and the ``account_id`` field is cleared.
-
-        Usage:
-
-        .. code-block::
-
-            wss = list(a.workspaces.list())
-            if len(wss) == 0:
-                pytest.skip("no workspaces")
-            w = a.get_workspace_client(wss[0])
-            assert w.current_user.me().active
-
-        :param workspace: The workspace to construct a client for.
-        :return: A ``WorkspaceClient`` for the given workspace.
-        """
-        config = self._config.deep_copy()
-        config.host = config.environment.deployment_url(workspace.deployment_name)
-        config.azure_workspace_resource_id = azure.get_azure_resource_id(workspace)
-        config.account_id = None
-        config.init_auth()
-        return WorkspaceClient(config=config)
-
-    def __repr__(self):
-        return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)"
diff --git a/.codegen/error_mapping.py.tmpl b/.codegen/error_mapping.py.tmpl
deleted file mode 100644
index b3cc8cea6..000000000
--- a/.codegen/error_mapping.py.tmpl
+++ /dev/null
@@ -1,20 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from .base import DatabricksError
-
-{{range .ExceptionTypes}}
-class {{.PascalName}}({{if .Inherit -}}
-    {{.Inherit.PascalName}}
-  {{- else -}}
-    DatabricksError
-  {{- end -}}):
-  """{{.Comment "    " 100 | trimSuffix "\"" }}"""
-{{end}}
-
-STATUS_CODE_MAPPING = { {{range .ErrorStatusCodeMapping}}
-   {{.StatusCode}}: {{.PascalName}},{{- end}}
-}
-
-ERROR_CODE_MAPPING = { {{range .ErrorCodeMapping}}
-    '{{.ErrorCode}}': {{.PascalName}},{{- end}}
-}
diff --git a/.codegen/error_overrides.py.tmpl b/.codegen/error_overrides.py.tmpl
deleted file mode 100644
index adcfea555..000000000
--- a/.codegen/error_overrides.py.tmpl
+++ /dev/null
@@ -1,20 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from .base import _ErrorOverride
-from .platform import *
-import re
-
-
-_ALL_OVERRIDES = [
-    {{ range .ErrorOverrides -}}
-    _ErrorOverride(
-        debug_name="{{.Name}}",
-        path_regex=re.compile(r'{{.PathRegex}}'),
-        verb="{{.Verb}}",
-        status_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .StatusCodeMatcher}}'),
-        error_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .ErrorCodeMatcher}}'),
-        message_matcher=re.compile(r'{{replaceAll "'" "\\'" .MessageMatcher}}'),
-        custom_error={{.OverrideErrorCode.PascalName}},
-    ),
-{{- end }}
-]
diff --git a/.codegen/example.py.tmpl b/.codegen/example.py.tmpl
deleted file mode 100644
index dba71d9bf..000000000
--- a/.codegen/example.py.tmpl
+++ /dev/null
@@ -1,112 +0,0 @@
-from databricks.sdk import {{if .IsAccount}}AccountClient{{else}}WorkspaceClient{{end}}
-from databricks.sdk.service import _internal{{range .Suite.ServiceToPackage}}, {{.}}{{end}}
-import time, base64, os
-
-{{$example := .}}
-{{if .IsAccount}}a{{else}}w{{end}} = {{if .IsAccount}}Account{{else}}Workspace{{end}}Client()
-
-{{range .Init}}
-{{.SnakeName}} = {{template "expr" .Value}}
-{{end}}
-
-{{range .Calls}}
-{{if .Service -}}
-  {{template "svc-call" .}}
-{{- else -}}
-  {{with .Assign}}{{.SnakeName}} = {{end}}{{template "expr" .}}
-{{- end}}
-{{end}}
-
-{{with .Cleanup}}
-# cleanup
-{{range . -}}
-  {{template "svc-call" .}}
-{{end}}
-{{end}}
-
-{{define "svc-call" -}}
-  {{with .Assign}}{{.SnakeName}} = {{end}}{{if .IsAccount}}a{{else}}w{{end}}.{{.Service.SnakeName}}.{{.Original.SnakeName}}{{if eq .Original.SnakeName "import"}}_{{end}}({{template "method-args" .}})
-  {{- if .IsWait}}.result(){{end}}
-{{- end}}
-
-{{define "expr" -}}
-{{- if eq .Type "binary" -}}
-    {{template "expr" .Left}} {{.Op}} {{template "expr" .Right}}
-{{- else if eq .Type "index" -}}
-    {{template "expr" .Left}}[{{template "expr" .Right}}]
-{{- else if eq .Type "boolean" -}}
-    {{if .Value}}True{{else}}False{{end}}
-{{- else if eq .Type "heredoc" -}}
-"""{{.Value}}"""
-{{- else if eq .Type "literal" -}}
-    {{.Value}}
-{{- else if eq .Type "lookup" -}}
-    {{template "expr" .X}}.{{.Field.SnakeName}}
-{{- else if eq .Type "enum" -}}
-    {{.Package}}.{{.Entity.PascalName}}.{{.ConstantName}}
-{{- else if eq .Type "variable" -}}
-    {{if eq .SnakeName "true"}}True
-    {{- else if eq .SnakeName "false"}}False
-    {{else}}{{.SnakeName}}{{end}}
-{{- else if eq .Type "entity" -}}
-    {{.Package}}.{{.PascalName}}({{template "kwargs" .FieldValues}})
-{{- else if eq .Type "call" -}}
-    {{template "call" .}}
-{{- else if eq .Type "map" -}}
-    { {{range .Pairs}}{{template "expr" .Key}}: {{template "expr" .Value}},{{end}} }
-{{- else if eq .Type "array" -}}
-    [ {{range $i, $x := .Values}}{{if $i}}, {{end}}{{template "expr" .}}{{end}} ]
-{{- else -}}
-    /* UNKNOWN: {{.Type}} */
-{{- end -}}
-{{- end}}
-
-{{define "method-args" -}}
-  {{with .Request -}}
-    {{template "kwargs" .}}
-  {{- else -}}
-    {{template "args" .}}
-  {{- end}}
-{{- end}}
-
-{{define "kwargs" -}}
-  {{range $i, $x := . -}}
-    {{if $i}}, {{end}}{{.SnakeName}}={{template "expr" .Value}}
-  {{- end}}
-{{- end}}
-
-{{define "args" -}}
-  {{range $i, $x := .Args -}}
-    {{if $i}}, {{end}}{{template "expr" .}}
-  {{- end}}
-{{- end}}
-
-{{define "call" -}}
-{{- if eq .PascalName "GetEnvOrSkipTest" -}}
-os.environ[{{template "args" .}}]
-{{- else if eq .PascalName "Dir" -}}
-os.path.dirname({{template "args" .}})
-{{- else if eq .PascalName "Sprintf" -}}
-{{range $i, $x := .Args}}{{if eq $i 0}}{{template "expr" .}} % ({{else}} {{if gt $i 1}}, {{end}}  {{template "expr" .}} {{end}}{{end}})
-{{- else if eq .PascalName "MustParseInt64" -}}
-{{template "args" .}}
-{{- else if eq .PascalName "RandomEmail" -}}
-f'sdk-{time.time_ns()}@example.com'
-{{- else if eq .PascalName "RandomName" -}}
-f'sdk-{time.time_ns()}'
-{{- else if eq .PascalName "RandomHex" -}}
-hex(time.time_ns())[2:]
-{{- else if eq .PascalName "EncodeToString" -}}
-base64.b64encode({{template "args" .}}.encode()).decode()
-{{- else if eq .PascalName "CanonicalHostName" -}}
-w.config.host
-{{- else if eq .PascalName "SharedRunningCluster" -}}
-w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-{{- else if eq .PascalName "DltNotebook" -}}
-"CREATE LIVE TABLE dlt_sample AS SELECT 1"
-{{- else if eq .PascalName "MyNotebookPath" -}}
-f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-{{- else -}}
-{{.SnakeName}}({{range $i, $x := .Args}}{{if $i}}, {{end}}{{template "expr" .}}{{end}})
-{{- end -}}
-{{- end}}
diff --git a/.codegen/lib.tmpl b/.codegen/lib.tmpl
deleted file mode 100644
index 50233ca08..000000000
--- a/.codegen/lib.tmpl
+++ /dev/null
@@ -1,12 +0,0 @@
-{{ define "safe-name" -}}
-  {{/* https://docs.python.org/3/reference/lexical_analysis.html#keywords */}}
-  {{- $keywords := list	"False" "await" "else" "import" "pass" "None" "break" "except" "in" "raise"
-                       	"True" "class" "finally" "is" "return" "and" "continue" "for" "lambda" "try"
-                       	"as" "def" "from" "nonlocal" "while" "assert" "del" "global" "not" "with"
-                       	"async" "elif" "if" "or" "yield" -}}
-  {{.}}{{ if in $keywords . }}_{{ end }}
-{{- end}}
-
-{{ define "safe-snake-name" -}}
-  {{ template "safe-name" .SnakeName }}
-{{- end}}
diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl
deleted file mode 100644
index a062e4d10..000000000
--- a/.codegen/service.py.tmpl
+++ /dev/null
@@ -1,423 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from __future__ import annotations
-from dataclasses import dataclass
-from datetime import timedelta
-from enum import Enum
-from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
-import time
-import random
-import logging
-import requests
-
-from ..data_plane import DataPlaneService
-from ..errors import OperationTimeout, OperationFailed
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
-from ..oauth import Token
-
-_LOG = logging.getLogger('databricks.sdk')
-
-{{range .ImportedEntities}}
-from databricks.sdk.service import {{.Package.Name}}{{end}}
-
-# all definitions in this file are in alphabetical order
-{{range .Types}}
-{{if or .Fields .IsEmpty -}}{{if not .IsRequest}}@dataclass
-class {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:{{if .Description}}
-    """{{.Comment "    " 100}}"""
-    {{end}}
-    {{- range .RequiredFields}}
-    {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{if .Description}}
-    """{{.Comment "    " 100 | trimSuffix "\""}}"""{{end}}
-    {{end}}
-    {{- range .NonRequiredFields}}
-    {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{if .Description}}
-    """{{.Comment "    " 100 | trimSuffix "\""}}"""{{end}}
-    {{end}}
-    {{if or .IsEmpty .HasJsonField .HasHeaderField .HasByteStreamField -}}
-    def as_dict(self) -> dict:
-        """Serializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        {{range .Fields}}if self.{{template "safe-snake-name" .}}{{with .Entity.IsPrimitive}} is not None{{end}}: body['{{.Name}}'] = {{template "as_request_type" .}}
-        {{end -}}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:
-        """Deserializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} from a dictionary."""
-        return cls({{range $i, $f := .Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" $f}}={{template "from_dict_type" $f}}{{end}})
-    {{end}}
-{{end}}
-{{else if .ArrayValue}}type {{.PascalName}} []{{template "type" .ArrayValue}}
-{{else if .MapValue}}{{.PascalName}} = {{template "type" .}}
-{{else if .Enum}}class {{.PascalName}}(Enum):
-    {{if .Description}}"""{{.Comment "    " 100 | trimSuffix "\"" }}"""{{end}}
-    {{range .Enum }}
-    {{.ConstantName}} = '{{.Content}}'{{end}}{{end}}
-{{end}}
-{{- define "from_dict_type" -}}
-	{{- if not .Entity }}None
-	{{- else if .Entity.ArrayValue }}
-		{{- if (or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal) }}_repeated_dict(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}})
-		{{- else if .Entity.ArrayValue.Enum }}_repeated_enum(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}})
-		{{- else}}d.get('{{.Name}}', None){{- end -}}
-	{{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}_from_dict(d, '{{.Name}}', {{template "type" .Entity}})
-	{{- else if .Entity.Enum }}_enum(d, '{{.Name}}', {{template "type" .Entity}})
-	{{- else if and .IsHeader (or .Entity.IsInt64 .Entity.IsInt) }} int(d.get('{{.Name}}', None))
-	{{- else}}d.get('{{.Name}}', None){{- end -}}
-{{- end -}}
-{{- define "as_request_type" -}}
-	{{- if not .Entity }}None # ERROR: No Type
-	{{- /* This should be done recursively, but recursion in text templates is not supported. */ -}}
-	{{- else if .Entity.ArrayValue }}[{{if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal}}v.as_dict(){{ else if .Entity.ArrayValue.Enum }}v.value{{else}}v{{end}} for v in self.{{template "safe-snake-name" .}}]
-	{{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}self.{{template "safe-snake-name" .}}.as_dict()
-	{{- else if .Entity.Enum }}self.{{template "safe-snake-name" .}}.value
-	{{- else}}self.{{template "safe-snake-name" .}}{{- end -}}
-{{- end -}}
-{{- define "type" -}}
-	{{- if not . }}any # ERROR: No Type
-	{{- else if .IsExternal }}{{.Package.Name}}.{{.PascalName}}
-	{{- else if .ArrayValue }}List[{{template "type" .ArrayValue}}]
-	{{- else if .MapValue }}Dict[str,{{template "type" .MapValue}}]
-	{{- else if .IsObject }}{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}
-	{{- else if .Enum }}{{.PascalName}}
-	{{- else if .IsString}}str
-	{{- else if .IsAny}}Any
-	{{- else if .IsEmpty}}{{.PascalName}}
-	{{- else if .IsBool}}bool
-	{{- else if .IsInt64}}int
-	{{- else if .IsFloat64}}float
-	{{- else if .IsInt}}int
-	{{- else if .IsByteStream}}BinaryIO
-	{{- else}}any /* MISSING TYPE */
-	{{- end -}}
-{{- end -}}
-
-{{- define "type-doc" -}}
-	{{- if .IsExternal }}:class:`{{.PascalName}}`
-	{{- else if .IsEmpty}}:class:`{{template "type" .}}`
-	{{- else if .ArrayValue }}List[{{template "type-doc" .ArrayValue}}]
-	{{- else if .MapValue }}Dict[str,{{template "type-doc" .MapValue}}]
-	{{- else if .IsObject }}:class:`{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}`
-	{{- else if .Enum }}:class:`{{.PascalName}}`
-	{{- else}}{{template "type" . }}
-	{{- end -}}
-{{- end -}}
-
-{{range .Services}}
-class {{.PascalName}}API:{{if .Description}}
-    """{{.Comment "    " 110}}"""
-    {{end}}
-    def __init__(self, api_client{{if .IsDataPlane}}, control_plane{{end}}):
-        self._api = api_client
-        {{if .IsDataPlane -}}
-        self._control_plane = control_plane
-        self._data_plane_service = DataPlaneService()
-        {{end -}}
-        {{range .Subservices}}
-        self._{{.SnakeName}} = {{.PascalName}}API(self._api){{end}}
-
-    {{range .Subservices}}
-    @property
-    def {{.SnakeName}}(self) -> {{.PascalName}}API:
-        {{if .Description}}"""{{.Summary}}"""{{end}}
-        return self._{{.SnakeName}}
-    {{end}}
-
-    {{range .Waits}}
-    def {{template "safe-snake-name" .}}(self{{range .Binding}}, {{template "safe-snake-name" .PollField}}: {{template "type" .PollField.Entity}}{{end}},
-      timeout=timedelta(minutes={{.Timeout}}), callback: Optional[Callable[[{{.Poll.Response.PascalName}}], None]] = None) -> {{.Poll.Response.PascalName}}:
-      deadline = time.time() + timeout.total_seconds()
-      target_states = ({{range .Success}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{if .Failure}}
-      failure_states = ({{range .Failure}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{end}}
-      status_message = 'polling...'
-      attempt = 1
-      while time.time() < deadline:
-        poll = self.{{template "safe-snake-name" .Poll}}({{range $i, $b := .Binding}}{{if $i}}, {{end}}{{template "safe-snake-name" .PollField}}={{template "safe-snake-name" .PollField}}{{- end}})
-        status = poll{{range .StatusPath}}.{{template "safe-snake-name" .}}{{end}}
-        {{if .ComplexMessagePath -}}
-        status_message = f'current status: {status}'
-        if poll.{{template "safe-snake-name" .MessagePathHead}}:
-          status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}}
-        {{- else if .MessagePath -}}
-        status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}}
-        {{- else -}}
-        status_message = f'current status: {status}'
-        {{- end}}
-        if status in target_states:
-          return poll
-        if callback:
-          callback(poll)
-        {{if .Failure -}}
-        if status in failure_states:
-          msg = f'failed to reach {{range $i, $e := .Success}}{{if $i}} or {{end}}{{$e.Content}}{{end}}, got {status}: {status_message}'
-          raise OperationFailed(msg)
-        {{end}}prefix = f"{{range $i, $b := .Binding}}{{if $i}}, {{end -}}
-           {{template "safe-snake-name" .PollField}}={{"{"}}{{template "safe-snake-name" .PollField}}{{"}"}}
-        {{- end}}"
-        sleep = attempt
-        if sleep > 10:
-          # sleep 10s max per attempt
-          sleep = 10
-        _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-        time.sleep(sleep + random.random())
-        attempt += 1
-      raise TimeoutError(f'timed out after {timeout}: {status_message}')
-    {{end}}
-
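
(For orientation: the `{{range .Waits}}` block above expands into long-running-operation waiters of roughly the following shape. This is a minimal Python sketch distilled from the template, not actual generated output; `get_app`, `'ACTIVE'`, and `'ERROR'` stand in for the bound poll method and its target/failure states.)

```python
import random
import time
from datetime import timedelta

def wait_get_app_active(get_app, name: str, timeout=timedelta(minutes=20)):
    # Poll until a target state is reached, fail fast on failure states,
    # and back off linearly (capped at 10s) with jitter between attempts.
    deadline = time.time() + timeout.total_seconds()
    status_message = 'polling...'
    attempt = 1
    while time.time() < deadline:
        poll = get_app(name=name)
        status = poll.compute_status.state
        status_message = f'current status: {status}'
        if status == 'ACTIVE':    # target_states in the template
            return poll
        if status == 'ERROR':     # failure_states; the generated code raises OperationFailed
            raise RuntimeError(f'failed to reach ACTIVE, got {status}: {status_message}')
        sleep = min(attempt, 10)  # "sleep 10s max per attempt"
        time.sleep(sleep + random.random())
        attempt += 1
    raise TimeoutError(f'timed out after {timeout}: {status_message}')
```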
-    {{range .Methods}}
-    def {{template "safe-snake-name" .}}({{ template "method-parameters" . }}){{template "method-return-type" .}}:
-        {{if .Description}}"""{{.Comment "        " 110 | trimSuffix "\"" }}
-        {{with .Request}}{{range .RequiredFields}}
-        :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}}{{if .Description}}
-          {{.Comment "          " 110 | trimSuffix "\"" }}{{end}}
-        {{- end}}{{range .NonRequiredFields}}
-        :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}} (optional){{if .Description}}
-          {{.Comment "          " 110 | trimSuffix "\"" }}{{end}}
-        {{- end}}
-        {{end}}
-        {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}
-        :returns:
-          Long-running operation waiter for {{template "type-doc" .Wait.Poll.Response}}.
-          See :method:{{template "safe-snake-name" .Wait}} for more details.
-        {{- else if not .Response.IsEmpty }}:returns: {{if .Response.ArrayValue -}}
-          Iterator over {{template "type-doc" .Response.ArrayValue}}
-        {{- else if .Pagination -}}
-          Iterator over {{template "type-doc" .Pagination.Entity}}
-        {{- else -}}
-          {{template "type-doc" .Response}}
-        {{- end}}{{end}}
-        """{{end}}
-        {{if .Request -}}
-        {{template "method-serialize" .}}
-        {{- end}}
-        {{- if .Service.IsDataPlane}}
-        {{template "data-plane" .}}
-        {{- end}}
-        {{template "method-headers" . }}
-        {{if .Response.HasHeaderField -}}
-        {{template "method-response-headers" . }}
-        {{- end}}
-        {{template "method-call" .}}
-
-    {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }}
-    def {{.SnakeName}}_and_wait({{ template "method-parameters" . }},
-        timeout=timedelta(minutes={{.Wait.Timeout}})) -> {{.Wait.Poll.Response.PascalName}}:
-        return self.{{template "safe-snake-name" .}}({{range $i, $x := .Request.Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" .}}={{template "safe-snake-name" .}}{{end}}).result(timeout=timeout)
-    {{end}}
-    {{end -}}
-{{- end}}
-
-{{define "data-plane" -}}
-        def info_getter():
-            response = self._control_plane.{{.Service.DataPlaneInfoMethod.SnakeName}}(
-                {{- range .Service.DataPlaneInfoMethod.Request.Fields }}
-                {{.SnakeName}} = {{.SnakeName}},
-                {{- end}}
-                )
-            if response.{{(index .DataPlaneInfoFields 0).SnakeName}} is None:
-            	raise Exception("Resource does not support direct Data Plane access")
-            return response{{range .DataPlaneInfoFields}}.{{.SnakeName}}{{end}}
-
-        get_params = [{{- range .Service.DataPlaneInfoMethod.Request.Fields }}{{.SnakeName}},{{end}}]
-        data_plane_details = self._data_plane_service.get_data_plane_details('{{.SnakeName}}', get_params, info_getter, self._api.get_oauth_token)
-        token = data_plane_details.token
-
-        def auth(r: requests.PreparedRequest) -> requests.PreparedRequest:
-            authorization = f"{token.token_type} {token.access_token}"
-            r.headers["Authorization"] = authorization
-            return r
-{{- end}}
-
-{{define "method-parameters" -}}
-  self{{if .Request}}
-       {{- if .Request.MapValue }}, contents: {{template "type" .Request }}{{ end }}
-       {{range .Request.RequiredFields}}, {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{end}}
-       {{if .Request.NonRequiredFields}}, *
-         {{range .Request.NonRequiredFields}}, {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{end}}
-       {{- end}}
-     {{- end}}
-{{- end}}
-
-{{define "method-serialize" -}}
-        {{if and .Request.HasJsonField .RequestBodyField -}}
-        body = {{template "safe-snake-name" .RequestBodyField}}
-        {{- else -}}
-        {{if or .Request.HasJsonField .Request.HasQueryField -}}
-        {{if .Request.HasJsonField}}body = {}{{end}}{{if .Request.HasQueryField}}
-        query = {}{{end}}
-        {{- range .Request.Fields}}{{ if and (not .IsPath) (not .IsHeader) }}
-        {{- if .IsQuery }}
-        if {{template "safe-snake-name" .}} is not None: query['{{.Name}}'] = {{template "method-param-bind" .}}{{end}}
-        {{- if .IsJson }}
-        if {{template "safe-snake-name" .}} is not None: body['{{.Name}}'] = {{template "method-param-bind" .}}{{end}}
-        {{- end}}
-        {{- end}}
-        {{- end}}
-        {{- end}}
-{{- end}}
-
-{{ define "method-headers" -}}
-    headers = {
-      {{- range $k, $v := .FixedRequestHeaders}}'{{ $k }}': '{{ $v }}',{{ end -}}
-    }
-{{- end }}
-
-{{ define "method-response-headers" -}}
-    response_headers = [
-      {{- range $h := .ResponseHeaders}}'{{ $h.Name }}',{{ end -}}
-    ]
-{{- end }}
-
-{{- define "method-param-bind" -}}
-      {{- if not .Entity }}None # ERROR: No Type
-      {{- else if .Entity.ArrayValue }}[
-        {{- if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal -}}v.as_dict()
-        {{- else if .Entity.ArrayValue.Enum -}}v.value
-        {{- else}}v{{end}} for v in {{template "safe-snake-name" .}}]
-      {{- else if .Entity.IsObject }}{{template "safe-snake-name" .}}.as_dict()
-      {{- else if .Entity.Enum }}{{template "safe-snake-name" .}}.value
-      {{- else}}{{template "safe-snake-name" .}}{{- end -}}
-{{- end -}}
-
-{{define "method-call" -}}
-        {{if .Pagination -}}{{template "method-call-paginated" .}}
-        {{- else if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}{{template "method-call-retried" .}}
-        {{- else}}{{template "method-call-default" .}}{{end}}
-{{- end}}
-
-{{define "method-call-retried" -}}
-        {{if .Response}}op_response = {{end}}{{template "method-do" .}}
-        return Wait(self.{{template "safe-snake-name" .Wait}}
-          {{if .Response}}, response = {{.Response.PascalName}}.from_dict(op_response){{end}}
-          {{range .Wait.Binding}}, {{template "safe-snake-name" .PollField}}={{if .IsResponseBind}}op_response['{{.Bind.Name}}']{{else}}{{template "safe-snake-name" .Bind}}{{end}}
-        {{- end}})
-{{- end}}
-
-{{define "method-call-paginated" -}}
-        {{if .Pagination.MultiRequest}}
-        {{if .NeedsOffsetDedupe -}}
-        # deduplicate items that may have been added during iteration
-        seen = set()
-        {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) }}
-        query['{{.Pagination.Offset.Name}}'] =
-        {{- if eq .Pagination.Increment 1 -}}
-          1
-        {{- else if contains .Path "/scim/v2/" -}}
-          1
-        {{- else -}}
-          0
-        {{- end}}{{end}}{{if and .Pagination.Limit (contains .Path "/scim/v2/")}}
-        if "{{.Pagination.Limit.Name}}" not in query: query['{{.Pagination.Limit.Name}}'] = 100
-        {{- end}}
-        while True:
-          json = {{template "method-do" .}}
-          if '{{.Pagination.Results.Name}}' in json:
-            for v in json['{{.Pagination.Results.Name}}']:
-              {{if .NeedsOffsetDedupe -}}
-              i = v['{{.IdentifierField.Name}}']
-              if i in seen:
-                continue
-              seen.add(i)
-              {{end -}}
-              yield {{.Pagination.Entity.PascalName}}.from_dict(v)
-          {{ if .Pagination.Token -}}
-          if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']:
-            return
-          {{if or (eq "GET" .Verb) (eq "HEAD" .Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}']
-          {{- else if eq .Path "/api/2.1/clusters/events" -}}
-          if 'next_page' not in json or not json['next_page']:
-            return
-          body = json['next_page']
-          {{- else -}}
-          if '{{.Pagination.Results.Name}}' not in json or not json['{{.Pagination.Results.Name}}']:
-            return
-          {{ if eq .Pagination.Increment 1 -}}
-          query['{{.Pagination.Offset.Name}}'] += 1
-          {{- else -}}
-          query['{{.Pagination.Offset.Name}}'] += len(json['{{.Pagination.Results.Name}}'])
-          {{- end}}
-          {{- end}}
-        {{else -}}
-        json = {{template "method-do" .}}
-        parsed = {{.Response.PascalName}}.from_dict(json).{{template "safe-snake-name" .Pagination.Results}}
-        return parsed if parsed is not None else []
-        {{end}}
-{{- end}}
-
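
(The token-based branch of `method-call-paginated` above corresponds to generated iterators of roughly this shape; a sketch over a hypothetical endpoint whose response carries `items` and `next_page_token`, with `api_do` standing in for `self._api.do`.)

```python
from typing import Dict, Iterator

def list_example(api_do, query: Dict[str, str]) -> Iterator[dict]:
    # Follow next_page_token until the server stops returning one,
    # yielding each raw item as it arrives.
    while True:
        json = api_do('GET', '/api/2.0/example/list', query=query)
        for v in json.get('items', []):
            yield v
        if not json.get('next_page_token'):
            return
        query['page_token'] = json['next_page_token']
```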
-{{define "method-call-default" -}}
-        {{if not .Response.IsEmpty -}}
-        res = {{end}}{{template "method-do" .}}
-        {{if not .Response.IsEmpty -}}
-          {{- if .Response.ArrayValue -}}
-            return [{{.Response.ArrayValue.PascalName}}.from_dict(v) for v in res]
-          {{- else if .Response.MapValue -}}
-            return res
-          {{- else -}}
-            return {{template "type" .Response}}.from_dict(res)
-          {{- end}}
-        {{- end}}
-{{- end}}
-
-{{define "method-do" -}}
-    self._api.do('{{.Verb}}',
-    {{- if .Service.IsDataPlane -}}
-    url=data_plane_details.endpoint_url
-    {{- else -}}
-    {{ template "path" . }}
-    {{- end -}}
-    {{if .Request}}
-        {{- if .Request.HasQueryField}}, query=query{{end}}
-        {{- if .Request.MapValue}}, body=contents
-        {{- else if .Request.HasJsonField}}, body=body{{end}}
-    {{end}}
-    , headers=headers
-    {{if .Response.HasHeaderField -}}
-    , response_headers=response_headers
-    {{- end}}
-    {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }}
-    {{- if .Service.IsDataPlane -}}
-    ,auth=auth
-    {{- end -}}
-    {{- if .IsResponseByteStream }}, raw=True{{ end }})
-{{- end}}
-
-{{- define "path" -}}
-{{- if .PathParts -}}
-  f'{{range  .PathParts -}}
-    {{- .Prefix -}}
-    {{- if .Field -}}
-      {{- "{" -}}
-      {{- if .Field.IsPathMultiSegment -}}_escape_multi_segment_path_parameter({{ template "path-parameter" . }})
-      {{- else -}}{{ template "path-parameter" . }}
-      {{- end -}}
-      {{- "}" -}}
-    {{- else if .IsAccountId}}
-      {{- "{" -}}
-      self._api.account_id
-      {{- "}" -}}
-    {{- end -}}
-  {{- end }}'
-{{- else -}}
-  '{{.Path}}'
-{{- end -}}
-{{- end -}}
-
-{{- define "path-parameter" -}}
-  {{template "safe-snake-name" .Field}}{{with .Field.Entity.Enum}}.value{{end}}
-{{- end -}}
-
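
(The `path` template renders Python f-strings, routing multi-segment parameters through `_escape_multi_segment_path_parameter` first. A sketch of the idea; the helper body below is an assumption, percent-encoding everything except `/`, and the route is illustrative.)

```python
from urllib.parse import quote

def _escape_multi_segment_path_parameter(param: str) -> str:
    # Assumed behavior: keep '/' literal so one value may legally span
    # several path segments, percent-encoding everything else.
    return quote(param, safe='/')

file_path = '/Volumes/main/default/vol/some file.txt'
url = f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}'
```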
-{{define "method-return-type" -}}
-  {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }} -> Wait[{{.Wait.Poll.Response.PascalName}}]
-  {{- else if not .Response.IsEmpty }} -> {{if .Response.ArrayValue -}}
-    Iterator[{{template "type" .Response.ArrayValue}}]
-  {{- else if .Pagination -}}
-    Iterator[{{template "type" .Pagination.Entity}}]
-  {{- else -}}
-    {{- if .Response.IsExternal -}}
-      {{.Response.Package.Name}}.{{.Response.PascalName}}
-    {{- else -}}
-      {{.Response.PascalName}}
-    {{- end -}}
-  {{- end}}{{end}}
-{{- end}}

From 782a565ecd9b73a54c45200192122e5b8452d8b6 Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Fri, 1 Nov 2024 10:30:17 +0100
Subject: [PATCH 065/136] [Internal] Always write message for manual test
 execution (#811)

## Changes
The old script could not be run from master due to security restrictions, and there is no reliable way to detect whether a user has access to secrets.
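
Instead, the workflows now key off whether a known secret is actually populated: a pull request from a fork simply sees an empty string for every secret. Condensed, the gating check in `integration-tests.yml` below amounts to (a sketch, not the exact workflow text):

```yaml
- name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
  id: set-token-status
  run: |
    if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
      echo "::set-output name=has_token::false"   # fork PR: secret unreadable
    else
      echo "::set-output name=has_token::true"    # trusted context: secret present
    fi
```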

## Tests
Opened a PR in the Java SDK from a fork:
https://github.com/databricks/databricks-sdk-java/pull/375
---
 .github/workflows/external-message.yml  | 68 ++-----------------------
 .github/workflows/integration-tests.yml | 19 +++----
 2 files changed, 15 insertions(+), 72 deletions(-)

diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml
index 3392fc8e0..a2d9dc2e8 100644
--- a/.github/workflows/external-message.yml
+++ b/.github/workflows/external-message.yml
@@ -11,7 +11,6 @@ on:
     branches:
       - main
 
-
 jobs:
   comment-on-pr:
     runs-on: ubuntu-latest
@@ -19,73 +18,15 @@ jobs:
       pull-requests: write
 
     steps:
-      # NOTE: The following checks may not be accurate depending on Org or Repo settings. 
-      - name: Check user and potential secret access
-        id: check-secrets-access
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-          USER_LOGIN="${{ github.event.pull_request.user.login }}"
-          REPO_OWNER="${{ github.repository_owner }}"
-          REPO_NAME="${{ github.event.repository.name }}"
-          
-          echo "Pull request opened by: $USER_LOGIN"
-          
-          # Check if PR is from a fork
-          IS_FORK=$([[ "${{ github.event.pull_request.head.repo.full_name }}" != "${{ github.repository }}" ]] && echo "true" || echo "false")
-          
-          HAS_ACCESS="false"
-          
-          # Check user's permission level on the repository
-          USER_PERMISSION=$(gh api repos/$REPO_OWNER/$REPO_NAME/collaborators/$USER_LOGIN/permission --jq '.permission')
-          
-          if [[ "$USER_PERMISSION" == "admin" || "$USER_PERMISSION" == "write" ]]; then
-            HAS_ACCESS="true"
-          elif [[ "$USER_PERMISSION" == "read" ]]; then
-            # For read access, we need to check if the user has been explicitly granted secret access
-            # This information is not directly available via API, so we'll make an assumption
-            # that read access does not imply secret access
-            HAS_ACCESS="false"
-          fi
-          
-          # Check if repo owner is an organization
-          IS_ORG=$(gh api users/$REPO_OWNER --jq '.type == "Organization"')
-          
-          if [[ "$IS_ORG" == "true" && "$HAS_ACCESS" == "false" ]]; then
-            # Check if user is a member of any team with write or admin access to the repo
-            TEAMS_WITH_ACCESS=$(gh api repos/$REPO_OWNER/$REPO_NAME/teams --jq '.[] | select(.permission == "push" or .permission == "admin") | .slug')
-            for team in $TEAMS_WITH_ACCESS; do
-              IS_TEAM_MEMBER=$(gh api orgs/$REPO_OWNER/teams/$team/memberships/$USER_LOGIN --silent && echo "true" || echo "false")
-              if [[ "$IS_TEAM_MEMBER" == "true" ]]; then
-                HAS_ACCESS="true"
-                break
-              fi
-            done
-          fi
-          
-          # If it's a fork, set HAS_ACCESS to false regardless of other checks
-          if [[ "$IS_FORK" == "true" ]]; then
-            HAS_ACCESS="false"
-          fi
-          
-          echo "has_secrets_access=$HAS_ACCESS" >> $GITHUB_OUTPUT
-          if [[ "$HAS_ACCESS" == "true" ]]; then
-            echo "User $USER_LOGIN likely has access to secrets"
-          else
-            echo "User $USER_LOGIN likely does not have access to secrets"
-          fi
-
-
       - uses: actions/checkout@v4
 
       - name: Delete old comments
-        if: steps.check-secrets-access.outputs.has_secrets_access != 'true'
         env:
            GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
         run: |
             # Delete previous comment if it exists
             previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
-              --jq '.[] | select(.body | startswith("")) | .id')
+              --jq '.[] | select(.body | startswith("")) | .id')
             echo "Previous comment IDs: $previous_comment_ids"
             # Iterate over each comment ID and delete the comment
             if [ ! -z "$previous_comment_ids" ]; then
@@ -96,14 +37,15 @@ jobs:
             fi
 
       - name: Comment on PR
-        if: steps.check-secrets-access.outputs.has_secrets_access != 'true'
         env:
           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
         run: |
           gh pr comment ${{ github.event.pull_request.number }} --body \
-          "
-          Run integration tests manually:
+          "
+          If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
+          
+          Trigger:
           [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
 
           Inputs:
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 88d3e865a..93a6c2676 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -12,20 +12,21 @@ jobs:
   check-token:
     name: Check secrets access
     runs-on: ubuntu-latest
+    environment: "test-trigger-is"
     outputs:
       has_token: ${{ steps.set-token-status.outputs.has_token }}
     steps:
-      - name: Check if GITHUB_TOKEN is set
+      - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
         id: set-token-status
         run: |
-          if [ -z "${{ secrets.GITHUB_TOKEN }}" ]; then
-            echo "GITHUB_TOKEN is empty. User has no access to tokens."
-            echo "::set-output name=has_token::false"
-          else
-            echo "GITHUB_TOKEN is set. User has no access to tokens."
-            echo "::set-output name=has_token::true"
-          fi
-
+            if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
+              echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
+              echo "::set-output name=has_token::false"
+            else
+              echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
+              echo "::set-output name=has_token::true"
+            fi
+    
   trigger-tests:
     name: Trigger Tests
     runs-on: ubuntu-latest

From 216709fe2cc766c66e5e43ac114ea36d51eedb25 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Tue, 5 Nov 2024 17:24:32 +0100
Subject: [PATCH 066/136] [Release] Release v0.37.0 (#813)

### Bug Fixes

* Correctly generate classes with nested body fields
([#808](https://github.com/databricks/databricks-sdk-py/pull/808)).


### Internal Changes

* Add `cleanrooms` package
([#806](https://github.com/databricks/databricks-sdk-py/pull/806)).
* Add test instructions for external contributors
([#804](https://github.com/databricks/databricks-sdk-py/pull/804)).
* Always write message for manual test execution
([#811](https://github.com/databricks/databricks-sdk-py/pull/811)).
* Automatically trigger integration tests on PR
([#800](https://github.com/databricks/databricks-sdk-py/pull/800)).
* Better isolate ML serving auth unit tests
([#803](https://github.com/databricks/databricks-sdk-py/pull/803)).
* Move templates in the code generator
([#809](https://github.com/databricks/databricks-sdk-py/pull/809)).


### API Changes:

* Added
[w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html)
workspace-level service and
[w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html)
workspace-level service.
* Added
[w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html)
workspace-level service.
* Added `app_deployment` field for
`databricks.sdk.service.apps.CreateAppDeploymentRequest`.
 * Added `app` field for `databricks.sdk.service.apps.CreateAppRequest`.
 * Added `app` field for `databricks.sdk.service.apps.UpdateAppRequest`.
* Added `table` field for
`databricks.sdk.service.catalog.CreateOnlineTableRequest`.
* Added `azure_aad` field for
`databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`.
* Added `full_name` field for
`databricks.sdk.service.catalog.StorageCredentialInfo`.
* Added `dashboard` field for
`databricks.sdk.service.dashboards.CreateDashboardRequest`.
* Added `schedule` field for
`databricks.sdk.service.dashboards.CreateScheduleRequest`.
* Added `subscription` field for
`databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
* Added `warehouse_id` field for
`databricks.sdk.service.dashboards.Schedule`.
* Added `dashboard` field for
`databricks.sdk.service.dashboards.UpdateDashboardRequest`.
* Added `schedule` field for
`databricks.sdk.service.dashboards.UpdateScheduleRequest`.
* Added `page_token` field for
`databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`.
* Added `next_page_token` field for
`databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`.
* Added `connection_name` field for
`databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`.
* Added `is_no_public_ip_enabled` field for
`databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
* Added `external_customer_info` and `is_no_public_ip_enabled` fields
for `databricks.sdk.service.provisioning.Workspace`.
* Added `last_used_day` field for
`databricks.sdk.service.settings.TokenInfo`.
* Changed `create()` method for
[w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html)
workspace-level service with new required argument order.
* Changed `execute_message_query()` method for
[w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html)
workspace-level service. New request type is
`databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest`
dataclass.
* Changed `execute_message_query()` method for
[w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html)
workspace-level service to type `execute_message_query()` method for
[w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html)
workspace-level service.
* Changed `create()`, `create_schedule()`, `create_subscription()` and
`update_schedule()` methods for
[w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview.html)
workspace-level service with new required argument order.
* Removed
[w.clean_rooms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clean_rooms.html)
workspace-level service.
* Removed `prev_page_token` field for `databricks.sdk.service.jobs.Run`.
* Removed `deployment_id`, `mode` and `source_code_path` fields for
`databricks.sdk.service.apps.CreateAppDeploymentRequest`.
* Removed `description`, `name` and `resources` fields for
`databricks.sdk.service.apps.CreateAppRequest`.
* Removed `description` and `resources` fields for
`databricks.sdk.service.apps.UpdateAppRequest`.
* Removed `name` and `spec` fields for
`databricks.sdk.service.catalog.CreateOnlineTableRequest`.
* Removed `display_name`, `parent_path`, `serialized_dashboard` and
`warehouse_id` fields for
`databricks.sdk.service.dashboards.CreateDashboardRequest`.
* Removed `cron_schedule`, `display_name` and `pause_status` fields for
`databricks.sdk.service.dashboards.CreateScheduleRequest`.
* Removed `subscriber` field for
`databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
* Removed `display_name`, `etag`, `serialized_dashboard` and
`warehouse_id` fields for
`databricks.sdk.service.dashboards.UpdateDashboardRequest`.
* Removed `cron_schedule`, `display_name`, `etag` and `pause_status`
fields for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.

OpenAPI SHA: 5285ce76f81314f342c1702d5c2ad4ef42488781, Date: 2024-11-04
---
 .codegen/_openapi_sha                         |   2 +-
 CHANGELOG.md                                  |  57 ++
 databricks/sdk/__init__.py                    |  46 +-
 databricks/sdk/service/apps.py                | 234 +----
 databricks/sdk/service/catalog.py             | 833 +++++++++++++++++-
 databricks/sdk/service/compute.py             |  43 +-
 databricks/sdk/service/dashboards.py          | 302 +------
 databricks/sdk/service/iam.py                 |   6 +-
 databricks/sdk/service/jobs.py                | 144 +--
 databricks/sdk/service/marketplace.py         |   1 +
 databricks/sdk/service/ml.py                  |   7 +-
 databricks/sdk/service/oauth2.py              |  36 +-
 databricks/sdk/service/pipelines.py           |  21 +-
 databricks/sdk/service/provisioning.py        |  53 ++
 databricks/sdk/service/serving.py             |   4 +-
 databricks/sdk/service/settings.py            | 320 ++++++-
 databricks/sdk/service/sharing.py             | 618 -------------
 databricks/sdk/service/sql.py                 |  14 +-
 databricks/sdk/service/workspace.py           |   8 +-
 databricks/sdk/version.py                     |   2 +-
 .../oauth2/service_principal_secrets.rst      |   9 +-
 docs/account/provisioning/workspaces.rst      |   6 +-
 docs/dbdataclasses/apps.rst                   |  12 -
 docs/dbdataclasses/catalog.rst                |  96 +-
 docs/dbdataclasses/dashboards.rst             |  23 +-
 docs/dbdataclasses/marketplace.rst            |   3 +
 docs/dbdataclasses/provisioning.rst           |   4 +
 docs/dbdataclasses/settings.rst               |  35 +
 docs/dbdataclasses/sharing.rst                | 119 ---
 docs/dbdataclasses/sql.rst                    |   4 +-
 docs/workspace/apps/apps.rst                  |  40 +-
 docs/workspace/catalog/external_locations.rst |   1 -
 docs/workspace/catalog/online_tables.rst      |  19 +-
 .../workspace/catalog/storage_credentials.rst |   1 -
 docs/workspace/compute/cluster_policies.rst   |   3 +-
 docs/workspace/compute/clusters.rst           |   5 +-
 docs/workspace/compute/instance_pools.rst     |   3 +-
 docs/workspace/dashboards/lakeview.rst        |  60 +-
 docs/workspace/iam/permissions.rst            |   3 +-
 docs/workspace/iam/users.rst                  |   3 +-
 docs/workspace/index.rst                      |   1 +
 docs/workspace/jobs/jobs.rst                  |  17 +-
 docs/workspace/ml/experiments.rst             |   3 +-
 docs/workspace/ml/model_registry.rst          |   4 +-
 docs/workspace/pipelines/pipelines.rst        |   3 +-
 docs/workspace/provisioning/credentials.rst   | 123 +++
 docs/workspace/provisioning/index.rst         |  10 +
 docs/workspace/serving/serving_endpoints.rst  |   4 +-
 ...aibi_dashboard_embedding_access_policy.rst |  42 +
 ...i_dashboard_embedding_approved_domains.rst |  42 +
 docs/workspace/settings/index.rst             |   2 +
 docs/workspace/settings/settings.rst          |  12 +
 docs/workspace/settings/token_management.rst  |   3 +-
 docs/workspace/sharing/index.rst              |   1 -
 docs/workspace/sql/statement_execution.rst    |   9 +-
 docs/workspace/sql/warehouses.rst             |   3 +-
 docs/workspace/workspace/repos.rst            |   3 +-
 docs/workspace/workspace/workspace.rst        |   5 +-
 58 files changed, 1928 insertions(+), 1559 deletions(-)
 create mode 100644 docs/workspace/provisioning/credentials.rst
 create mode 100644 docs/workspace/provisioning/index.rst
 create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
 create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2d9cb6d86..00e5d84f9 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-cf9c61453990df0f9453670f2fe68e1b128647a2
\ No newline at end of file
+5285ce76f81314f342c1702d5c2ad4ef42488781
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 458921ee0..409fce709 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,62 @@
 # Version changelog
 
+## [Release] Release v0.37.0
+
+### Bug Fixes
+
+ * Correctly generate classes with nested body fields ([#808](https://github.com/databricks/databricks-sdk-py/pull/808)).
+
+
+### Internal Changes
+
+ * Add `cleanrooms` package ([#806](https://github.com/databricks/databricks-sdk-py/pull/806)).
+ * Add test instructions for external contributors ([#804](https://github.com/databricks/databricks-sdk-py/pull/804)).
+ * Always write message for manual test execution ([#811](https://github.com/databricks/databricks-sdk-py/pull/811)).
+ * Automatically trigger integration tests on PR ([#800](https://github.com/databricks/databricks-sdk-py/pull/800)).
+ * Better isolate ML serving auth unit tests ([#803](https://github.com/databricks/databricks-sdk-py/pull/803)).
+ * Move templates in the code generator ([#809](https://github.com/databricks/databricks-sdk-py/pull/809)).
+
+
+### API Changes:
+
+ * Added [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service and [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service.
+ * Added [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service.
+ * Added `app_deployment` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest`.
+ * Added `app` field for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Added `app` field for `databricks.sdk.service.apps.UpdateAppRequest`.
+ * Added `table` field for `databricks.sdk.service.catalog.CreateOnlineTableRequest`.
+ * Added `azure_aad` field for `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`.
+ * Added `full_name` field for `databricks.sdk.service.catalog.StorageCredentialInfo`.
+ * Added `dashboard` field for `databricks.sdk.service.dashboards.CreateDashboardRequest`.
+ * Added `schedule` field for `databricks.sdk.service.dashboards.CreateScheduleRequest`.
+ * Added `subscription` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
+ * Added `warehouse_id` field for `databricks.sdk.service.dashboards.Schedule`.
+ * Added `dashboard` field for `databricks.sdk.service.dashboards.UpdateDashboardRequest`.
+ * Added `schedule` field for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.
+ * Added `page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`.
+ * Added `next_page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`.
+ * Added `connection_name` field for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`.
+ * Added `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
+ * Added `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
+ * Added `last_used_day` field for `databricks.sdk.service.settings.TokenInfo`.
+ * Changed `create()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service with new required argument order.
+ * Changed `execute_message_query()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. New request type is `databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` dataclass.
+ * Changed `execute_message_query()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service to type `execute_message_query()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service.
+ * Changed `create()`, `create_schedule()`, `create_subscription()` and `update_schedule()` methods for [w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview.html) workspace-level service with new required argument order.
+ * Removed [w.clean_rooms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clean_rooms.html) workspace-level service.
+ * Removed `deployment_id`, `mode` and `source_code_path` fields for `databricks.sdk.service.apps.CreateAppDeploymentRequest`.
+ * Removed `description`, `name` and `resources` fields for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Removed `description` and `resources` fields for `databricks.sdk.service.apps.UpdateAppRequest`.
+ * Removed `name` and `spec` fields for `databricks.sdk.service.catalog.CreateOnlineTableRequest`.
+ * Removed `display_name`, `parent_path`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.CreateDashboardRequest`.
+ * Removed `cron_schedule`, `display_name` and `pause_status` fields for `databricks.sdk.service.dashboards.CreateScheduleRequest`.
+ * Removed `subscriber` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
+ * Removed `display_name`, `etag`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.UpdateDashboardRequest`.
+ * Removed `cron_schedule`, `display_name`, `etag` and `pause_status` fields for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.
+ * Removed `prev_page_token` field for `databricks.sdk.service.jobs.Run`.
+
+OpenAPI SHA: 5285ce76f81314f342c1702d5c2ad4ef42488781, Date: 2024-11-04
+
 ## [Release] Release v0.36.0
 
 ### Breaking Changes
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 159946461..746f8d7e9 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -15,7 +15,7 @@
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
                                             ArtifactAllowlistsAPI, CatalogsAPI,
-                                            ConnectionsAPI,
+                                            ConnectionsAPI, CredentialsAPI,
                                             ExternalLocationsAPI, FunctionsAPI,
                                             GrantsAPI, MetastoresAPI,
                                             ModelVersionsAPI, OnlineTablesAPI,
@@ -64,26 +64,18 @@
                                                  Workspace, WorkspacesAPI)
 from databricks.sdk.service.serving import (ServingEndpointsAPI,
                                             ServingEndpointsDataPlaneAPI)
-from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
-                                             AccountSettingsAPI,
-                                             AutomaticClusterUpdateAPI,
-                                             ComplianceSecurityProfileAPI,
-                                             CredentialsManagerAPI,
-                                             CspEnablementAccountAPI,
-                                             DefaultNamespaceAPI,
-                                             DisableLegacyAccessAPI,
-                                             DisableLegacyDbfsAPI,
-                                             DisableLegacyFeaturesAPI,
-                                             EnhancedSecurityMonitoringAPI,
-                                             EsmEnablementAccountAPI,
-                                             IpAccessListsAPI,
-                                             NetworkConnectivityAPI,
-                                             NotificationDestinationsAPI,
-                                             PersonalComputeAPI,
-                                             RestrictWorkspaceAdminsAPI,
-                                             SettingsAPI, TokenManagementAPI,
-                                             TokensAPI, WorkspaceConfAPI)
-from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
+from databricks.sdk.service.settings import (
+    AccountIpAccessListsAPI, AccountSettingsAPI,
+    AibiDashboardEmbeddingAccessPolicyAPI,
+    AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
+    ComplianceSecurityProfileAPI, CredentialsManagerAPI,
+    CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
+    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI,
+    EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
+    NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
+    RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
+    WorkspaceConfAPI)
+from databricks.sdk.service.sharing import (ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
 from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
@@ -183,7 +175,6 @@ def __init__(self,
         self._apps = AppsAPI(self._api_client)
         self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = CatalogsAPI(self._api_client)
-        self._clean_rooms = CleanRoomsAPI(self._api_client)
         self._cluster_policies = ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
         self._command_execution = CommandExecutionAPI(self._api_client)
@@ -193,6 +184,7 @@ def __init__(self,
         self._consumer_listings = ConsumerListingsAPI(self._api_client)
         self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
         self._consumer_providers = ConsumerProvidersAPI(self._api_client)
+        self._credentials = CredentialsAPI(self._api_client)
         self._credentials_manager = CredentialsManagerAPI(self._api_client)
         self._current_user = CurrentUserAPI(self._api_client)
         self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
@@ -312,11 +304,6 @@ def catalogs(self) -> CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
-    @property
-    def clean_rooms(self) -> CleanRoomsAPI:
-        """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases."""
-        return self._clean_rooms
-
     @property
     def cluster_policies(self) -> ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
@@ -362,6 +349,11 @@ def consumer_providers(self) -> ConsumerProvidersAPI:
         """Providers are the entities that publish listings to the Marketplace."""
         return self._consumer_providers
 
+    @property
+    def credentials(self) -> CredentialsAPI:
+        """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
+        return self._credentials
+
     @property
     def credentials_manager(self) -> CredentialsManagerAPI:
         """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens."""
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 52796d0e8..4123ea08c 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -611,70 +611,6 @@ def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
         return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState))
 
 
-@dataclass
-class CreateAppDeploymentRequest:
-    app_name: Optional[str] = None
-    """The name of the app."""
-
-    deployment_id: Optional[str] = None
-    """The unique id of the deployment."""
-
-    mode: Optional[AppDeploymentMode] = None
-    """The mode of which the deployment will manage the source code."""
-
-    source_code_path: Optional[str] = None
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.app_name is not None: body['app_name'] = self.app_name
-        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
-        if self.mode is not None: body['mode'] = self.mode.value
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
-        """Deserializes the CreateAppDeploymentRequest from a dictionary."""
-        return cls(app_name=d.get('app_name', None),
-                   deployment_id=d.get('deployment_id', None),
-                   mode=_enum(d, 'mode', AppDeploymentMode),
-                   source_code_path=d.get('source_code_path', None))
-
-
-@dataclass
-class CreateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    description: Optional[str] = None
-    """The description of the app."""
-
-    resources: Optional[List[AppResource]] = None
-    """Resources for the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
-        """Deserializes the CreateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   resources=_repeated_dict(d, 'resources', AppResource))
-
-
 @dataclass
 class GetAppPermissionLevelsResponse:
     permission_levels: Optional[List[AppPermissionsDescription]] = None
@@ -746,34 +682,6 @@ class StopAppRequest:
     """The name of the app."""
 
 
-@dataclass
-class UpdateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    description: Optional[str] = None
-    """The description of the app."""
-
-    resources: Optional[List[AppResource]] = None
-    """Resources for the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
-        """Deserializes the UpdateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   resources=_repeated_dict(d, 'resources', AppResource))
-
-
 class AppsAPI:
     """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on."""
@@ -813,29 +721,31 @@ def wait_get_app_active(self,
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_app_stopped(self,
-                             name: str,
-                             timeout=timedelta(minutes=20),
-                             callback: Optional[Callable[[App], None]] = None) -> App:
+    def wait_get_deployment_app_succeeded(
+            self,
+            app_name: str,
+            deployment_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ComputeState.STOPPED, )
-        failure_states = (ComputeState.ERROR, )
+        target_states = (AppDeploymentState.SUCCEEDED, )
+        failure_states = (AppDeploymentState.FAILED, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.get(name=name)
-            status = poll.compute_status.state
+            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
+            status = poll.status.state
             status_message = f'current status: {status}'
-            if poll.compute_status:
-                status_message = poll.compute_status.message
+            if poll.status:
+                status_message = poll.status.message
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach STOPPED, got {status}: {status_message}'
+                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"name={name}"
+            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -845,31 +755,29 @@ def wait_get_app_stopped(self,
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_deployment_app_succeeded(
-            self,
-            app_name: str,
-            deployment_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
+    def wait_get_app_stopped(self,
+                             name: str,
+                             timeout=timedelta(minutes=20),
+                             callback: Optional[Callable[[App], None]] = None) -> App:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (AppDeploymentState.SUCCEEDED, )
-        failure_states = (AppDeploymentState.FAILED, )
+        target_states = (ComputeState.STOPPED, )
+        failure_states = (ComputeState.ERROR, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
-            status = poll.status.state
+            poll = self.get(name=name)
+            status = poll.compute_status.state
             status_message = f'current status: {status}'
-            if poll.status:
-                status_message = poll.status.message
+            if poll.compute_status:
+                status_message = poll.compute_status.message
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
+                msg = f'failed to reach STOPPED, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
+            prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -879,43 +787,25 @@ def wait_get_deployment_app_succeeded(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self,
-               name: str,
-               *,
-               description: Optional[str] = None,
-               resources: Optional[List[AppResource]] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None) -> Wait[App]:
         """Create an app.
         
         Creates a new app.
         
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
         
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
-        body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        body = app
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def create_and_wait(self,
-                        name: str,
-                        *,
-                        description: Optional[str] = None,
-                        resources: Optional[List[AppResource]] = None,
-                        timeout=timedelta(minutes=20)) -> App:
-        return self.create(description=description, name=name, resources=resources).result(timeout=timeout)
+    def create_and_wait(self, *, app: Optional[App] = None, timeout=timedelta(minutes=20)) -> App:
+        return self.create(app=app).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
@@ -933,37 +823,20 @@ def delete(self, name: str) -> App:
         res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers)
         return App.from_dict(res)
 
-    def deploy(self,
-               app_name: str,
-               *,
-               deployment_id: Optional[str] = None,
-               mode: Optional[AppDeploymentMode] = None,
-               source_code_path: Optional[str] = None) -> Wait[AppDeployment]:
+    def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
         """Create an app deployment.
         
         Creates an app deployment for the app with the supplied name.
         
         :param app_name: str
           The name of the app.
-        :param deployment_id: str (optional)
-          The unique id of the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
-        :param source_code_path: str (optional)
-          The workspace file system path of the source code used to create the app deployment. This is
-          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
-          The former refers to the original source code location of the app in the workspace during deployment
-          creation, whereas the latter provides a system generated stable snapshotted source code path used by
-          the deployment.
+        :param app_deployment: :class:`AppDeployment` (optional)
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         """
-        body = {}
-        if deployment_id is not None: body['deployment_id'] = deployment_id
-        if mode is not None: body['mode'] = mode.value
-        if source_code_path is not None: body['source_code_path'] = source_code_path
+        body = app_deployment
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST',
@@ -975,18 +848,12 @@ def deploy(self,
                     app_name=app_name,
                     deployment_id=op_response['deployment_id'])
 
-    def deploy_and_wait(
-        self,
-        app_name: str,
-        *,
-        deployment_id: Optional[str] = None,
-        mode: Optional[AppDeploymentMode] = None,
-        source_code_path: Optional[str] = None,
-        timeout=timedelta(minutes=20)) -> AppDeployment:
-        return self.deploy(app_name=app_name,
-                           deployment_id=deployment_id,
-                           mode=mode,
-                           source_code_path=source_code_path).result(timeout=timeout)
+    def deploy_and_wait(self,
+                        app_name: str,
+                        *,
+                        app_deployment: Optional[AppDeployment] = None,
+                        timeout=timedelta(minutes=20)) -> AppDeployment:
+        return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
 
     def get(self, name: str) -> App:
         """Get an app.
@@ -1121,7 +988,8 @@ def set_permissions(
             access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
         """Set app permissions.
         
-        Sets permissions on an app. Apps can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param app_name: str
           The app for which to get or manage permissions.
@@ -1179,28 +1047,18 @@ def stop(self, name: str) -> Wait[App]:
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)
 
-    def update(self,
-               name: str,
-               *,
-               description: Optional[str] = None,
-               resources: Optional[List[AppResource]] = None) -> App:
+    def update(self, name: str, *, app: Optional[App] = None) -> App:
         """Update an app.
         
         Updates the app with the supplied name.
         
         :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+          The name of the app.
+        :param app: :class:`App` (optional)
         
         :returns: :class:`App`
         """
-        body = {}
-        if description is not None: body['description'] = description
-        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        body = app
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
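
(Net effect for callers of the apps service: the flattened keyword arguments are gone and the request body is now the resource object itself. A usage sketch against the new signatures; authentication config is omitted and the field values are illustrative.)

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()

# Previously: w.apps.create(name='my-app', description='demo', ...)
app = w.apps.create_and_wait(app=App(name='my-app', description='demo'))
print(app.compute_status)
```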
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index b149dbbaa..3943608ef 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -3,11 +3,15 @@
 from __future__ import annotations
 
 import logging
+import random
+import time
 from dataclasses import dataclass
+from datetime import timedelta
 from enum import Enum
-from typing import Dict, Iterator, List, Optional
+from typing import Callable, Dict, Iterator, List, Optional
 
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
+from ..errors import OperationFailed
+from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
 _LOG = logging.getLogger('databricks.sdk')
 
@@ -310,6 +314,36 @@ def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
                    session_token=d.get('session_token', None))
 
 
+@dataclass
+class AwsIamRole:
+    """The AWS IAM role configuration"""
+
+    external_id: Optional[str] = None
+    """The external ID used in role assumption to prevent the confused deputy problem."""
+
+    role_arn: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials."""
+
+    unity_catalog_iam_arn: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity
+    that is going to assume the AWS IAM role."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AwsIamRole:
+        """Deserializes the AwsIamRole from a dictionary."""
+        return cls(external_id=d.get('external_id', None),
+                   role_arn=d.get('role_arn', None),
+                   unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
+
+
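
All of the new catalog dataclasses share the same `as_dict()`/`from_dict()` round-trip convention: `as_dict()` drops unset fields and `from_dict()` tolerates missing keys. A quick sketch with `AwsIamRole` (the ARN is a placeholder):

```python
from databricks.sdk.service.catalog import AwsIamRole

role = AwsIamRole(role_arn='arn:aws:iam::123456789012:role/uc-access')
body = role.as_dict()  # {'role_arn': 'arn:aws:iam::123456789012:role/uc-access'}
restored = AwsIamRole.from_dict(body)
assert restored.role_arn == role.role_arn and restored.external_id is None
```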
 @dataclass
 class AwsIamRoleRequest:
     role_arn: str
@@ -355,6 +389,64 @@ def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleResponse:
                    unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
 
 
+@dataclass
+class AzureActiveDirectoryToken:
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
+    aad_token: Optional[str] = None
+    """Opaque token that contains claims that you can use in Azure Active Directory to access cloud
+    services."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
+        """Deserializes the AzureActiveDirectoryToken from a dictionary."""
+        return cls(aad_token=d.get('aad_token', None))
+
+
+@dataclass
+class AzureManagedIdentity:
+    """The Azure managed identity configuration."""
+
+    access_connector_id: Optional[str] = None
+    """The Azure resource ID of the Azure Databricks Access Connector. Use the format
+    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`."""
+
+    credential_id: Optional[str] = None
+    """The Databricks internal ID that represents this managed identity. This field is only used to
+    persist the credential_id once it is fetched from the credentials manager - as we only use the
+    protobuf serializer to store credentials, this ID gets persisted to the database."""
+
+    managed_identity_id: Optional[str] = None
+    """The Azure resource ID of the managed identity. Use the format,
+    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}`
+    This is only available for user-assigned identities. For system-assigned identities, the
+    access_connector_id is used to identify the identity. If this field is not provided, then we
+    assume the AzureManagedIdentity is using the system-assigned identity."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentity:
+        """Deserializes the AzureManagedIdentity from a dictionary."""
+        return cls(access_connector_id=d.get('access_connector_id', None),
+                   credential_id=d.get('credential_id', None),
+                   managed_identity_id=d.get('managed_identity_id', None))
+
+
 @dataclass
 class AzureManagedIdentityRequest:
     access_connector_id: str
@@ -793,6 +885,7 @@ class ColumnTypeName(Enum):
     TIMESTAMP = 'TIMESTAMP'
     TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
     USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
+    VARIANT = 'VARIANT'
 
 
 @dataclass
@@ -1066,6 +1159,49 @@ def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
                    read_only=d.get('read_only', None))
 
 
+@dataclass
+class CreateCredentialRequest:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
+
+    name: Optional[str] = None
+    """The credential name. The name must be unique among storage and service credentials within the
+    metastore."""
+
+    purpose: Optional[CredentialPurpose] = None
+    """Indicates the purpose of the credential."""
+
+    skip_validation: Optional[bool] = None
+    """Optional. Supplying true to this argument skips validation of the created set of credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
+        """Deserializes the CreateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   comment=d.get('comment', None),
+                   name=d.get('name', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   skip_validation=d.get('skip_validation', None))
+
+
 @dataclass
 class CreateExternalLocation:
     name: str
@@ -1278,7 +1414,7 @@ class CreateFunctionRoutineBody(Enum):
 
 
 class CreateFunctionSecurityType(Enum):
-    """Function security type."""
+    """The security type of the function."""
 
     DEFINER = 'DEFINER'
 
@@ -1439,29 +1575,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
                    warehouse_id=d.get('warehouse_id', None))
 
 
-@dataclass
-class CreateOnlineTableRequest:
-    """Online Table information."""
-
-    name: Optional[str] = None
-    """Full three-part (catalog, schema, table) name of the table."""
-
-    spec: Optional[OnlineTableSpec] = None
-    """Specification of the online table."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateOnlineTableRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.spec: body['spec'] = self.spec.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateOnlineTableRequest:
-        """Deserializes the CreateOnlineTableRequest from a dictionary."""
-        return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec))
-
-
 @dataclass
 class CreateRegisteredModelRequest:
     catalog_name: str
@@ -1675,6 +1788,94 @@ def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
                    volume_type=_enum(d, 'volume_type', VolumeType))
 
 
+@dataclass
+class CredentialInfo:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
+
+    created_at: Optional[int] = None
+    """Time at which this credential was created, in epoch milliseconds."""
+
+    created_by: Optional[str] = None
+    """Username of credential creator."""
+
+    full_name: Optional[str] = None
+    """The full name of the credential."""
+
+    id: Optional[str] = None
+    """The unique identifier of the credential."""
+
+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
+    metastore_id: Optional[str] = None
+    """Unique identifier of the parent metastore."""
+
+    name: Optional[str] = None
+    """The credential name. The name must be unique among storage and service credentials within the
+    metastore."""
+
+    owner: Optional[str] = None
+    """Username of current owner of credential."""
+
+    purpose: Optional[CredentialPurpose] = None
+    """Indicates the purpose of the credential."""
+
+    updated_at: Optional[int] = None
+    """Time at which this credential was last modified, in epoch milliseconds."""
+
+    updated_by: Optional[str] = None
+    """Username of user who last modified the credential."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
+        """Deserializes the CredentialInfo from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   comment=d.get('comment', None),
+                   created_at=d.get('created_at', None),
+                   created_by=d.get('created_by', None),
+                   full_name=d.get('full_name', None),
+                   id=d.get('id', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
+                   metastore_id=d.get('metastore_id', None),
+                   name=d.get('name', None),
+                   owner=d.get('owner', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   updated_at=d.get('updated_at', None),
+                   updated_by=d.get('updated_by', None))
+
+
+class CredentialPurpose(Enum):
+
+    SERVICE = 'SERVICE'
+
+
 class CredentialType(Enum):
     """The type of credential."""
 
@@ -1682,6 +1883,27 @@ class CredentialType(Enum):
     USERNAME_PASSWORD = 'USERNAME_PASSWORD'
 
 
+@dataclass
+class CredentialValidationResult:
+    message: Optional[str] = None
+    """Error message would exist when the result does not equal to **PASS**."""
+
+    result: Optional[ValidateCredentialResult] = None
+    """The results of the tested operation."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CredentialValidationResult:
+        """Deserializes the CredentialValidationResult from a dictionary."""
+        return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult))
+
+
 @dataclass
 class CurrentWorkspaceBindings:
     """Currently assigned workspaces"""
@@ -1778,6 +2000,20 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse:
         return cls()
 
 
+@dataclass
+class DeleteCredentialResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialResponse:
+        """Deserializes the DeleteCredentialResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeleteResponse:
 
@@ -2052,7 +2288,6 @@ class ExternalLocationInfo:
     sufficient."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     metastore_id: Optional[str] = None
     """Unique identifier of metastore hosting the external location."""
@@ -2382,7 +2617,7 @@ class FunctionInfoRoutineBody(Enum):
 
 
 class FunctionInfoSecurityType(Enum):
-    """Function security type."""
+    """The security type of the function."""
 
     DEFINER = 'DEFINER'
 
@@ -2516,6 +2751,50 @@ def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
         return cls(oauth_token=d.get('oauth_token', None))
 
 
+@dataclass
+class GenerateTemporaryServiceCredentialAzureOptions:
+    """Options to customize the requested temporary credential"""
+
+    resources: Optional[List[str]] = None
+    """The resources to which the temporary Azure credential should apply. These resources are the
+    scopes that are passed to the token provider (see
+    https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.resources: body['resources'] = [v for v in self.resources]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzureOptions:
+        """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary."""
+        return cls(resources=d.get('resources', None))
+
+
+@dataclass
+class GenerateTemporaryServiceCredentialRequest:
+    azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
+    """Options to customize the requested temporary credential"""
+
+    credential_name: Optional[str] = None
+    """The name of the service credential used to generate a temporary credential"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.azure_options: body['azure_options'] = self.azure_options.as_dict()
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialRequest:
+        """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary."""
+        return cls(azure_options=_from_dict(d, 'azure_options',
+                                            GenerateTemporaryServiceCredentialAzureOptions),
+                   credential_name=d.get('credential_name', None))
+
+
 @dataclass
 class GenerateTemporaryTableCredentialRequest:
     operation: Optional[TableOperation] = None
@@ -2545,6 +2824,11 @@ class GenerateTemporaryTableCredentialResponse:
     """AWS temporary credentials for API authentication. Read more at
     https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
 
+    azure_aad: Optional[AzureActiveDirectoryToken] = None
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
     azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
     """Azure temporary credentials for API authentication. Read more at
     https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
@@ -2568,6 +2852,7 @@ def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
         if self.azure_user_delegation_sas:
             body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
         if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
@@ -2580,6 +2865,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
         """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
         return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
                    azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
                                                         AzureUserDelegationSas),
                    expiration_time=d.get('expiration_time', None),
@@ -2592,6 +2878,7 @@ class GetBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
     EXTERNAL_LOCATION = 'external_location'
+    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
 
 
@@ -2738,7 +3025,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
 
 
 class IsolationMode(Enum):
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
     ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'
@@ -2826,6 +3112,28 @@ def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListCredentialsResponse:
+    credentials: Optional[List[CredentialInfo]] = None
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
+        """Deserializes the ListCredentialsResponse from a dictionary."""
+        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo),
+                   next_page_token=d.get('next_page_token', None))
+
+
 @dataclass
 class ListExternalLocationsResponse:
     external_locations: Optional[List[ExternalLocationInfo]] = None
@@ -4619,6 +4927,7 @@ class SecurableType(Enum):
 
     CATALOG = 'catalog'
     CONNECTION = 'connection'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
     FUNCTION = 'function'
     METASTORE = 'metastore'
@@ -4738,11 +5047,13 @@ class StorageCredentialInfo:
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None
     """The Databricks managed GCP service account configuration."""
 
+    full_name: Optional[str] = None
+    """The full name of the credential."""
+
     id: Optional[str] = None
     """The unique identifier of the credential."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
@@ -4778,6 +5089,7 @@ def as_dict(self) -> dict:
         if self.created_by is not None: body['created_by'] = self.created_by
         if self.databricks_gcp_service_account:
             body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None: body['full_name'] = self.full_name
         if self.id is not None: body['id'] = self.id
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
@@ -4803,6 +5115,7 @@ def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo:
                    created_by=d.get('created_by', None),
                    databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
                                                              DatabricksGcpServiceAccountResponse),
+                   full_name=d.get('full_name', None),
                    id=d.get('id', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    metastore_id=d.get('metastore_id', None),
@@ -5158,6 +5471,37 @@ class TableType(Enum):
     VIEW = 'VIEW'
 
 
+@dataclass
+class TemporaryCredentials:
+    aws_temp_credentials: Optional[AwsCredentials] = None
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    azure_aad: Optional[AzureActiveDirectoryToken] = None
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
+    expiration_time: Optional[int] = None
+    """Server time when the credential will expire, in epoch milliseconds. The API client is advised to
+    cache the credential given this expiration time."""
+
+    def as_dict(self) -> dict:
+        """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials:
+        """Deserializes the TemporaryCredentials from a dictionary."""
+        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
+                   expiration_time=d.get('expiration_time', None))
+
+
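
The `expiration_time` docstring advises clients to cache the credential until it expires. A hypothetical helper sketching that check; the 60-second skew is an illustrative safety margin, not part of the SDK:

```python
import time

from databricks.sdk.service.catalog import TemporaryCredentials

def needs_refresh(creds: TemporaryCredentials, skew_seconds: int = 60) -> bool:
    # expiration_time is in epoch milliseconds per the field docstring.
    if creds.expiration_time is None:
        return True
    return time.time() * 1000 >= creds.expiration_time - skew_seconds * 1000
```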
 @dataclass
 class TriggeredUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE
@@ -5224,6 +5568,7 @@ class UpdateBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
     EXTERNAL_LOCATION = 'external_location'
+    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
 
 
@@ -5308,6 +5653,63 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
                    owner=d.get('owner', None))
 
 
+@dataclass
+class UpdateCredentialRequest:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
+
+    force: Optional[bool] = None
+    """Force update even if there are dependent services."""
+
+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
+    name_arg: Optional[str] = None
+    """Name of the credential."""
+
+    new_name: Optional[str] = None
+    """New name of credential."""
+
+    owner: Optional[str] = None
+    """Username of current owner of credential."""
+
+    skip_validation: Optional[bool] = None
+    """Supply true to this argument to skip validation of the updated credential."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
+        """Deserializes the UpdateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   comment=d.get('comment', None),
+                   force=d.get('force', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
+                   name_arg=d.get('name_arg', None),
+                   new_name=d.get('new_name', None),
+                   owner=d.get('owner', None),
+                   skip_validation=d.get('skip_validation', None))
+
+
 @dataclass
 class UpdateExternalLocation:
     access_point: Optional[str] = None
@@ -5331,7 +5733,6 @@ class UpdateExternalLocation:
     """Force update even if changing url invalidates dependent external tables or mounts."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     name: Optional[str] = None
     """Name of the external location."""
@@ -5751,7 +6152,6 @@ class UpdateStorageCredential:
     """Force update even if there are dependent external locations or external tables."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     name: Optional[str] = None
     """Name of the storage credential."""
@@ -5899,6 +6299,63 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
                    securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType))
 
 
+@dataclass
+class ValidateCredentialRequest:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    credential_name: Optional[str] = None
+    """Required. The name of an existing credential or long-lived cloud credential to validate."""
+
+    purpose: Optional[CredentialPurpose] = None
+    """The purpose of the credential. This should only be used when the credential is specified."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest:
+        """Deserializes the ValidateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   credential_name=d.get('credential_name', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose))
+
+
+@dataclass
+class ValidateCredentialResponse:
+    results: Optional[List[CredentialValidationResult]] = None
+    """The results of the validation check."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse:
+        """Deserializes the ValidateCredentialResponse from a dictionary."""
+        return cls(results=_repeated_dict(d, 'results', CredentialValidationResult))
+
+
+class ValidateCredentialResult(Enum):
+    """A enum represents the result of the file operation"""
+
+    FAIL = 'FAIL'
+    PASS = 'PASS'
+    SKIP = 'SKIP'
+
+
 @dataclass
 class ValidateStorageCredential:
     aws_iam_role: Optional[AwsIamRoleRequest] = None
@@ -6935,6 +7392,258 @@ def update(self,
         return ConnectionInfo.from_dict(res)
 
 
+class CredentialsAPI:
+    """A credential represents an authentication and authorization mechanism for accessing services on your cloud
+    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+    groups can access the credential.
+    
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+    privilege. The user who creates the credential can delegate ownership to another user or group to manage
+    permissions on it."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create_credential(self,
+                          *,
+                          aws_iam_role: Optional[AwsIamRole] = None,
+                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          comment: Optional[str] = None,
+                          name: Optional[str] = None,
+                          purpose: Optional[CredentialPurpose] = None,
+                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+        """Create a credential.
+        
+        Creates a new credential.
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param name: str (optional)
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Indicates the purpose of the credential.
+        :param skip_validation: bool (optional)
+          Optional. Supplying true to this argument skips validation of the created set of credentials.
+        
+        :returns: :class:`CredentialInfo`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if comment is not None: body['comment'] = comment
+        if name is not None: body['name'] = name
+        if purpose is not None: body['purpose'] = purpose.value
+        if skip_validation is not None: body['skip_validation'] = skip_validation
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.1/unity-catalog/credentials', body=body, headers=headers)
+        return CredentialInfo.from_dict(res)
+
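
A usage sketch for `create_credential`, assuming `CredentialsAPI` is exposed on a `WorkspaceClient` as `w.credentials`; the credential name and role ARN are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()
cred = w.credentials.create_credential(
    name='ci-service-cred',
    purpose=CredentialPurpose.SERVICE,
    aws_iam_role=AwsIamRole(role_arn='arn:aws:iam::123456789012:role/ci'),
)
print(cred.id, cred.full_name)
```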
+    def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
+        """Delete a credential.
+        
+        Deletes a credential from the metastore. The caller must be an owner of the credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param force: bool (optional)
+          Force deletion even if there are dependent services.
+        
+        
+        """
+
+        query = {}
+        if force is not None: query['force'] = force
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)
+
+    def generate_temporary_service_credential(
+            self,
+            *,
+            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
+            credential_name: Optional[str] = None) -> TemporaryCredentials:
+        """Generate a temporary service credential.
+        
+        Returns a set of temporary credentials generated using the specified service credential. The caller
+        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
+        
+        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+          Options to customize the requested temporary credential
+        :param credential_name: str (optional)
+          The name of the service credential used to generate a temporary credential
+        
+        :returns: :class:`TemporaryCredentials`
+        """
+        body = {}
+        if azure_options is not None: body['azure_options'] = azure_options.as_dict()
+        if credential_name is not None: body['credential_name'] = credential_name
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           '/api/2.1/unity-catalog/temporary-service-credentials',
+                           body=body,
+                           headers=headers)
+        return TemporaryCredentials.from_dict(res)
+
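
A follow-on sketch for minting short-lived tokens from a service credential, reusing the placeholder name from the previous example; for Azure-backed credentials, `azure_options` can additionally narrow the requested scopes:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
tmp = w.credentials.generate_temporary_service_credential(credential_name='ci-service-cred')
# Depending on the cloud behind the credential, tmp.aws_temp_credentials or
# tmp.azure_aad is populated; cache the result until tmp.expiration_time.
print(tmp.expiration_time)
```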
+    def get_credential(self, name_arg: str) -> CredentialInfo:
+        """Get a credential.
+        
+        Gets a credential from the metastore. The caller must be a metastore admin, the owner of the
+        credential, or have any permission on the credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        
+        :returns: :class:`CredentialInfo`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.1/unity-catalog/credentials/{name_arg}', headers=headers)
+        return CredentialInfo.from_dict(res)
+
+    def list_credentials(self,
+                         *,
+                         max_results: Optional[int] = None,
+                         page_token: Optional[str] = None,
+                         purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]:
+        """List credentials.
+        
+        Gets an array of credentials (as __CredentialInfo__ objects).
+        
+        The array is limited to only the credentials that the caller has permission to access. If the caller
+        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+          to a value greater than 0, the page length is the minimum of this value and a server-configured
+          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+          set to a value less than 0, an invalid parameter error is returned.
+        :param page_token: str (optional)
+          Opaque token to retrieve the next page of results.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Return only credentials for the specified purpose.
+        
+        :returns: Iterator over :class:`CredentialInfo`
+        """
+
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
+        if purpose is not None: query['purpose'] = purpose.value
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/credentials', query=query, headers=headers)
+            if 'credentials' in json:
+                for v in json['credentials']:
+                    yield CredentialInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
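
Because the generated method follows `next_page_token` internally, callers can treat the result as a flat iterator. A sketch listing only service credentials (client setup as above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CredentialPurpose

w = WorkspaceClient()
for cred in w.credentials.list_credentials(purpose=CredentialPurpose.SERVICE):
    print(cred.name, cred.owner)
```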
+    def update_credential(self,
+                          name_arg: str,
+                          *,
+                          aws_iam_role: Optional[AwsIamRole] = None,
+                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          comment: Optional[str] = None,
+                          force: Optional[bool] = None,
+                          isolation_mode: Optional[IsolationMode] = None,
+                          new_name: Optional[str] = None,
+                          owner: Optional[str] = None,
+                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+        """Update a credential.
+        
+        Updates a credential on the metastore.
+        
+        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+        If the caller is a metastore admin, only the __owner__ field can be changed.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param force: bool (optional)
+          Force update even if there are dependent services.
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name of credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param skip_validation: bool (optional)
+          Supply true to this argument to skip validation of the updated credential.
+        
+        :returns: :class:`CredentialInfo`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if comment is not None: body['comment'] = comment
+        if force is not None: body['force'] = force
+        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
+        if new_name is not None: body['new_name'] = new_name
+        if owner is not None: body['owner'] = owner
+        if skip_validation is not None: body['skip_validation'] = skip_validation
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.1/unity-catalog/credentials/{name_arg}',
+                           body=body,
+                           headers=headers)
+        return CredentialInfo.from_dict(res)
+
+    def validate_credential(self,
+                            *,
+                            aws_iam_role: Optional[AwsIamRole] = None,
+                            azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                            credential_name: Optional[str] = None,
+                            purpose: Optional[CredentialPurpose] = None) -> ValidateCredentialResponse:
+        """Validate a credential.
+        
+        Validates a credential.
+        
+        Either the __credential_name__ or the cloud-specific credential must be provided.
+        
+        The caller must be a metastore admin or the credential owner.
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param credential_name: str (optional)
+          Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          The purpose of the credential. This should only be used when the credential is specified.
+        
+        :returns: :class:`ValidateCredentialResponse`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if credential_name is not None: body['credential_name'] = credential_name
+        if purpose is not None: body['purpose'] = purpose.value
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers)
+        return ValidateCredentialResponse.from_dict(res)
+
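
A sketch of surfacing failed checks from `validate_credential`, comparing each result against `ValidateCredentialResult.PASS`; the credential name is the same placeholder used above:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ValidateCredentialResult

w = WorkspaceClient()
resp = w.credentials.validate_credential(credential_name='ci-service-cred')
for check in resp.results or []:
    if check.result is not ValidateCredentialResult.PASS:
        print(f'{check.result}: {check.message}')
```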
+
 class ExternalLocationsAPI:
     """An external location is an object that combines a cloud storage path with a storage credential that
     authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
@@ -7134,7 +7843,6 @@ def update(self,
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the external location.
         :param owner: str (optional)
@@ -7890,25 +8598,61 @@ class OnlineTablesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self, *, name: Optional[str] = None, spec: Optional[OnlineTableSpec] = None) -> OnlineTable:
+    def wait_get_online_table_active(self,
+                                     name: str,
+                                     timeout=timedelta(minutes=20),
+                                     callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (ProvisioningInfoState.ACTIVE, )
+        failure_states = (ProvisioningInfoState.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(name=name)
+            status = poll.unity_catalog_provisioning_state
+            status_message = f'current status: {status}'
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"name={name}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
         """Create an Online Table.
         
         Create a new Online Table.
         
-        :param name: str (optional)
-          Full three-part (catalog, schema, table) name of the table.
-        :param spec: :class:`OnlineTableSpec` (optional)
-          Specification of the online table.
+        :param table: :class:`OnlineTable` (optional)
+          Online Table information.
         
-        :returns: :class:`OnlineTable`
+        :returns:
+          Long-running operation waiter for :class:`OnlineTable`.
+          See :method:wait_get_online_table_active for more details.
         """
-        body = {}
-        if name is not None: body['name'] = name
-        if spec is not None: body['spec'] = spec.as_dict()
+        body = table
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        res = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
-        return OnlineTable.from_dict(res)
+        op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
+        return Wait(self.wait_get_online_table_active,
+                    response=OnlineTable.from_dict(op_response),
+                    name=op_response['name'])
+
+    def create_and_wait(self,
+                        *,
+                        table: Optional[OnlineTable] = None,
+                        timeout=timedelta(minutes=20)) -> OnlineTable:
+        return self.create(table=table).result(timeout=timeout)
 
     def delete(self, name: str):
         """Delete an Online Table.
@@ -9019,7 +9763,6 @@ def update(self,
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the storage credential.
         :param owner: str (optional)
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 4a77496de..17567ab62 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -2661,7 +2661,7 @@ class EbsVolumeType(Enum):
 @dataclass
 class EditCluster:
     cluster_id: str
-    """ID of the cluser"""
+    """ID of the cluster"""
 
     spark_version: str
     """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
@@ -6645,7 +6645,8 @@ def set_permissions(
     ) -> ClusterPolicyPermissions:
         """Set cluster policy permissions.
         
-        Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
@@ -7145,7 +7146,7 @@ def edit(self,
         Clusters created by the Databricks Jobs service cannot be edited.
         
         :param cluster_id: str
-          ID of the cluser
+          ID of the cluster
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -7672,7 +7673,8 @@ def set_permissions(
             access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions:
         """Set cluster permissions.
         
-        Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_id: str
           The cluster for which to get or manage permissions.
@@ -7865,20 +7867,19 @@ def wait_command_status_command_execution_cancelled(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_command_status_command_execution_finished_or_error(
+    def wait_context_status_command_execution_running(
             self,
             cluster_id: str,
-            command_id: str,
             context_id: str,
             timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
+            callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, )
-        failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, )
+        target_states = (ContextStatus.RUNNING, )
+        failure_states = (ContextStatus.ERROR, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
+            poll = self.context_status(cluster_id=cluster_id, context_id=context_id)
             status = poll.status
             status_message = f'current status: {status}'
             if status in target_states:
@@ -7886,9 +7887,9 @@ def wait_command_status_command_execution_finished_or_error(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Finished or Error, got {status}: {status_message}'
+                msg = f'failed to reach Running, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
+            prefix = f"cluster_id={cluster_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -7898,19 +7899,20 @@ def wait_command_status_command_execution_finished_or_error(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_context_status_command_execution_running(
+    def wait_command_status_command_execution_finished_or_error(
             self,
             cluster_id: str,
+            command_id: str,
             context_id: str,
             timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse:
+            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ContextStatus.RUNNING, )
-        failure_states = (ContextStatus.ERROR, )
+        target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, )
+        failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.context_status(cluster_id=cluster_id, context_id=context_id)
+            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
             status = poll.status
             status_message = f'current status: {status}'
             if status in target_states:
@@ -7918,9 +7920,9 @@ def wait_context_status_command_execution_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Running, got {status}: {status_message}'
+                msg = f'failed to reach Finished or Error, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"cluster_id={cluster_id}, context_id={context_id}"
+            prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
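
The two waiters above were reordered but keep the same polling contract: linearly increasing sleeps capped at roughly 10 seconds, plus jitter. A sketch of driving them directly, assuming the API is exposed as `w.command_execution`; all identifiers are placeholders:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.command_execution.wait_context_status_command_execution_running(
    cluster_id='0123-456789-abcdef00', context_id='ctx-1',
    timeout=timedelta(minutes=5))
result = w.command_execution.wait_command_status_command_execution_finished_or_error(
    cluster_id='0123-456789-abcdef00', command_id='cmd-1', context_id='ctx-1',
    callback=lambda poll: print(poll.status))  # invoked on each intermediate poll
```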
@@ -8515,7 +8517,8 @@ def set_permissions(
     ) -> InstancePoolPermissions:
         """Set instance pool permissions.
         
-        Sets permissions on an instance pool. Instance pools can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 4a4c640e6..1b02d8c89 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -20,103 +20,6 @@
 # all definitions in this file are in alphabetical order
 
 
-@dataclass
-class CreateDashboardRequest:
-    display_name: str
-    """The display name of the dashboard."""
-
-    parent_path: Optional[str] = None
-    """The workspace path of the folder containing the dashboard. Includes leading slash and no
-    trailing slash. This field is excluded in List Dashboards responses."""
-
-    serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
-    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
-    that represents the dashboard's layout and components.
-    
-    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
-
-    warehouse_id: Optional[str] = None
-    """The warehouse ID used to run the dashboard."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateDashboardRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateDashboardRequest:
-        """Deserializes the CreateDashboardRequest from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   parent_path=d.get('parent_path', None),
-                   serialized_dashboard=d.get('serialized_dashboard', None),
-                   warehouse_id=d.get('warehouse_id', None))
-
-
-@dataclass
-class CreateScheduleRequest:
-    cron_schedule: CronSchedule
-    """The cron expression describing the frequency of the periodic refresh for this schedule."""
-
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the schedule belongs."""
-
-    display_name: Optional[str] = None
-    """The display name for schedule."""
-
-    pause_status: Optional[SchedulePauseStatus] = None
-    """The status indicates whether this schedule is paused or not."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateScheduleRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateScheduleRequest:
-        """Deserializes the CreateScheduleRequest from a dictionary."""
-        return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   pause_status=_enum(d, 'pause_status', SchedulePauseStatus))
-
-
-@dataclass
-class CreateSubscriptionRequest:
-    subscriber: Subscriber
-    """Subscriber details for users and destinations to be added as subscribers to the schedule."""
-
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the subscription belongs."""
-
-    schedule_id: Optional[str] = None
-    """UUID identifying the schedule to which the subscription belongs."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateSubscriptionRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.subscriber: body['subscriber'] = self.subscriber.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateSubscriptionRequest:
-        """Deserializes the CreateSubscriptionRequest from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   schedule_id=d.get('schedule_id', None),
-                   subscriber=_from_dict(d, 'subscriber', Subscriber))
-
-
 @dataclass
 class CronSchedule:
     quartz_cron_expression: str
@@ -607,6 +510,7 @@ class MessageErrorType(Enum):
     LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
     MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
     MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE'
     NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
     NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
     RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
@@ -839,6 +743,9 @@ class Schedule:
     update_time: Optional[str] = None
     """A timestamp indicating when the schedule was last updated."""
 
+    warehouse_id: Optional[str] = None
+    """The warehouse id to run the dashboard with for the schedule."""
+
     def as_dict(self) -> dict:
         """Serializes the Schedule into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -850,6 +757,7 @@ def as_dict(self) -> dict:
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
         if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
         if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     @classmethod
@@ -862,7 +770,8 @@ def from_dict(cls, d: Dict[str, any]) -> Schedule:
                    etag=d.get('etag', None),
                    pause_status=_enum(d, 'pause_status', SchedulePauseStatus),
                    schedule_id=d.get('schedule_id', None),
-                   update_time=d.get('update_time', None))
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
 
 
 class SchedulePauseStatus(Enum):
@@ -1032,93 +941,6 @@ def from_dict(cls, d: Dict[str, any]) -> UnpublishDashboardResponse:
         return cls()
 
 
-@dataclass
-class UpdateDashboardRequest:
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard."""
-
-    display_name: Optional[str] = None
-    """The display name of the dashboard."""
-
-    etag: Optional[str] = None
-    """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
-    has not been modified since the last read. This field is excluded in List Dashboards responses."""
-
-    serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
-    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
-    that represents the dashboard's layout and components.
-    
-    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
-
-    warehouse_id: Optional[str] = None
-    """The warehouse ID used to run the dashboard."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateDashboardRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateDashboardRequest:
-        """Deserializes the UpdateDashboardRequest from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   serialized_dashboard=d.get('serialized_dashboard', None),
-                   warehouse_id=d.get('warehouse_id', None))
-
-
-@dataclass
-class UpdateScheduleRequest:
-    cron_schedule: CronSchedule
-    """The cron expression describing the frequency of the periodic refresh for this schedule."""
-
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the schedule belongs."""
-
-    display_name: Optional[str] = None
-    """The display name for schedule."""
-
-    etag: Optional[str] = None
-    """The etag for the schedule. Must be left empty on create, must be provided on updates to ensure
-    that the schedule has not been modified since the last read, and can be optionally provided on
-    delete."""
-
-    pause_status: Optional[SchedulePauseStatus] = None
-    """The status indicates whether this schedule is paused or not."""
-
-    schedule_id: Optional[str] = None
-    """UUID identifying the schedule."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateScheduleRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest:
-        """Deserializes the UpdateScheduleRequest from a dictionary."""
-        return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   pause_status=_enum(d, 'pause_status', SchedulePauseStatus),
-                   schedule_id=d.get('schedule_id', None))
-
-
 class GenieAPI:
     """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
     business users can use to ask questions using natural language. Genie uses data registered to Unity
@@ -1313,66 +1135,31 @@ class LakeviewAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               display_name: str,
-               *,
-               parent_path: Optional[str] = None,
-               serialized_dashboard: Optional[str] = None,
-               warehouse_id: Optional[str] = None) -> Dashboard:
+    def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Create dashboard.
         
         Create a draft dashboard.
         
-        :param display_name: str
-          The display name of the dashboard.
-        :param parent_path: str (optional)
-          The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         """
-        body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if parent_path is not None: body['parent_path'] = parent_path
-        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        body = dashboard
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
         return Dashboard.from_dict(res)
 
-    def create_schedule(self,
-                        dashboard_id: str,
-                        cron_schedule: CronSchedule,
-                        *,
-                        display_name: Optional[str] = None,
-                        pause_status: Optional[SchedulePauseStatus] = None) -> Schedule:
+    def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = None) -> Schedule:
         """Create dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         """
-        body = {}
-        if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        if pause_status is not None: body['pause_status'] = pause_status.value
+        body = schedule
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -1381,21 +1168,22 @@ def create_schedule(self,
                            headers=headers)
         return Schedule.from_dict(res)
 
-    def create_subscription(self, dashboard_id: str, schedule_id: str,
-                            subscriber: Subscriber) -> Subscription:
+    def create_subscription(self,
+                            dashboard_id: str,
+                            schedule_id: str,
+                            *,
+                            subscription: Optional[Subscription] = None) -> Subscription:
         """Create schedule subscription.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
-        :param subscriber: :class:`Subscriber`
-          Subscriber details for users and destinations to be added as subscribers to the schedule.
+        :param subscription: :class:`Subscription` (optional)
         
         :returns: :class:`Subscription`
         """
-        body = {}
-        if subscriber is not None: body['subscriber'] = subscriber.as_dict()
+        body = subscription
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do(
@@ -1729,41 +1517,18 @@ def unpublish(self, dashboard_id: str):
 
         self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
 
-    def update(self,
-               dashboard_id: str,
-               *,
-               display_name: Optional[str] = None,
-               etag: Optional[str] = None,
-               serialized_dashboard: Optional[str] = None,
-               warehouse_id: Optional[str] = None) -> Dashboard:
+    def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Update dashboard.
         
         Update a draft dashboard.
         
         :param dashboard_id: str
           UUID identifying the dashboard.
-        :param display_name: str (optional)
-          The display name of the dashboard.
-        :param etag: str (optional)
-          The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         """
-        body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if etag is not None: body['etag'] = etag
-        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        body = dashboard
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
@@ -1775,34 +1540,19 @@ def update(self,
     def update_schedule(self,
                         dashboard_id: str,
                         schedule_id: str,
-                        cron_schedule: CronSchedule,
                         *,
-                        display_name: Optional[str] = None,
-                        etag: Optional[str] = None,
-                        pause_status: Optional[SchedulePauseStatus] = None) -> Schedule:
+                        schedule: Optional[Schedule] = None) -> Schedule:
         """Update dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param etag: str (optional)
-          The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that
-          the schedule has not been modified since the last read, and can be optionally provided on delete.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         """
-        body = {}
-        if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        if etag is not None: body['etag'] = etag
-        if pause_status is not None: body['pause_status'] = pause_status.value
+        body = schedule
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PUT',
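The Lakeview signature changes above fold the old flattened keyword arguments into request dataclasses. A hedged sketch of the new calling convention (the display name, parent path, and warehouse ID are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard

w = WorkspaceClient()

# create() now takes a single Dashboard payload instead of the display_name,
# parent_path, serialized_dashboard and warehouse_id keywords.
created = w.lakeview.create(
    dashboard=Dashboard(display_name='Sales overview',            # placeholder
                        parent_path='/Workspace/Users/me@example.com',
                        warehouse_id='1234567890abcdef'))          # placeholder

# update() follows the same pattern, addressing the draft by its UUID.
w.lakeview.update(dashboard_id=created.dashboard_id,
                  dashboard=Dashboard(display_name='Sales overview (v2)'))
```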
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index f1c56a1a9..05d1ccce3 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -2643,7 +2643,8 @@ def set(self,
             access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions:
         """Set object permissions.
         
-        Sets permissions on an object. Objects can inherit permissions from their parent objects or root
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
         
         :param request_object_type: str
@@ -3205,7 +3206,8 @@ def set_permissions(
             access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions:
         """Set password permissions.
         
-        Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
         
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index a4f138d6b..82d3bac65 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -2482,8 +2482,9 @@ class RepairRun:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2916,9 +2917,6 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
-    prev_page_token: Optional[str] = None
-    """A token that can be used to list the previous page of sub-resources."""
-
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -3005,7 +3003,6 @@ def as_dict(self) -> dict:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -3044,7 +3041,6 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
@@ -3190,8 +3186,9 @@ class RunJobTask:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used to trigger the job."""
@@ -3350,8 +3347,9 @@ class RunNow:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3563,8 +3561,9 @@ class RunParameters:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -3774,13 +3773,13 @@ class RunTask:
     once the Jobs service has requested a cluster for the run."""
 
     condition_task: Optional[RunConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -3815,7 +3814,8 @@ class RunTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[RunForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
@@ -3837,18 +3837,18 @@ class RunTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
@@ -3868,7 +3868,7 @@ class RunTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     run_page_url: Optional[str] = None
 
@@ -3880,14 +3880,14 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -3903,7 +3903,8 @@ class RunTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC).
@@ -4664,13 +4665,13 @@ class SubmitTask:
     used to reference the tasks to be updated or reset."""
 
     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4694,7 +4695,8 @@ class SubmitTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4707,18 +4709,18 @@ class SubmitTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     run_if: Optional[RunIf] = None
     """An optional value indicating the condition that determines whether the task should be run once
@@ -4726,17 +4728,17 @@ class SubmitTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4752,7 +4754,8 @@ class SubmitTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -4866,13 +4869,13 @@ class Task:
     used to reference the tasks to be updated or reset."""
 
     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4900,7 +4903,8 @@ class Task:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4927,18 +4931,18 @@ class Task:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     retry_on_timeout: Optional[bool] = None
     """An optional policy to specify whether to retry a job when it times out. The default behavior is
@@ -4954,17 +4958,17 @@ class Task:
     least one dependency failed * `ALL_FAILED`: All dependencies have failed"""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4980,7 +4984,8 @@ class Task:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -5922,8 +5927,8 @@ def get_run(self,
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.
         
         :returns: :class:`Run`
         """
@@ -6111,8 +6116,9 @@ def repair_run(self,
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
         :param latest_repair_id: int (optional)
@@ -6304,8 +6310,9 @@ def run_now(self,
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
         :param notebook_params: Dict[str,str] (optional)
@@ -6423,7 +6430,8 @@ def set_permissions(
             access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
         """Set job permissions.
         
-        Sets permissions on a job. Jobs can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param job_id: str
           The job for which to get or manage permissions.
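The corrected [Task parameter variables] links above describe run-time substitution in task parameters. A brief sketch of how such variables are typically passed through `run_now` (the job ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Variables such as {{job.run_id}} are resolved by the Jobs service when the
# run starts; literal values pass through unchanged.
run = w.jobs.run_now(job_id=123456789,  # placeholder job ID
                     jar_params=['{{job.run_id}}', 'literal-argument']).result()
print(run.state.life_cycle_state)
```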
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 1a2dedf31..242e3bf0c 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -56,6 +56,7 @@ class AssetType(Enum):
     ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA'
     ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL'
     ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK'
+    ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION'
 
 
 @dataclass
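For completeness, the generated enums added here serialize by string value, which is the convention the surrounding `as_dict()`/`from_dict()` helpers rely on (`.value` on write, value lookup via `_enum` on read):

```python
from databricks.sdk.service.marketplace import AssetType

# Round-trip through the wire representation used by as_dict()/from_dict().
asset = AssetType('ASSET_TYPE_PARTNER_INTEGRATION')
assert asset is AssetType.ASSET_TYPE_PARTNER_INTEGRATION
assert asset.value == 'ASSET_TYPE_PARTNER_INTEGRATION'
```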
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index b2cec8126..c44edbe48 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -4596,7 +4596,8 @@ def set_permissions(
     ) -> ExperimentPermissions:
         """Set experiment permissions.
         
-        Sets permissions on an experiment. Experiments can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param experiment_id: str
           The experiment for which to get or manage permissions.
@@ -5571,8 +5572,8 @@ def set_permissions(
     ) -> RegisteredModelPermissions:
         """Set registered model permissions.
         
-        Sets permissions on a registered model. Registered models can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 0c439ae7e..01edcdf50 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -389,19 +389,24 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
 
 @dataclass
 class ListServicePrincipalSecretsResponse:
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page."""
+
     secrets: Optional[List[SecretInfo]] = None
     """List of the secrets"""
 
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
         """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
-        return cls(secrets=_repeated_dict(d, 'secrets', SecretInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   secrets=_repeated_dict(d, 'secrets', SecretInfo))
 
 
 @dataclass
@@ -960,7 +965,7 @@ def delete(self, service_principal_id: int, secret_id: str):
             f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}',
             headers=headers)
 
-    def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
+    def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]:
         """List service principal secrets.
         
         List all secrets associated with the given service principal. This operation only returns information
@@ -968,15 +973,30 @@ def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
         
         :param service_principal_id: int
           The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
         
         :returns: Iterator over :class:`SecretInfo`
         """
 
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
-            headers=headers)
-        parsed = ListServicePrincipalSecretsResponse.from_dict(json).secrets
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
+                query=query,
+                headers=headers)
+            if 'secrets' in json:
+                for v in json['secrets']:
+                    yield SecretInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
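With the loop above, the secrets listing becomes a generator that follows `next_page_token` transparently, so callers iterate without handling tokens themselves. A usage sketch (the service principal ID is a placeholder):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Each page is fetched lazily; iteration ends when a response arrives with no
# next_page_token.
for secret in a.service_principal_secrets.list(service_principal_id=1234567890):
    print(secret.id, secret.status)
```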
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 9c12f8788..26461d088 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -615,6 +615,10 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
 @dataclass
 class IngestionGatewayPipelineDefinition:
     connection_id: Optional[str] = None
+    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection this gateway
+    pipeline uses to communicate with the source."""
+
+    connection_name: Optional[str] = None
     """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
     source."""
 
@@ -633,6 +637,7 @@ def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
         if self.gateway_storage_catalog is not None:
             body['gateway_storage_catalog'] = self.gateway_storage_catalog
         if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
@@ -644,6 +649,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
         """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
         return cls(connection_id=d.get('connection_id', None),
+                   connection_name=d.get('connection_name', None),
                    gateway_storage_catalog=d.get('gateway_storage_catalog', None),
                    gateway_storage_name=d.get('gateway_storage_name', None),
                    gateway_storage_schema=d.get('gateway_storage_schema', None))
@@ -2122,13 +2128,13 @@ class PipelinesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_pipeline_idle(
+    def wait_get_pipeline_running(
             self,
             pipeline_id: str,
             timeout=timedelta(minutes=20),
             callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.IDLE, )
+        target_states = (PipelineState.RUNNING, )
         failure_states = (PipelineState.FAILED, )
         status_message = 'polling...'
         attempt = 1
@@ -2141,7 +2147,7 @@ def wait_get_pipeline_idle(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                msg = f'failed to reach RUNNING, got {status}: {status_message}'
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
@@ -2153,13 +2159,13 @@ def wait_get_pipeline_idle(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_pipeline_running(
+    def wait_get_pipeline_idle(
             self,
             pipeline_id: str,
             timeout=timedelta(minutes=20),
             callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.RUNNING, )
+        target_states = (PipelineState.IDLE, )
         failure_states = (PipelineState.FAILED, )
         status_message = 'polling...'
         attempt = 1
@@ -2172,7 +2178,7 @@ def wait_get_pipeline_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f'failed to reach IDLE, got {status}: {status_message}'
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
@@ -2518,7 +2524,8 @@ def set_permissions(
             access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
         """Set pipeline permissions.
         
-        Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
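Given the deprecation note on `connection_id` above, new gateway pipeline definitions should carry `connection_name` instead. A hedged construction example (the connection, catalog, and schema names are invented):

```python
from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

# connection_id stays accepted for backward compatibility, but only the Unity
# Catalog connection name needs to be supplied going forward.
gateway = IngestionGatewayPipelineDefinition(
    connection_name='sql-server-connection',     # placeholder connection
    gateway_storage_catalog='main',              # placeholder catalog
    gateway_storage_schema='ingestion_gateway',  # placeholder schema
    gateway_storage_name='gateway-storage')
print(gateway.as_dict())
```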
diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py
index 1dc6f3b8d..b1d825d1a 100755
--- a/databricks/sdk/service/provisioning.py
+++ b/databricks/sdk/service/provisioning.py
@@ -412,6 +412,9 @@ class CreateWorkspaceRequest:
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
 
+    is_no_public_ip_enabled: Optional[bool] = None
+    """Whether no public IP is enabled for the workspace."""
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account. For example,
     `us-east4`."""
@@ -460,6 +463,8 @@ def as_dict(self) -> dict:
         if self.gcp_managed_network_config:
             body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
         if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
         if self.location is not None: body['location'] = self.location
         if self.managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
@@ -486,6 +491,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest:
                    gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
                                                          GcpManagedNetworkConfig),
                    gke_config=_from_dict(d, 'gke_config', GkeConfig),
+                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
                    location=d.get('location', None),
                    managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
                                                                   None),
@@ -632,6 +638,35 @@ class ErrorType(Enum):
     VPC = 'vpc'
 
 
+@dataclass
+class ExternalCustomerInfo:
+    authoritative_user_email: Optional[str] = None
+    """Email of the authoritative user."""
+
+    authoritative_user_full_name: Optional[str] = None
+    """The authoritative user full name."""
+
+    customer_name: Optional[str] = None
+    """The legal entity name for the external workspace"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.authoritative_user_email is not None:
+            body['authoritative_user_email'] = self.authoritative_user_email
+        if self.authoritative_user_full_name is not None:
+            body['authoritative_user_full_name'] = self.authoritative_user_full_name
+        if self.customer_name is not None: body['customer_name'] = self.customer_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo:
+        """Deserializes the ExternalCustomerInfo from a dictionary."""
+        return cls(authoritative_user_email=d.get('authoritative_user_email', None),
+                   authoritative_user_full_name=d.get('authoritative_user_full_name', None),
+                   customer_name=d.get('customer_name', None))
+
+
 @dataclass
 class GcpKeyInfo:
     kms_key_id: str
@@ -1443,6 +1478,10 @@ class Workspace:
     
     This value must be unique across all non-deleted deployments across all AWS regions."""
 
+    external_customer_info: Optional[ExternalCustomerInfo] = None
+    """If this workspace is for a external customer, then external_customer_info is populated. If this
+    workspace is not for a external customer, then external_customer_info is empty."""
+
     gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
     """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
     It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP
@@ -1466,6 +1505,9 @@ class Workspace:
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
 
+    is_no_public_ip_enabled: Optional[bool] = None
+    """Whether no public IP is enabled for the workspace."""
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account (for example,
     `us-east4`)."""
@@ -1524,9 +1566,12 @@ def as_dict(self) -> dict:
         if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict()
         if self.gcp_managed_network_config:
             body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
         if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
         if self.location is not None: body['location'] = self.location
         if self.managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
@@ -1557,9 +1602,11 @@ def from_dict(cls, d: Dict[str, any]) -> Workspace:
                    credentials_id=d.get('credentials_id', None),
                    custom_tags=d.get('custom_tags', None),
                    deployment_name=d.get('deployment_name', None),
+                   external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo),
                    gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
                                                          GcpManagedNetworkConfig),
                    gke_config=_from_dict(d, 'gke_config', GkeConfig),
+                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
                    location=d.get('location', None),
                    managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
                                                                   None),
@@ -2399,6 +2446,7 @@ def create(self,
                deployment_name: Optional[str] = None,
                gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
                gke_config: Optional[GkeConfig] = None,
+               is_no_public_ip_enabled: Optional[bool] = None,
                location: Optional[str] = None,
                managed_services_customer_managed_key_id: Optional[str] = None,
                network_id: Optional[str] = None,
@@ -2477,6 +2525,8 @@ def create(self,
           [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
         :param gke_config: :class:`GkeConfig` (optional)
           The configurations for the GKE cluster of a Databricks workspace.
+        :param is_no_public_ip_enabled: bool (optional)
+          Whether no public IP is enabled for the workspace.
         :param location: str (optional)
           The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
         :param managed_services_customer_managed_key_id: str (optional)
@@ -2519,6 +2569,7 @@ def create(self,
         if gcp_managed_network_config is not None:
             body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict()
         if gke_config is not None: body['gke_config'] = gke_config.as_dict()
+        if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled
         if location is not None: body['location'] = location
         if managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id
@@ -2552,6 +2603,7 @@ def create_and_wait(
         deployment_name: Optional[str] = None,
         gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
         gke_config: Optional[GkeConfig] = None,
+        is_no_public_ip_enabled: Optional[bool] = None,
         location: Optional[str] = None,
         managed_services_customer_managed_key_id: Optional[str] = None,
         network_id: Optional[str] = None,
@@ -2568,6 +2620,7 @@ def create_and_wait(
                            deployment_name=deployment_name,
                            gcp_managed_network_config=gcp_managed_network_config,
                            gke_config=gke_config,
+                           is_no_public_ip_enabled=is_no_public_ip_enabled,
                            location=location,
                            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
                            network_id=network_id,
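
As a usage sketch for the new flag: is_no_public_ip_enabled rides through create() into the request body only when explicitly set, matching the as_dict() logic above. The account host, account ID, and GCP project below are placeholders for illustration, not values from this patch.

    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient(host='https://accounts.gcp.databricks.com', account_id='<account-id>')

    # Provision a workspace whose compute nodes get no public IP addresses.
    waiter = a.workspaces.create(
        workspace_name='my-workspace',
        location='us-east4',
        cloud_resource_container=provisioning.CloudResourceContainer(
            gcp=provisioning.CustomerFacingGcpCloudResourceContainer(project_id='<gcp-project>')),
        is_no_public_ip_enabled=True)
    workspace = waiter.result()
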
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 7639d96fb..b00420a08 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -2994,8 +2994,8 @@ def set_permissions(
     ) -> ServingEndpointPermissions:
         """Set serving endpoint permissions.
         
-        Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
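
The reworded docstring clarifies that set_permissions is a full replacement rather than an incremental grant: any direct permission not present in the request is removed. A minimal sketch, with a hypothetical endpoint ID and user:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import serving

    w = WorkspaceClient()

    # Replaces every direct grant on the endpoint with this single entry;
    # passing no entries at all would delete all direct permissions.
    w.serving_endpoints.set_permissions(
        serving_endpoint_id='<endpoint-id>',
        access_control_list=[
            serving.ServingEndpointAccessControlRequest(
                user_name='someone@example.com',
                permission_level=serving.ServingEndpointPermissionLevel.CAN_QUERY)
        ])
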
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index a6a235158..607cc3085 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -14,6 +14,122 @@
 # all definitions in this file are in alphabetical order
 
 
+@dataclass
+class AibiDashboardEmbeddingAccessPolicy:
+    access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicy:
+        """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary."""
+        return cls(access_policy_type=_enum(d, 'access_policy_type',
+                                            AibiDashboardEmbeddingAccessPolicyAccessPolicyType))
+
+
+class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum):
+
+    ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS'
+    ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS'
+    DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS'
+
+
+@dataclass
+class AibiDashboardEmbeddingAccessPolicySetting:
+    aibi_dashboard_embedding_access_policy: AibiDashboardEmbeddingAccessPolicy
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aibi_dashboard_embedding_access_policy:
+            body[
+                'aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict(
+                )
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary."""
+        return cls(aibi_dashboard_embedding_access_policy=_from_dict(
+            d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
+@dataclass
+class AibiDashboardEmbeddingApprovedDomains:
+    approved_domains: Optional[List[str]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains]
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomains:
+        """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary."""
+        return cls(approved_domains=d.get('approved_domains', None))
+
+
+@dataclass
+class AibiDashboardEmbeddingApprovedDomainsSetting:
+    aibi_dashboard_embedding_approved_domains: AibiDashboardEmbeddingApprovedDomains
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aibi_dashboard_embedding_approved_domains:
+            body[
+                'aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains.as_dict(
+                )
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary."""
+        return cls(aibi_dashboard_embedding_approved_domains=_from_dict(
+            d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class AutomaticClusterUpdateSetting:
     automatic_cluster_update_workspace: ClusterAutoRestartMessage
@@ -2299,6 +2415,9 @@ class TokenInfo:
     expiry_time: Optional[int] = None
     """Timestamp when the token expires."""
 
+    last_used_day: Optional[int] = None
+    """Approximate timestamp for the day the token was last used. Accurate up to 1 day."""
+
     owner_id: Optional[int] = None
     """User ID of the user that owns the token."""
 
@@ -2316,6 +2435,7 @@ def as_dict(self) -> dict:
         if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
         if self.creation_time is not None: body['creation_time'] = self.creation_time
         if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
         if self.owner_id is not None: body['owner_id'] = self.owner_id
         if self.token_id is not None: body['token_id'] = self.token_id
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
@@ -2329,6 +2449,7 @@ def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
                    created_by_username=d.get('created_by_username', None),
                    creation_time=d.get('creation_time', None),
                    expiry_time=d.get('expiry_time', None),
+                   last_used_day=d.get('last_used_day', None),
                    owner_id=d.get('owner_id', None),
                    token_id=d.get('token_id', None),
                    workspace_id=d.get('workspace_id', None))
@@ -2435,6 +2556,66 @@ class TokenType(Enum):
     AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
 
 
+@dataclass
+class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AibiDashboardEmbeddingAccessPolicySetting
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as a single string. To
+    specify multiple fields in the field mask, use a comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
+        """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting))
+
+
+@dataclass
+class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AibiDashboardEmbeddingApprovedDomainsSetting
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as a single string. To
+    specify multiple fields in the field mask, use a comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
+        """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting))
+
+
 @dataclass
 class UpdateAutomaticClusterUpdateSettingRequest:
     """Details required to update a setting."""
@@ -3103,6 +3284,130 @@ def personal_compute(self) -> PersonalComputeAPI:
         return self._personal_compute
 
 
+class AibiDashboardEmbeddingAccessPolicyAPI:
+    """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS)."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Retrieve the AI/BI dashboard embedding access policy.
+        
+        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+        permitting AI/BI dashboards to be embedded on approved domains.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> update pattern
+          to perform setting updates in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the PATCH request to identify the setting version you are updating.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           query=query,
+                           headers=headers)
+        return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting,
+               field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Update the AI/BI dashboard embedding access policy.
+        
+        Updates the AI/BI dashboard embedding access policy at the workspace level.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           body=body,
+                           headers=headers)
+        return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
+
+
+class AibiDashboardEmbeddingApprovedDomainsAPI:
+    """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Retrieve the list of domains approved to host embedded AI/BI dashboards.
+        
+        Retrieves the list of domains approved to host embedded AI/BI dashboards.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> update pattern
+          to perform setting updates in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the PATCH request to identify the setting version you are updating.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           query=query,
+                           headers=headers)
+        return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting,
+               field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Update the list of domains approved to host embedded AI/BI dashboards.
+        
+        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           body=body,
+                           headers=headers)
+        return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
+
+
 class AutomaticClusterUpdateAPI:
     """Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
     off."""
@@ -4580,6 +4885,8 @@ class SettingsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+        self._aibi_dashboard_embedding_access_policy = AibiDashboardEmbeddingAccessPolicyAPI(self._api)
+        self._aibi_dashboard_embedding_approved_domains = AibiDashboardEmbeddingApprovedDomainsAPI(self._api)
         self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api)
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
@@ -4588,6 +4895,16 @@ def __init__(self, api_client):
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
+    @property
+    def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI:
+        """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level."""
+        return self._aibi_dashboard_embedding_access_policy
+
+    @property
+    def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI:
+        """Controls the list of domains approved to host the embedded AI/BI dashboards."""
+        return self._aibi_dashboard_embedding_approved_domains
+
     @property
     def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI:
         """Controls whether automatic cluster update is enabled for the current workspace."""
@@ -4751,7 +5068,8 @@ def set_permissions(
             access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions:
         """Set token permissions.
         
-        Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
         
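
A sketch of the read -> update pattern with the new AI/BI embedding access policy API: fetch the current setting to obtain a fresh etag, then PATCH with a field mask. The exact field mask path below is an assumption for illustration; the class and method names come from the diff above.

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    policy = settings.AibiDashboardEmbeddingAccessPolicy(
        access_policy_type=settings.AibiDashboardEmbeddingAccessPolicyAccessPolicyType.ALLOW_APPROVED_DOMAINS)

    # GET first so the PATCH carries the current etag, avoiding races with
    # concurrent writers of the same setting.
    current = w.settings.aibi_dashboard_embedding_access_policy.get()
    updated = w.settings.aibi_dashboard_embedding_access_policy.update(
        allow_missing=True,
        field_mask='aibi_dashboard_embedding_access_policy.access_policy_type',  # assumed path
        setting=settings.AibiDashboardEmbeddingAccessPolicySetting(
            aibi_dashboard_embedding_access_policy=policy,
            etag=current.etag))
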
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 772bc7aee..091fa9e82 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -23,418 +23,6 @@ class AuthenticationType(Enum):
     TOKEN = 'TOKEN'
 
 
-@dataclass
-class CentralCleanRoomInfo:
-    clean_room_assets: Optional[List[CleanRoomAssetInfo]] = None
-    """All assets from all collaborators that are available in the clean room. Only one of table_info
-    or notebook_info will be filled in."""
-
-    collaborators: Optional[List[CleanRoomCollaboratorInfo]] = None
-    """All collaborators who are in the clean room."""
-
-    creator: Optional[CleanRoomCollaboratorInfo] = None
-    """The collaborator who created the clean room."""
-
-    station_cloud: Optional[str] = None
-    """The cloud where clean room tasks will be run."""
-
-    station_region: Optional[str] = None
-    """The region where clean room tasks will be run."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CentralCleanRoomInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.clean_room_assets: body['clean_room_assets'] = [v.as_dict() for v in self.clean_room_assets]
-        if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators]
-        if self.creator: body['creator'] = self.creator.as_dict()
-        if self.station_cloud is not None: body['station_cloud'] = self.station_cloud
-        if self.station_region is not None: body['station_region'] = self.station_region
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CentralCleanRoomInfo:
-        """Deserializes the CentralCleanRoomInfo from a dictionary."""
-        return cls(clean_room_assets=_repeated_dict(d, 'clean_room_assets', CleanRoomAssetInfo),
-                   collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaboratorInfo),
-                   creator=_from_dict(d, 'creator', CleanRoomCollaboratorInfo),
-                   station_cloud=d.get('station_cloud', None),
-                   station_region=d.get('station_region', None))
-
-
-@dataclass
-class CleanRoomAssetInfo:
-    added_at: Optional[int] = None
-    """Time at which this asset was added, in epoch milliseconds."""
-
-    notebook_info: Optional[CleanRoomNotebookInfo] = None
-    """Details about the notebook asset."""
-
-    owner: Optional[CleanRoomCollaboratorInfo] = None
-    """The collaborator who owns the asset."""
-
-    table_info: Optional[CleanRoomTableInfo] = None
-    """Details about the table asset."""
-
-    updated_at: Optional[int] = None
-    """Time at which this asset was updated, in epoch milliseconds."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomAssetInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.notebook_info: body['notebook_info'] = self.notebook_info.as_dict()
-        if self.owner: body['owner'] = self.owner.as_dict()
-        if self.table_info: body['table_info'] = self.table_info.as_dict()
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetInfo:
-        """Deserializes the CleanRoomAssetInfo from a dictionary."""
-        return cls(added_at=d.get('added_at', None),
-                   notebook_info=_from_dict(d, 'notebook_info', CleanRoomNotebookInfo),
-                   owner=_from_dict(d, 'owner', CleanRoomCollaboratorInfo),
-                   table_info=_from_dict(d, 'table_info', CleanRoomTableInfo),
-                   updated_at=d.get('updated_at', None))
-
-
-@dataclass
-class CleanRoomCatalog:
-    catalog_name: Optional[str] = None
-    """Name of the catalog in the clean room station. Empty for notebooks."""
-
-    notebook_files: Optional[List[SharedDataObject]] = None
-    """The details of the shared notebook files."""
-
-    tables: Optional[List[SharedDataObject]] = None
-    """The details of the shared tables."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCatalog into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.notebook_files: body['notebook_files'] = [v.as_dict() for v in self.notebook_files]
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalog:
-        """Deserializes the CleanRoomCatalog from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   notebook_files=_repeated_dict(d, 'notebook_files', SharedDataObject),
-                   tables=_repeated_dict(d, 'tables', SharedDataObject))
-
-
-@dataclass
-class CleanRoomCatalogUpdate:
-    catalog_name: Optional[str] = None
-    """The name of the catalog to update assets."""
-
-    updates: Optional[SharedDataObjectUpdate] = None
-    """The updates to the assets in the catalog."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCatalogUpdate into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.updates: body['updates'] = self.updates.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalogUpdate:
-        """Deserializes the CleanRoomCatalogUpdate from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   updates=_from_dict(d, 'updates', SharedDataObjectUpdate))
-
-
-@dataclass
-class CleanRoomCollaboratorInfo:
-    global_metastore_id: Optional[str] = None
-    """The global Unity Catalog metastore id of the collaborator. Also known as the sharing identifier.
-    The identifier is of format __cloud__:__region__:__metastore-uuid__."""
-
-    organization_name: Optional[str] = None
-    """The organization name of the collaborator. This is configured in the metastore for Delta Sharing
-    and is used to identify the organization to other collaborators."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCollaboratorInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.organization_name is not None: body['organization_name'] = self.organization_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaboratorInfo:
-        """Deserializes the CleanRoomCollaboratorInfo from a dictionary."""
-        return cls(global_metastore_id=d.get('global_metastore_id', None),
-                   organization_name=d.get('organization_name', None))
-
-
-@dataclass
-class CleanRoomInfo:
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    created_at: Optional[int] = None
-    """Time at which this clean room was created, in epoch milliseconds."""
-
-    created_by: Optional[str] = None
-    """Username of clean room creator."""
-
-    local_catalogs: Optional[List[CleanRoomCatalog]] = None
-    """Catalog aliases shared by the current collaborator with asset details."""
-
-    name: Optional[str] = None
-    """Name of the clean room."""
-
-    owner: Optional[str] = None
-    """Username of current owner of clean room."""
-
-    remote_detailed_info: Optional[CentralCleanRoomInfo] = None
-    """Central clean room details."""
-
-    updated_at: Optional[int] = None
-    """Time at which this clean room was updated, in epoch milliseconds."""
-
-    updated_by: Optional[str] = None
-    """Username of clean room updater."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.local_catalogs: body['local_catalogs'] = [v.as_dict() for v in self.local_catalogs]
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomInfo:
-        """Deserializes the CleanRoomInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   local_catalogs=_repeated_dict(d, 'local_catalogs', CleanRoomCatalog),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
-
-
-@dataclass
-class CleanRoomNotebookInfo:
-    notebook_content: Optional[str] = None
-    """The base64 representation of the notebook content in HTML."""
-
-    notebook_name: Optional[str] = None
-    """The name of the notebook."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookInfo:
-        """Deserializes the CleanRoomNotebookInfo from a dictionary."""
-        return cls(notebook_content=d.get('notebook_content', None),
-                   notebook_name=d.get('notebook_name', None))
-
-
-@dataclass
-class CleanRoomTableInfo:
-    catalog_name: Optional[str] = None
-    """Name of parent catalog."""
-
-    columns: Optional[List[ColumnInfo]] = None
-    """The array of __ColumnInfo__ definitions of the table's columns."""
-
-    full_name: Optional[str] = None
-    """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__"""
-
-    name: Optional[str] = None
-    """Name of table, relative to parent schema."""
-
-    schema_name: Optional[str] = None
-    """Name of parent schema relative to its parent catalog."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomTableInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomTableInfo:
-        """Deserializes the CleanRoomTableInfo from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   columns=_repeated_dict(d, 'columns', ColumnInfo),
-                   full_name=d.get('full_name', None),
-                   name=d.get('name', None),
-                   schema_name=d.get('schema_name', None))
-
-
-@dataclass
-class ColumnInfo:
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    mask: Optional[ColumnMask] = None
-
-    name: Optional[str] = None
-    """Name of Column."""
-
-    nullable: Optional[bool] = None
-    """Whether field may be Null (default: true)."""
-
-    partition_index: Optional[int] = None
-    """Partition index for column."""
-
-    position: Optional[int] = None
-    """Ordinal position of column (starting at position 0)."""
-
-    type_interval_type: Optional[str] = None
-    """Format of IntervalType."""
-
-    type_json: Optional[str] = None
-    """Full data type specification, JSON-serialized."""
-
-    type_name: Optional[ColumnTypeName] = None
-    """Name of type (INT, STRUCT, MAP, etc.)."""
-
-    type_precision: Optional[int] = None
-    """Digits of precision; required for DecimalTypes."""
-
-    type_scale: Optional[int] = None
-    """Digits to right of decimal; Required for DecimalTypes."""
-
-    type_text: Optional[str] = None
-    """Full data type specification as SQL/catalogString text."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
-        """Deserializes the ColumnInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   mask=_from_dict(d, 'mask', ColumnMask),
-                   name=d.get('name', None),
-                   nullable=d.get('nullable', None),
-                   partition_index=d.get('partition_index', None),
-                   position=d.get('position', None),
-                   type_interval_type=d.get('type_interval_type', None),
-                   type_json=d.get('type_json', None),
-                   type_name=_enum(d, 'type_name', ColumnTypeName),
-                   type_precision=d.get('type_precision', None),
-                   type_scale=d.get('type_scale', None),
-                   type_text=d.get('type_text', None))
-
-
-@dataclass
-class ColumnMask:
-    function_name: Optional[str] = None
-    """The full name of the column mask SQL UDF."""
-
-    using_column_names: Optional[List[str]] = None
-    """The list of additional table columns to be passed as input to the column mask function. The
-    first arg of the mask function should be of the type of the column being masked and the types of
-    the rest of the args should match the types of columns in 'using_column_names'."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
-        """Deserializes the ColumnMask from a dictionary."""
-        return cls(function_name=d.get('function_name', None),
-                   using_column_names=d.get('using_column_names', None))
-
-
-class ColumnTypeName(Enum):
-    """Name of type (INT, STRUCT, MAP, etc.)."""
-
-    ARRAY = 'ARRAY'
-    BINARY = 'BINARY'
-    BOOLEAN = 'BOOLEAN'
-    BYTE = 'BYTE'
-    CHAR = 'CHAR'
-    DATE = 'DATE'
-    DECIMAL = 'DECIMAL'
-    DOUBLE = 'DOUBLE'
-    FLOAT = 'FLOAT'
-    INT = 'INT'
-    INTERVAL = 'INTERVAL'
-    LONG = 'LONG'
-    MAP = 'MAP'
-    NULL = 'NULL'
-    SHORT = 'SHORT'
-    STRING = 'STRING'
-    STRUCT = 'STRUCT'
-    TABLE_TYPE = 'TABLE_TYPE'
-    TIMESTAMP = 'TIMESTAMP'
-    TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
-    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
-
-
-@dataclass
-class CreateCleanRoom:
-    name: str
-    """Name of the clean room."""
-
-    remote_detailed_info: CentralCleanRoomInfo
-    """Central clean room details."""
-
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateCleanRoom into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoom:
-        """Deserializes the CreateCleanRoom from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo))
-
-
 @dataclass
 class CreateProvider:
     name: str
@@ -623,29 +211,6 @@ def from_dict(cls, d: Dict[str, any]) -> IpAccessList:
         return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None))
 
 
-@dataclass
-class ListCleanRoomsResponse:
-    clean_rooms: Optional[List[CleanRoomInfo]] = None
-    """An array of clean rooms. Remote details (central) are not included."""
-
-    next_page_token: Optional[str] = None
-    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
-    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
-        """Deserializes the ListCleanRoomsResponse from a dictionary."""
-        return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoomInfo),
-                   next_page_token=d.get('next_page_token', None))
-
-
 @dataclass
 class ListProviderSharesResponse:
     next_page_token: Optional[str] = None
@@ -1473,38 +1038,6 @@ class SharedDataObjectUpdateAction(Enum):
     UPDATE = 'UPDATE'
 
 
-@dataclass
-class UpdateCleanRoom:
-    catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None
-    """Array of shared data object updates."""
-
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    name: Optional[str] = None
-    """The name of the clean room."""
-
-    owner: Optional[str] = None
-    """Username of current owner of clean room."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateCleanRoom into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_updates: body['catalog_updates'] = [v.as_dict() for v in self.catalog_updates]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoom:
-        """Deserializes the UpdateCleanRoom from a dictionary."""
-        return cls(catalog_updates=_repeated_dict(d, 'catalog_updates', CleanRoomCatalogUpdate),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None))
-
-
 @dataclass
 class UpdatePermissionsResponse:
 
@@ -1699,157 +1232,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
                    page_token=d.get('page_token', None))
 
 
-class CleanRoomsAPI:
-    """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive
-    enterprise data, including customer data, for measurements, insights, activation and other use cases.
-    
-    To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** privilege."""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def create(self,
-               name: str,
-               remote_detailed_info: CentralCleanRoomInfo,
-               *,
-               comment: Optional[str] = None) -> CleanRoomInfo:
-        """Create a clean room.
-        
-        Creates a new clean room with specified collaborators. The caller must be a metastore admin or have the
-        **CREATE_CLEAN_ROOM** privilege on the metastore.
-        
-        :param name: str
-          Name of the clean room.
-        :param remote_detailed_info: :class:`CentralCleanRoomInfo`
-          Central clean room details.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-        body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if remote_detailed_info is not None: body['remote_detailed_info'] = remote_detailed_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/clean-rooms', body=body, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-    def delete(self, name: str):
-        """Delete a clean room.
-        
-        Deletes a data object clean room from the metastore. The caller must be an owner of the clean room.
-        
-        :param name: str
-          The name of the clean room.
-        
-        
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/clean-rooms/{name}', headers=headers)
-
-    def get(self, name: str, *, include_remote_details: Optional[bool] = None) -> CleanRoomInfo:
-        """Get a clean room.
-        
-        Gets a data object clean room from the metastore. The caller must be a metastore admin or the owner of
-        the clean room.
-        
-        :param name: str
-          The name of the clean room.
-        :param include_remote_details: bool (optional)
-          Whether to include remote details (central) on the clean room.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-
-        query = {}
-        if include_remote_details is not None: query['include_remote_details'] = include_remote_details
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/clean-rooms/{name}', query=query, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[CleanRoomInfo]:
-        """List clean rooms.
-        
-        Gets an array of data object clean rooms from the metastore. The caller must be a metastore admin or
-        the owner of the clean room. There is no guarantee of a specific ordering of the elements in the
-        array.
-        
-        :param max_results: int (optional)
-          Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not
-          recommended). - when set to a value greater than 0, the page length is the minimum of this value and
-          a server configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CleanRoomInfo`
-        """
-
-        query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/clean-rooms', query=query, headers=headers)
-            if 'clean_rooms' in json:
-                for v in json['clean_rooms']:
-                    yield CleanRoomInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None,
-               comment: Optional[str] = None,
-               owner: Optional[str] = None) -> CleanRoomInfo:
-        """Update a clean room.
-        
-        Updates the clean room with the changes and data objects in the request. The caller must be the owner
-        of the clean room or a metastore admin.
-        
-        When the caller is a metastore admin, only the __owner__ field can be updated.
-        
-        In the case that the clean room name is changed **updateCleanRoom** requires that the caller is both
-        the clean room owner and a metastore admin.
-        
-        For each table that is added through this method, the clean room owner must also have **SELECT**
-        privilege on the table. The privilege must be maintained indefinitely for recipients to be able to
-        access the table. Typically, you should use a group as the clean room owner.
-        
-        Table removals through **update** do not require additional privileges.
-        
-        :param name: str
-          The name of the clean room.
-        :param catalog_updates: List[:class:`CleanRoomCatalogUpdate`] (optional)
-          Array of shared data object updates.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param owner: str (optional)
-          Username of current owner of clean room.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-        body = {}
-        if catalog_updates is not None: body['catalog_updates'] = [v.as_dict() for v in catalog_updates]
-        if comment is not None: body['comment'] = comment
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/clean-rooms/{name}', body=body, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-
 class ProvidersAPI:
     """A data provider is an object representing the organization in the real world who shares the data. A
     provider contains shares which further contain the shared data."""
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 7a224feeb..390aee5ee 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -507,7 +507,7 @@ class ChannelName(Enum):
     CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
     CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
     CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW'
-    CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
+    CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
 
 
 @dataclass
@@ -6579,11 +6579,10 @@ class StatementExecutionAPI:
     outstanding statement might have already completed execution when the cancel request arrives. Polling for
     status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
     are approximate, occur server-side, and cannot account for things such as caller delays and network
-    latency from caller to service. - The system will auto-close a statement after one hour if the client
-    stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
-    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
-    Execution API to cancel it.
+    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
+    once every 15 minutes. - The results are only available for one hour after success; polling does not
+    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
+    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
@@ -7243,7 +7242,8 @@ def set_permissions(self,
                         ) -> WarehousePermissions:
         """Set SQL warehouse permissions.
         
-        Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
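
A sketch of a polling loop that respects the revised keep-alive guidance above (poll at least once every 15 minutes; a 60-second interval leaves ample margin). The warehouse ID is a placeholder.

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.sql import StatementState

    w = WorkspaceClient()

    # Submit without waiting server-side, then poll until a terminal state.
    resp = w.statement_execution.execute_statement(
        statement='SELECT 1', warehouse_id='<warehouse-id>', wait_timeout='0s')

    while resp.status.state in (StatementState.PENDING, StatementState.RUNNING):
        time.sleep(60)  # well inside the 15-minute keep-alive window
        resp = w.statement_execution.get_statement(resp.statement_id)
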
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 7c8bfbd5e..01c463a0d 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -1897,7 +1897,8 @@ def set_permissions(
             access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions:
         """Set repo permissions.
         
-        Sets permissions on a repo. Repos can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param repo_id: str
           The repo for which to get or manage permissions.
@@ -2527,8 +2528,9 @@ def set_permissions(
     ) -> WorkspaceObjectPermissions:
         """Set workspace object permissions.
         
-        Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent
-        objects or root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
         
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index aae5aca67..8935b5b5d 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.36.0'
+__version__ = '0.37.0'
diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst
index 4249b9dea..955d6da53 100644
--- a/docs/account/oauth2/service_principal_secrets.rst
+++ b/docs/account/oauth2/service_principal_secrets.rst
@@ -42,7 +42,7 @@
         
         
 
-    .. py:method:: list(service_principal_id: int) -> Iterator[SecretInfo]
+    .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo]
 
         List service principal secrets.
         
@@ -51,6 +51,13 @@
         
         :param service_principal_id: int
           The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
         
         :returns: Iterator over :class:`SecretInfo`
         
\ No newline at end of file
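Callers rarely need to pass `page_token` themselves: `list` returns an iterator that follows `next_page_token` until exhaustion. A minimal sketch, with a placeholder service principal ID:

```python
from databricks.sdk import AccountClient

a = AccountClient()
# The iterator requests further pages transparently until the response
# contains no next_page_token.
for secret in a.service_principal_secrets.list(service_principal_id=1234):
    print(secret.id)
```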
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index 98c47cc9b..fa1d130b1 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -11,7 +11,7 @@
     These endpoints are available if your account is on the E2 version of the platform or on a select custom
     plan that allows multiple workspaces per account.
 
-    .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
+    .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
 
         Usage:
@@ -116,6 +116,8 @@
           [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
         :param gke_config: :class:`GkeConfig` (optional)
           The configurations for the GKE cluster of a Databricks workspace.
+        :param is_no_public_ip_enabled: bool (optional)
+          Whether no public IP is enabled for the workspace.
         :param location: str (optional)
           The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
         :param managed_services_customer_managed_key_id: str (optional)
@@ -148,7 +150,7 @@
           See :method:wait_get_workspace_running for more details.
         
 
-    .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
+    .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
 
     .. py:method:: delete(workspace_id: int)
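A minimal sketch of the newly documented flag on `create`; the workspace name is a placeholder and all cloud-specific provisioning fields are elided:

```python
from databricks.sdk import AccountClient

a = AccountClient()
# A real call also needs credentials/storage configuration IDs and
# region fields appropriate to the cloud; only the new flag is shown.
waiter = a.workspaces.create(workspace_name='sdk-example',
                             is_no_public_ip_enabled=True)
workspace = waiter.result()
```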
diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst
index 2d522c625..2214e2ac9 100644
--- a/docs/dbdataclasses/apps.rst
+++ b/docs/dbdataclasses/apps.rst
@@ -190,14 +190,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateAppDeploymentRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateAppRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: GetAppPermissionLevelsResponse
    :members:
    :undoc-members:
@@ -217,7 +209,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. autoclass:: StopAppRequest
    :members:
    :undoc-members:
-
-.. autoclass:: UpdateAppRequest
-   :members:
-   :undoc-members:
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index cb6399348..9f5fef3bc 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -69,6 +69,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AwsIamRole
+   :members:
+   :undoc-members:
+
 .. autoclass:: AwsIamRoleRequest
    :members:
    :undoc-members:
@@ -77,6 +81,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AzureActiveDirectoryToken
+   :members:
+   :undoc-members:
+
+.. autoclass:: AzureManagedIdentity
+   :members:
+   :undoc-members:
+
 .. autoclass:: AzureManagedIdentityRequest
    :members:
    :undoc-members:
@@ -246,6 +258,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: USER_DEFINED_TYPE
       :value: "USER_DEFINED_TYPE"
 
+   .. py:attribute:: VARIANT
+      :value: "VARIANT"
+
 .. autoclass:: ConnectionInfo
    :members:
    :undoc-members:
@@ -342,6 +357,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateExternalLocation
    :members:
    :undoc-members:
@@ -373,7 +392,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: CreateFunctionSecurityType
 
-   Function security type.
+   The security type of the function.
 
    .. py:attribute:: DEFINER
       :value: "DEFINER"
@@ -403,10 +422,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateOnlineTableRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: CreateRegisteredModelRequest
    :members:
    :undoc-members:
@@ -431,6 +446,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CredentialInfo
+   :members:
+   :undoc-members:
+
+.. py:class:: CredentialPurpose
+
+   .. py:attribute:: SERVICE
+      :value: "SERVICE"
+
 .. py:class:: CredentialType
 
    The type of credential.
@@ -441,6 +465,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: USERNAME_PASSWORD
       :value: "USERNAME_PASSWORD"
 
+.. autoclass:: CredentialValidationResult
+   :members:
+   :undoc-members:
+
 .. autoclass:: CurrentWorkspaceBindings
    :members:
    :undoc-members:
@@ -530,6 +558,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteCredentialResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -636,7 +668,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: FunctionInfoSecurityType
 
-   Function security type.
+   The security type of the function.
 
    .. py:attribute:: DEFINER
       :value: "DEFINER"
@@ -683,6 +715,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GenerateTemporaryServiceCredentialAzureOptions
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryServiceCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: GenerateTemporaryTableCredentialRequest
    :members:
    :undoc-members:
@@ -699,6 +739,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
+   .. py:attribute:: SERVICE_CREDENTIAL
+      :value: "SERVICE_CREDENTIAL"
+
    .. py:attribute:: STORAGE_CREDENTIAL
       :value: "STORAGE_CREDENTIAL"
 
@@ -722,8 +765,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: IsolationMode
 
-   Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-
    .. py:attribute:: ISOLATION_MODE_ISOLATED
       :value: "ISOLATION_MODE_ISOLATED"
 
@@ -746,6 +787,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListCredentialsResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListExternalLocationsResponse
    :members:
    :undoc-members:
@@ -1239,6 +1284,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CONNECTION
       :value: "CONNECTION"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
@@ -1379,6 +1427,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: VIEW
       :value: "VIEW"
 
+.. autoclass:: TemporaryCredentials
+   :members:
+   :undoc-members:
+
 .. autoclass:: TriggeredUpdateStatus
    :members:
    :undoc-members:
@@ -1399,6 +1451,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
+   .. py:attribute:: SERVICE_CREDENTIAL
+      :value: "SERVICE_CREDENTIAL"
+
    .. py:attribute:: STORAGE_CREDENTIAL
       :value: "STORAGE_CREDENTIAL"
 
@@ -1410,6 +1465,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateExternalLocation
    :members:
    :undoc-members:
@@ -1476,6 +1535,27 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ValidateCredentialRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: ValidateCredentialResponse
+   :members:
+   :undoc-members:
+
+.. py:class:: ValidateCredentialResult
+
+   An enum that represents the result of the file operation
+
+   .. py:attribute:: FAIL
+      :value: "FAIL"
+
+   .. py:attribute:: PASS
+      :value: "PASS"
+
+   .. py:attribute:: SKIP
+      :value: "SKIP"
+
 .. autoclass:: ValidateStorageCredential
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 91de6ccb2..3d07ed346 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -4,18 +4,6 @@ Dashboards
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.dashboards`` module.
 
 .. py:currentmodule:: databricks.sdk.service.dashboards
-.. autoclass:: CreateDashboardRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateScheduleRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateSubscriptionRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: CronSchedule
    :members:
    :undoc-members:
@@ -166,6 +154,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION
       :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
 
+   .. py:attribute:: NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE
+      :value: "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE"
+
    .. py:attribute:: NO_QUERY_TO_VISUALIZE_EXCEPTION
       :value: "NO_QUERY_TO_VISUALIZE_EXCEPTION"
 
@@ -298,11 +289,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. autoclass:: UnpublishDashboardResponse
    :members:
    :undoc-members:
-
-.. autoclass:: UpdateDashboardRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: UpdateScheduleRequest
-   :members:
-   :undoc-members:
diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst
index bb48967db..c1029d842 100644
--- a/docs/dbdataclasses/marketplace.rst
+++ b/docs/dbdataclasses/marketplace.rst
@@ -29,6 +29,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ASSET_TYPE_NOTEBOOK
       :value: "ASSET_TYPE_NOTEBOOK"
 
+   .. py:attribute:: ASSET_TYPE_PARTNER_INTEGRATION
+      :value: "ASSET_TYPE_PARTNER_INTEGRATION"
+
 .. autoclass:: BatchGetListingsResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst
index 7990eae96..4c909d488 100644
--- a/docs/dbdataclasses/provisioning.rst
+++ b/docs/dbdataclasses/provisioning.rst
@@ -106,6 +106,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: VPC
       :value: "VPC"
 
+.. autoclass:: ExternalCustomerInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: GcpKeyInfo
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index 12043e3c5..7d556f8ad 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -4,6 +4,33 @@ Settings
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.settings`` module.
 
 .. py:currentmodule:: databricks.sdk.service.settings
+.. autoclass:: AibiDashboardEmbeddingAccessPolicy
+   :members:
+   :undoc-members:
+
+.. py:class:: AibiDashboardEmbeddingAccessPolicyAccessPolicyType
+
+   .. py:attribute:: ALLOW_ALL_DOMAINS
+      :value: "ALLOW_ALL_DOMAINS"
+
+   .. py:attribute:: ALLOW_APPROVED_DOMAINS
+      :value: "ALLOW_APPROVED_DOMAINS"
+
+   .. py:attribute:: DENY_ALL_DOMAINS
+      :value: "DENY_ALL_DOMAINS"
+
+.. autoclass:: AibiDashboardEmbeddingAccessPolicySetting
+   :members:
+   :undoc-members:
+
+.. autoclass:: AibiDashboardEmbeddingApprovedDomains
+   :members:
+   :undoc-members:
+
+.. autoclass:: AibiDashboardEmbeddingApprovedDomainsSetting
+   :members:
+   :undoc-members:
+
 .. autoclass:: AutomaticClusterUpdateSetting
    :members:
    :undoc-members:
@@ -543,6 +570,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN
       :value: "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
+.. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateAutomaticClusterUpdateSettingRequest
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index ded587fe5..cd4c2dcea 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -14,117 +14,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: TOKEN
       :value: "TOKEN"
 
-.. autoclass:: CentralCleanRoomInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomAssetInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCatalog
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCatalogUpdate
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCollaboratorInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomNotebookInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomTableInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: ColumnInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: ColumnMask
-   :members:
-   :undoc-members:
-
-.. py:class:: ColumnTypeName
-
-   Name of type (INT, STRUCT, MAP, etc.).
-
-   .. py:attribute:: ARRAY
-      :value: "ARRAY"
-
-   .. py:attribute:: BINARY
-      :value: "BINARY"
-
-   .. py:attribute:: BOOLEAN
-      :value: "BOOLEAN"
-
-   .. py:attribute:: BYTE
-      :value: "BYTE"
-
-   .. py:attribute:: CHAR
-      :value: "CHAR"
-
-   .. py:attribute:: DATE
-      :value: "DATE"
-
-   .. py:attribute:: DECIMAL
-      :value: "DECIMAL"
-
-   .. py:attribute:: DOUBLE
-      :value: "DOUBLE"
-
-   .. py:attribute:: FLOAT
-      :value: "FLOAT"
-
-   .. py:attribute:: INT
-      :value: "INT"
-
-   .. py:attribute:: INTERVAL
-      :value: "INTERVAL"
-
-   .. py:attribute:: LONG
-      :value: "LONG"
-
-   .. py:attribute:: MAP
-      :value: "MAP"
-
-   .. py:attribute:: NULL
-      :value: "NULL"
-
-   .. py:attribute:: SHORT
-      :value: "SHORT"
-
-   .. py:attribute:: STRING
-      :value: "STRING"
-
-   .. py:attribute:: STRUCT
-      :value: "STRUCT"
-
-   .. py:attribute:: TABLE_TYPE
-      :value: "TABLE_TYPE"
-
-   .. py:attribute:: TIMESTAMP
-      :value: "TIMESTAMP"
-
-   .. py:attribute:: TIMESTAMP_NTZ
-      :value: "TIMESTAMP_NTZ"
-
-   .. py:attribute:: USER_DEFINED_TYPE
-      :value: "USER_DEFINED_TYPE"
-
-.. autoclass:: CreateCleanRoom
-   :members:
-   :undoc-members:
-
 .. autoclass:: CreateProvider
    :members:
    :undoc-members:
@@ -153,10 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: ListCleanRoomsResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: ListProviderSharesResponse
    :members:
    :undoc-members:
@@ -435,10 +320,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: UPDATE
       :value: "UPDATE"
 
-.. autoclass:: UpdateCleanRoom
-   :members:
-   :undoc-members:
-
 .. autoclass:: UpdatePermissionsResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst
index 1657146c3..1a252f7c6 100644
--- a/docs/dbdataclasses/sql.rst
+++ b/docs/dbdataclasses/sql.rst
@@ -114,8 +114,8 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CHANNEL_NAME_PREVIEW
       :value: "CHANNEL_NAME_PREVIEW"
 
-   .. py:attribute:: CHANNEL_NAME_UNSPECIFIED
-      :value: "CHANNEL_NAME_UNSPECIFIED"
+   .. py:attribute:: CHANNEL_NAME_PREVIOUS
+      :value: "CHANNEL_NAME_PREVIOUS"
 
 .. autoclass:: ColumnInfo
    :members:
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index 774e75b8b..a24941242 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -7,26 +7,20 @@
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on.
 
-    .. py:method:: create(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> Wait[App]
+    .. py:method:: create( [, app: Optional[App]]) -> Wait[App]
 
         Create an app.
         
         Creates a new app.
         
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
         
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         
 
-    .. py:method:: create_and_wait(name: str [, description: Optional[str], resources: Optional[List[AppResource]], timeout: datetime.timedelta = 0:20:00]) -> App
+    .. py:method:: create_and_wait( [, app: Optional[App], timeout: datetime.timedelta = 0:20:00]) -> App
 
 
     .. py:method:: delete(name: str) -> App
@@ -41,7 +35,7 @@
         :returns: :class:`App`
         
 
-    .. py:method:: deploy(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str]]) -> Wait[AppDeployment]
+    .. py:method:: deploy(app_name: str [, app_deployment: Optional[AppDeployment]]) -> Wait[AppDeployment]
 
         Create an app deployment.
         
@@ -49,23 +43,14 @@
         
         :param app_name: str
           The name of the app.
-        :param deployment_id: str (optional)
-          The unique id of the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
-        :param source_code_path: str (optional)
-          The workspace file system path of the source code used to create the app deployment. This is
-          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
-          The former refers to the original source code location of the app in the workspace during deployment
-          creation, whereas the latter provides a system generated stable snapshotted source code path used by
-          the deployment.
+        :param app_deployment: :class:`AppDeployment` (optional)
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         
 
-    .. py:method:: deploy_and_wait(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
+    .. py:method:: deploy_and_wait(app_name: str [, app_deployment: Optional[AppDeployment], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
 
 
     .. py:method:: get(name: str) -> App
@@ -152,7 +137,8 @@
 
         Set app permissions.
         
-        Sets permissions on an app. Apps can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param app_name: str
           The app for which to get or manage permissions.
@@ -195,19 +181,15 @@
     .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
 
-    .. py:method:: update(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> App
+    .. py:method:: update(name: str [, app: Optional[App]]) -> App
 
         Update an app.
         
         Updates the app with the supplied name.
         
         :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+          The name of the app.
+        :param app: :class:`App` (optional)
         
         :returns: :class:`App`
         
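With the request-body refactor above, per-field keyword arguments collapse into a single `App` payload. A minimal sketch, using placeholder names:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
# name and description now travel on the App object rather than as
# separate keyword arguments.
app = w.apps.create_and_wait(app=App(name='my-app',
                                     description='example app'))
```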
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 365007b09..fc60b18f6 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -221,7 +221,6 @@
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the external location.
         :param owner: str (optional)
diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst
index 164832b0f..d0119657f 100644
--- a/docs/workspace/catalog/online_tables.rst
+++ b/docs/workspace/catalog/online_tables.rst
@@ -6,20 +6,23 @@
 
     Online tables provide lower latency and higher QPS access to data from Delta tables.
 
-    .. py:method:: create( [, name: Optional[str], spec: Optional[OnlineTableSpec]]) -> OnlineTable
+    .. py:method:: create( [, table: Optional[OnlineTable]]) -> Wait[OnlineTable]
 
         Create an Online Table.
         
         Create a new Online Table.
         
-        :param name: str (optional)
-          Full three-part (catalog, schema, table) name of the table.
-        :param spec: :class:`OnlineTableSpec` (optional)
-          Specification of the online table.
+        :param table: :class:`OnlineTable` (optional)
+          Online Table information.
         
-        :returns: :class:`OnlineTable`
+        :returns:
+          Long-running operation waiter for :class:`OnlineTable`.
+          See :method:wait_get_online_table_active for more details.
         
 
+    .. py:method:: create_and_wait( [, table: Optional[OnlineTable], timeout: datetime.timedelta = 0:20:00]) -> OnlineTable
+
+
     .. py:method:: delete(name: str)
 
         Delete an Online Table.
@@ -44,4 +47,6 @@
           Full three-part (catalog, schema, table) name of the table.
         
         :returns: :class:`OnlineTable`
-        
\ No newline at end of file
+        
+
+    .. py:method:: wait_get_online_table_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[OnlineTable], None]]) -> OnlineTable
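`create` is now a long-running operation, so `create_and_wait` blocks until the table reaches the active state. A minimal sketch, with placeholder table names; a real spec also selects a scheduling mode:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import OnlineTable, OnlineTableSpec

w = WorkspaceClient()
# The OnlineTable payload replaces the old name/spec keyword arguments.
table = w.online_tables.create_and_wait(table=OnlineTable(
    name='main.default.my_table_online',
    spec=OnlineTableSpec(source_table_full_name='main.default.my_table',
                         primary_key_columns=['id'])))
```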
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index 30b04654c..cac70a944 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -193,7 +193,6 @@
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the storage credential.
         :param owner: str (optional)
diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst
index 1cefc8ca6..65066964c 100644
--- a/docs/workspace/compute/cluster_policies.rst
+++ b/docs/workspace/compute/cluster_policies.rst
@@ -267,7 +267,8 @@
 
         Set cluster policy permissions.
         
-        Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index ac52edecb..24fe2d253 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -341,7 +341,7 @@
         Clusters created by the Databricks Jobs service cannot be edited.
         
         :param cluster_id: str
-          ID of the cluser
+          ID of the cluster
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -906,7 +906,8 @@
 
         Set cluster permissions.
         
-        Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_id: str
           The cluster for which to get or manage permissions.
diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst
index 277844170..333c44938 100644
--- a/docs/workspace/compute/instance_pools.rst
+++ b/docs/workspace/compute/instance_pools.rst
@@ -245,7 +245,8 @@
 
         Set instance pool permissions.
         
-        Sets permissions on an instance pool. Instance pools can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index fe358063c..0fe55542a 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -7,47 +7,29 @@
     These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete).
 
-    .. py:method:: create(display_name: str [, parent_path: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard
+    .. py:method:: create( [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Create dashboard.
         
         Create a draft dashboard.
         
-        :param display_name: str
-          The display name of the dashboard.
-        :param parent_path: str (optional)
-          The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         
 
-    .. py:method:: create_schedule(dashboard_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule
+    .. py:method:: create_schedule(dashboard_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Create dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         
 
-    .. py:method:: create_subscription(dashboard_id: str, schedule_id: str, subscriber: Subscriber) -> Subscription
+    .. py:method:: create_subscription(dashboard_id: str, schedule_id: str [, subscription: Optional[Subscription]]) -> Subscription
 
         Create schedule subscription.
         
@@ -55,8 +37,7 @@
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
-        :param subscriber: :class:`Subscriber`
-          Subscriber details for users and destinations to be added as subscribers to the schedule.
+        :param subscription: :class:`Subscription` (optional)
         
         :returns: :class:`Subscription`
         
@@ -250,7 +231,7 @@
         
         
 
-    .. py:method:: update(dashboard_id: str [, display_name: Optional[str], etag: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard
+    .. py:method:: update(dashboard_id: str [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Update dashboard.
         
@@ -258,25 +239,12 @@
         
         :param dashboard_id: str
           UUID identifying the dashboard.
-        :param display_name: str (optional)
-          The display name of the dashboard.
-        :param etag: str (optional)
-          The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         
 
-    .. py:method:: update_schedule(dashboard_id: str, schedule_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], etag: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule
+    .. py:method:: update_schedule(dashboard_id: str, schedule_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Update dashboard schedule.
         
@@ -284,15 +252,7 @@
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param etag: str (optional)
-          The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that
-          the schedule has not been modified since the last read, and can be optionally provided on delete.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         
\ No newline at end of file
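The Lakeview methods follow the same pattern: dashboard, schedule, and subscription fields move onto their dataclasses. A minimal sketch of `create`, with a placeholder warehouse ID:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import Dashboard

w = WorkspaceClient()
# display_name and warehouse_id are now fields of the Dashboard payload.
dashboard = w.lakeview.create(dashboard=Dashboard(
    display_name='sdk-example', warehouse_id='<warehouse-id>'))
```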
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 1f2fd2851..bf8f8e77f 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -153,7 +153,8 @@
 
         Set object permissions.
         
-        Sets permissions on an object. Objects can inherit permissions from their parent objects or root
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
         
         :param request_object_type: str
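The reworded docstring makes the replace semantics explicit: `set` overwrites every direct grant on the object (clearing them all when the list is empty), while `update` merges. A minimal sketch against the generic permissions API, with placeholder object ID and principal:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import iam

w = WorkspaceClient()
# set() replaces all direct permissions on the object in one call;
# anything not listed here is dropped.
w.permissions.set(request_object_type='clusters',
                  request_object_id='<cluster-id>',
                  access_control_list=[
                      iam.AccessControlRequest(
                          user_name='someone@example.com',
                          permission_level=iam.PermissionLevel.CAN_RESTART),
                  ])
```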
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst
index 2eae834a2..616ef7b86 100644
--- a/docs/workspace/iam/users.rst
+++ b/docs/workspace/iam/users.rst
@@ -239,7 +239,8 @@
 
         Set password permissions.
         
-        Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
         
diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst
index 1b6c5708c..d9ca84197 100644
--- a/docs/workspace/index.rst
+++ b/docs/workspace/index.rst
@@ -17,6 +17,7 @@ These APIs are available from WorkspaceClient
    marketplace/index
    ml/index
    pipelines/index
+   provisioning/index
    serving/index
    settings/index
    sharing/index
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index b097c94c8..0c6d51439 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -425,8 +425,8 @@
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.
         
         :returns: :class:`Run`
         
@@ -661,8 +661,9 @@
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run. for example `"param": "overriding_val"`
         :param latest_repair_id: int (optional)
@@ -854,8 +855,9 @@
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run. for example `"param": "overriding_val"`
         :param notebook_params: Dict[str,str] (optional)
@@ -926,7 +928,8 @@
 
         Set job permissions.
         
-        Sets permissions on a job. Jobs can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param job_id: str
           The job for which to get or manage permissions.
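One behavioral note on the `get_run` paging change earlier in this file: with `prev_page_token` gone, callers page forward only. A minimal sketch, assuming a placeholder run ID and that the `Run` response carries `next_page_token`:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
run = w.jobs.get_run(run_id=123)
tasks = list(run.tasks or [])
# Follow next_page_token until the server stops returning one,
# accumulating the tasks from each page.
while run.next_page_token:
    run = w.jobs.get_run(run_id=123, page_token=run.next_page_token)
    tasks.extend(run.tasks or [])
```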
diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst
index c09cfe353..44ceeef8c 100644
--- a/docs/workspace/ml/experiments.rst
+++ b/docs/workspace/ml/experiments.rst
@@ -578,7 +578,8 @@
 
         Set experiment permissions.
         
-        Sets permissions on an experiment. Experiments can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param experiment_id: str
           The experiment for which to get or manage permissions.
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index 8ac52916f..d08a85415 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -658,8 +658,8 @@
 
         Set registered model permissions.
         
-        Sets permissions on a registered model. Registered models can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index 9801a200e..39b5c9d77 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -324,7 +324,8 @@
 
         Set pipeline permissions.
         
-        Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
diff --git a/docs/workspace/provisioning/credentials.rst b/docs/workspace/provisioning/credentials.rst
new file mode 100644
index 000000000..8f38d13c4
--- /dev/null
+++ b/docs/workspace/provisioning/credentials.rst
@@ -0,0 +1,123 @@
+``w.credentials``: Credential configurations
+============================================
+.. currentmodule:: databricks.sdk.service.provisioning
+
+.. py:class:: CredentialsAPI
+
+    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
+    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+    new workspace. A credential configuration encapsulates this role information, and its ID is used when
+    creating a new workspace.
+
+    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
+
+
+        Usage:
+
+        .. code-block::
+
+            import os
+            import time
+            
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service import provisioning
+            
+            a = AccountClient()
+            
+            role = a.credentials.create(
+                credentials_name=f'sdk-{time.time_ns()}',
+                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
+                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+            
+            # cleanup
+            a.credentials.delete(credentials_id=role.credentials_id)
+
+        Create credential configuration.
+        
+        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+        ID) in the returned credential object, and configure the required access policy.
+        
+        Save the response's `credentials_id` field, which is the ID for your new credential configuration
+        object.
+        
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API]
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param credentials_name: str
+          The human-readable name of the credential configuration object.
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: delete(credentials_id: str)
+
+        Delete credential configuration.
+        
+        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+        delete a credential that is associated with any workspace.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        
+        
+
+    .. py:method:: get(credentials_id: str) -> Credential
+
+
+        Usage:
+
+        .. code-block::
+
+            import os
+            import time
+            
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service import provisioning
+            
+            a = AccountClient()
+            
+            role = a.credentials.create(
+                credentials_name=f'sdk-{time.time_ns()}',
+                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
+                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+            
+            by_id = a.credentials.get(credentials_id=role.credentials_id)
+            
+            # cleanup
+            a.credentials.delete(credentials_id=role.credentials_id)
+
+        Get credential configuration.
+        
+        Gets a Databricks credential configuration object for an account, both specified by ID.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: list() -> Iterator[Credential]
+
+
+        Usage:
+
+        .. code-block::
+
+            from databricks.sdk import AccountClient
+            
+            a = AccountClient()
+            
+            configs = a.credentials.list()
+
+        Get all credential configurations.
+        
+        Gets all Databricks credential configurations associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`Credential`
+        
\ No newline at end of file
diff --git a/docs/workspace/provisioning/index.rst b/docs/workspace/provisioning/index.rst
new file mode 100644
index 000000000..efe541424
--- /dev/null
+++ b/docs/workspace/provisioning/index.rst
@@ -0,0 +1,10 @@
+
+Provisioning
+============
+
+Resource management for secure Databricks Workspace deployment, cross-account IAM roles, storage, encryption, networking and private access.
+
+.. toctree::
+   :maxdepth: 1
+
+   credentials
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index cbcbca964..430a13182 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -266,8 +266,8 @@
 
         Set serving endpoint permissions.
         
-        Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
new file mode 100644
index 000000000..1480fc978
--- /dev/null
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -0,0 +1,42 @@
+``w.settings.aibi_dashboard_embedding_access_policy``: AI/BI Dashboard Embedding Access Policy
+==============================================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: AibiDashboardEmbeddingAccessPolicyAPI
+
+    Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting
+
+        Retrieve the AI/BI dashboard embedding access policy.
+        
+        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+        permitting AI/BI dashboards to be embedded on approved domains.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting
+
+        Update the AI/BI dashboard embedding access policy.
+        
+        Updates the AI/BI dashboard embedding access policy at the workspace level.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
\ No newline at end of file
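A minimal sketch of the new setting update, assuming the field names `access_policy_type` and `aibi_dashboard_embedding_access_policy` on the dataclasses introduced earlier in this patch:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import settings

w = WorkspaceClient()
access_policy = settings.AibiDashboardEmbeddingAccessPolicy(
    access_policy_type=settings.AibiDashboardEmbeddingAccessPolicyAccessPolicyType
    .ALLOW_APPROVED_DOMAINS)
# allow_missing must be true; field_mask names the field being updated.
w.settings.aibi_dashboard_embedding_access_policy.update(
    allow_missing=True,
    setting=settings.AibiDashboardEmbeddingAccessPolicySetting(
        aibi_dashboard_embedding_access_policy=access_policy),
    field_mask='aibi_dashboard_embedding_access_policy.access_policy_type')
```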
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
new file mode 100644
index 000000000..09b12056e
--- /dev/null
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -0,0 +1,42 @@
+``w.settings.aibi_dashboard_embedding_approved_domains``: AI/BI Dashboard Embedding Approved Domains
+====================================================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: AibiDashboardEmbeddingApprovedDomainsAPI
+
+    Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting
+
+        Retrieve the list of domains approved to host embedded AI/BI dashboards.
+        
+        Retrieves the list of domains approved to host embedded AI/BI dashboards.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting
+
+        Update the list of domains approved to host embedded AI/BI dashboards.
+        
+        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use comma as the separator (no space).
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
\ No newline at end of file
diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst
index 22655853b..c9e4f335d 100644
--- a/docs/workspace/settings/index.rst
+++ b/docs/workspace/settings/index.rst
@@ -11,6 +11,8 @@ Manage security settings for Accounts and Workspaces
    ip_access_lists
    notification_destinations
    settings
+   aibi_dashboard_embedding_access_policy
+   aibi_dashboard_embedding_approved_domains
    automatic_cluster_update
    compliance_security_profile
    default_namespace
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index 588031926..aa806280e 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -6,6 +6,18 @@
 
     Workspace Settings API allows users to manage settings at the workspace level.
 
+    .. py:property:: aibi_dashboard_embedding_access_policy
+        :type: AibiDashboardEmbeddingAccessPolicyAPI
+
+        Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+        workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+
+    .. py:property:: aibi_dashboard_embedding_approved_domains
+        :type: AibiDashboardEmbeddingApprovedDomainsAPI
+
+        Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+        can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+
     .. py:property:: automatic_cluster_update
         :type: AutomaticClusterUpdateAPI
 
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index d030a432f..9c938ce3e 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -143,7 +143,8 @@
 
         Set token permissions.
         
-        Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
         
diff --git a/docs/workspace/sharing/index.rst b/docs/workspace/sharing/index.rst
index e012eb548..09452b490 100644
--- a/docs/workspace/sharing/index.rst
+++ b/docs/workspace/sharing/index.rst
@@ -7,7 +7,6 @@ Configure data sharing with Unity Catalog for providers, recipients, and shares
 .. toctree::
    :maxdepth: 1
 
-   clean_rooms
    providers
    recipient_activation
    recipients
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 716fa4fdc..44f64b512 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -80,11 +80,10 @@
     outstanding statement might have already completed execution when the cancel request arrives. Polling for
     status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
     are approximate, occur server-side, and cannot account for things such as caller delays and network
-    latency from caller to service. - The system will auto-close a statement after one hour if the client
-    stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
-    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
-    Execution API to cancel it.
+    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
+    once every 15 minutes. - The results are only available for one hour after success; polling does not
+    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
+    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
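A hedged polling sketch consistent with the updated guidance (the statement ID is a hypothetical placeholder; `get_statement` and `StatementState` are the generated names in this SDK):

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import StatementState

w = WorkspaceClient()
statement_id = "<statement-id>"  # hypothetical placeholder

# Poll well within the 15-minute keep-alive window until a terminal state.
terminal = {StatementState.SUCCEEDED, StatementState.FAILED,
            StatementState.CANCELED, StatementState.CLOSED}
while True:
    resp = w.statement_execution.get_statement(statement_id)
    if resp.status.state in terminal:
        break
    time.sleep(60)
```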
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index 58b8a3fc0..fd55d5b0c 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -315,7 +315,8 @@
 
         Set SQL warehouse permissions.
         
-        Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
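Because the reworded docs describe replace semantics, a hedged sketch of a full-replacement call (the request and enum class names are assumed from the generated SQL service; the warehouse ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import (WarehouseAccessControlRequest,
                                        WarehousePermissionLevel)

w = WorkspaceClient()

# set_permissions replaces the ACL wholesale: any principal omitted from this
# list loses its direct grant on the warehouse.
w.warehouses.set_permissions(
    warehouse_id="<warehouse-id>",  # hypothetical placeholder
    access_control_list=[
        WarehouseAccessControlRequest(user_name="someone@example.com",
                                      permission_level=WarehousePermissionLevel.CAN_USE),
    ],
)
```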
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index 01b1c875f..3e826a064 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -157,7 +157,8 @@
 
         Set repo permissions.
         
-        Sets permissions on a repo. Repos can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param repo_id: str
           The repo for which to get or manage permissions.
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 4aee0a2b6..595872deb 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -272,8 +272,9 @@
 
         Set workspace object permissions.
         
-        Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent
-        objects or root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
         
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.

From f7f9a685c0f11d3bac1ebfe9ef829b3d061f8501 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Tue, 5 Nov 2024 18:23:17 +0100
Subject: [PATCH 067/136] [Internal] Update PR template (#814)

## What changes are proposed in this pull request?

This PR updates the PR template to remove outdated checkboxes and
emphasize testing.

## How is this tested?

N/A
---
 .github/PULL_REQUEST_TEMPLATE.md | 34 +++++++++++++++++++++++---------
 1 file changed, 25 insertions(+), 9 deletions(-)

diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index e2d7ab0db..91e519ede 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,12 +1,28 @@
-## Changes
-
+## What changes are proposed in this pull request?
 
-## Tests
-
+Provide the readers and reviewers with the information they need to understand
+this PR in a comprehensive manner. 
 
-- [ ] `make test` run locally
-- [ ] `make fmt` applied
-- [ ] relevant integration tests applied
+Specifically, try to answer the two following questions:
 
+- **WHAT** changes are being made in the PR? This should be a summary of the 
+  major changes to allow the reader to quickly understand the PR without having
+  to look at the code. 
+- **WHY** are these changes needed? This should provide the context that the 
+  reader might be missing. For example, were there any decisions behind the 
+  change that are not reflected in the code itself? 
+
+The “why” part is the more important of the two, as it usually cannot be
+inferred from the code itself. A well-written PR description will help future
+developers (including your future self) know how to interact with and update
+your code.
+
+## How is this tested?
+
+Describe any tests you have done, especially if the tests are not part of
+the unit tests (e.g. local tests).
+
+**ALWAYS ANSWER THIS QUESTION:** Answer with "N/A" if tests are not applicable
+to your PR (e.g. if the PR only modifies comments). Do not be afraid to answer
+"Not tested" if the PR has not been tested. Being clear about what has and has
+not been done provides important context to the reviewers.
\ No newline at end of file

From 2143e35cab88e22c2d4e1d79be9dd09422b30983 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Fri, 8 Nov 2024 13:10:46 +0100
Subject: [PATCH 068/136] [Feature] Read streams by 1MB chunks by default.
 (#817)

## What changes are proposed in this pull request?

This PR changes the `_BaseClient` to read streams in chunks of 1MB by
default. 1MB was chosen as a good compromise between speed and memory
usage (see PR #319).

Note that this is not a new feature per se, as it was already possible to
configure the chunk size on the returned `_StreamingResponse` before calling
its read method. However, that functionality was not easy to discover and
led several users to experience memory issues. The new default behavior
is more defensive.
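For illustration, a minimal sketch of what this means for callers (constructor defaults are taken from this PR's diff; the 64KB override is just an example):

```python
from databricks.sdk._base_client import _BaseClient

# Raw responses are now read in 1MB chunks by default.
client = _BaseClient()  # streaming_buffer_size defaults to 1024 * 1024

# Memory-constrained callers can opt into smaller chunks.
small_chunks = _BaseClient(streaming_buffer_size=64 * 1024)
```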

## How is this tested?

Added a few test cases to verify that streams are chunked as expected.

---------

Signed-off-by: Renaud Hartert 
---
 databricks/sdk/_base_client.py | 14 +++++++++++--
 tests/test_base_client.py      | 38 ++++++++++++++++++++++++++++++++++
 2 files changed, 50 insertions(+), 2 deletions(-)

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 95ce39cbe..6424fc1bb 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -50,7 +50,8 @@ def __init__(self,
                  http_timeout_seconds: float = None,
                  extra_error_customizers: List[_ErrorCustomizer] = None,
                  debug_headers: bool = False,
-                 clock: Clock = None):
+                 clock: Clock = None,
+                 streaming_buffer_size: int = 1024 * 1024): # 1MB
         """
         :param debug_truncate_bytes:
         :param retry_timeout_seconds:
@@ -68,6 +69,7 @@ def __init__(self,
         :param extra_error_customizers:
         :param debug_headers: Whether to include debug headers in the request log.
         :param clock: Clock object to use for time-related operations.
+        :param streaming_buffer_size: The size of the buffer to use for streaming responses.
         """
 
         self._debug_truncate_bytes = debug_truncate_bytes or 96
@@ -78,6 +80,7 @@ def __init__(self,
         self._clock = clock or RealClock()
         self._session = requests.Session()
         self._session.auth = self._authenticate
+        self._streaming_buffer_size = streaming_buffer_size
 
         # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
         # retry strategy established in the Databricks SDK for Go. See _is_retryable and
@@ -158,7 +161,9 @@ def do(self,
         for header in response_headers if response_headers else []:
             resp[header] = response.headers.get(Casing.to_header_case(header))
         if raw:
-            resp["contents"] = _StreamingResponse(response)
+            streaming_response = _StreamingResponse(response)
+            streaming_response.set_chunk_size(self._streaming_buffer_size)
+            resp["contents"] = streaming_response
             return resp
         if not len(response.content):
             return resp
@@ -283,6 +288,11 @@ def isatty(self) -> bool:
         return False
 
     def read(self, n: int = -1) -> bytes:
+        """
+        Read up to n bytes from the response stream. If n is negative, read 
+        until the end of the stream. 
+        """
+
         self._open()
         read_everything = n < 0
         remaining_bytes = n
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index e9e7324a9..b55f4e7f8 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,5 +1,7 @@
+import random
 from http.server import BaseHTTPRequestHandler
 from typing import Iterator, List
+from unittest.mock import Mock
 
 import pytest
 import requests
@@ -276,3 +278,39 @@ def inner(h: BaseHTTPRequestHandler):
         assert 'foo' in res
 
     assert len(requests) == 2
+
+
+@pytest.mark.parametrize('chunk_size,expected_chunks,data_size',
+                         [(5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
+                          (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
+                          (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
+                          ])
+def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
+    rng = random.Random(42)
+    test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
+
+    content_chunks = []
+    mock_response = Mock(spec=requests.Response)
+
+    def mock_iter_content(chunk_size):
+        # Simulate how requests would chunk the data.
+        for i in range(0, len(test_data), chunk_size):
+            chunk = test_data[i:i + chunk_size]
+            content_chunks.append(chunk) # track chunks for verification
+            yield chunk
+
+    mock_response.iter_content = mock_iter_content
+    stream = _StreamingResponse(mock_response)
+    stream.set_chunk_size(chunk_size)
+
+    # Read all data one byte at a time.
+    received_data = b""
+    while True:
+        chunk = stream.read(1)
+        if not chunk:
+            break
+        received_data += chunk
+
+    assert received_data == test_data # all data was received correctly
+    assert len(content_chunks) == expected_chunks # correct number of chunks
+    assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size

From 271502bd55e916f245c6e1563f1528938db0774e Mon Sep 17 00:00:00 2001
From: Giorgi Kikolashvili <47174341+gkiko10@users.noreply.github.com>
Date: Sat, 9 Nov 2024 12:07:12 +0100
Subject: [PATCH 069/136] [Internal] Update Jobs GetRun API to support
 paginated responses for jobs and ForEach tasks (#819)

## What changes are proposed in this pull request?

Introduces an extension for the jobs `get_run` call that paginates the
tasks and iterations arrays in the response and returns an aggregated
response to the caller. This change prepares for the Jobs API 2.2
release, which serves paginated responses. Pagination ends once
`next_page_token` is absent from the response. The pagination logic is
not exposed to the customer.
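For example (a hedged sketch; the run ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# get_run follows next_page_token internally and returns one aggregated Run,
# so tasks (or ForEach iterations) from all pages land in a single object.
run = w.jobs.get_run(1042)  # hypothetical run ID
print(len(run.tasks))
```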

## How is this tested?

Unit tests and manual test
---
 databricks/sdk/__init__.py    |   5 +-
 databricks/sdk/mixins/jobs.py |  49 ++++++++++++++
 tests/test_jobs_mixin.py      | 123 ++++++++++++++++++++++++++++++++++
 3 files changed, 175 insertions(+), 2 deletions(-)
 create mode 100644 databricks/sdk/mixins/jobs.py
 create mode 100644 tests/test_jobs_mixin.py

diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 746f8d7e9..4f4689af2 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -6,6 +6,7 @@
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
@@ -204,7 +205,7 @@ def __init__(self,
         self._instance_pools = InstancePoolsAPI(self._api_client)
         self._instance_profiles = InstanceProfilesAPI(self._api_client)
         self._ip_access_lists = IpAccessListsAPI(self._api_client)
-        self._jobs = JobsAPI(self._api_client)
+        self._jobs = JobsExt(self._api_client)
         self._lakeview = LakeviewAPI(self._api_client)
         self._libraries = LibrariesAPI(self._api_client)
         self._metastores = MetastoresAPI(self._api_client)
@@ -450,7 +451,7 @@ def ip_access_lists(self) -> IpAccessListsAPI:
         return self._ip_access_lists
 
     @property
-    def jobs(self) -> JobsAPI:
+    def jobs(self) -> JobsExt:
         """The Jobs API allows you to create, edit, and delete jobs."""
         return self._jobs
 
diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py
new file mode 100644
index 000000000..01fb013be
--- /dev/null
+++ b/databricks/sdk/mixins/jobs.py
@@ -0,0 +1,49 @@
+from typing import Optional
+
+from databricks.sdk.service import jobs
+
+
+class JobsExt(jobs.JobsAPI):
+
+    def get_run(self,
+                run_id: int,
+                *,
+                include_history: Optional[bool] = None,
+                include_resolved_values: Optional[bool] = None,
+                page_token: Optional[str] = None) -> jobs.Run:
+        """
+        This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
+        it will paginate through all pages and aggregate the results.
+        :param run_id: int
+          The canonical identifier of the run for which to retrieve the metadata. This field is required.
+        :param include_history: bool (optional)
+          Whether to include the repair history in the response.
+        :param include_resolved_values: bool (optional)
+          Whether to include resolved parameter values in the response.
+        :param page_token: str (optional)
+          To list the next page or the previous page of job tasks, set this field to the value of the
+          `next_page_token` or `prev_page_token` returned in the GetRun response.
+        :returns: :class:`Run`
+        """
+        run = super().get_run(run_id,
+                              include_history=include_history,
+                              include_resolved_values=include_resolved_values,
+                              page_token=page_token)
+
+        # When querying a Job run, a page token is returned when there are more than 100 tasks. No iterations are defined for a Job run. Therefore, the next page in the response only includes the next page of tasks.
+        # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. Therefore, the client only reads the iterations from the next page and not the tasks.
+        is_paginating_iterations = run.iterations is not None and len(run.iterations) > 0
+
+        while run.next_page_token is not None:
+            next_run = super().get_run(run_id,
+                                       include_history=include_history,
+                                       include_resolved_values=include_resolved_values,
+                                       page_token=run.next_page_token)
+            if is_paginating_iterations:
+                run.iterations.extend(next_run.iterations)
+            else:
+                run.tasks.extend(next_run.tasks)
+            run.next_page_token = next_run.next_page_token
+
+        run.prev_page_token = None
+        return run
\ No newline at end of file
diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py
new file mode 100644
index 000000000..9b5f27138
--- /dev/null
+++ b/tests/test_jobs_mixin.py
@@ -0,0 +1,123 @@
+import json
+import re
+from typing import Pattern
+
+from databricks.sdk import WorkspaceClient
+
+
+def make_path_pattern(run_id: int, page_token: str) -> Pattern[str]:
+    return re.compile(
+        rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?page_token={page_token}&run_id={run_id}")}'
+    )
+
+
+def test_get_run_with_no_pagination(config, requests_mock):
+    run1 = {"tasks": [{"run_id": 0}, {"run_id": 1}], }
+    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {"tasks": [{'run_id': 0}, {'run_id': 1}], }
+
+
+def test_get_run_pagination_with_tasks(config, requests_mock):
+    run1 = {
+        "tasks": [{
+            "run_id": 0
+        }, {
+            "run_id": 1
+        }],
+        "next_page_token": "tokenToSecondPage",
+        "prev_page_token": "tokenToPreviousPage"
+    }
+    run2 = {
+        "tasks": [{
+            "run_id": 2
+        }, {
+            "run_id": 3
+        }],
+        "next_page_token": "tokenToThirdPage",
+        "prev_page_token": "initialToken"
+    }
+    run3 = {"tasks": [{"run_id": 4}], "next_page_token": None, "prev_page_token": "tokenToSecondPage"}
+    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {
+        "tasks": [{
+            'run_id': 0
+        }, {
+            'run_id': 1
+        }, {
+            'run_id': 2
+        }, {
+            'run_id': 3
+        }, {
+            'run_id': 4
+        }],
+    }
+
+
+def test_get_run_pagination_with_iterations(config, requests_mock):
+    run1 = {
+        "tasks": [{
+            "run_id": 1337
+        }],
+        "iterations": [{
+            "run_id": 0
+        }, {
+            "run_id": 1
+        }],
+        "next_page_token": "tokenToSecondPage",
+        "prev_page_token": "tokenToPreviousPage"
+    }
+    run2 = {
+        "tasks": [{
+            "run_id": 1337
+        }],
+        "iterations": [{
+            "run_id": 2
+        }, {
+            "run_id": 3
+        }],
+        "next_page_token": "tokenToThirdPage",
+        "prev_page_token": "initialToken"
+    }
+    run3 = {
+        "tasks": [{
+            "run_id": 1337
+        }],
+        "iterations": [{
+            "run_id": 4
+        }],
+        "next_page_token": None,
+        "prev_page_token": "tokenToSecondPage"
+    }
+    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {
+        "tasks": [{
+            'run_id': 1337
+        }],
+        "iterations": [{
+            'run_id': 0
+        }, {
+            'run_id': 1
+        }, {
+            'run_id': 2
+        }, {
+            'run_id': 3
+        }, {
+            'run_id': 4
+        }],
+    }
\ No newline at end of file

From ee6e70a1e3a38b465405d41f39f4447ad7dd3090 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Thu, 14 Nov 2024 20:53:33 +0100
Subject: [PATCH 070/136] [Internal] Reformat SDK with YAPF 0.43. (#822)

## What changes are proposed in this pull request?

This PR is a no-op that reformats the SDK with the new version of `yapf`
(0.43.0), which changed some formatting rules. This fixes a current issue
with the `fmt` CI check, which is failing at the head of `main`.

## How is this tested?

N/A
---
 databricks/sdk/config.py               | 19 ++++++++++---------
 databricks/sdk/credentials_provider.py | 11 ++++++-----
 tests/integration/test_auth.py         | 19 ++++++++++---------
 tests/integration/test_jobs.py         | 25 +++++++++++++------------
 tests/test_base_client.py              | 12 +++++++-----
 tests/test_core.py                     | 22 ++++++++++++++--------
 tests/test_model_serving_auth.py       | 17 ++++++++++-------
 7 files changed, 70 insertions(+), 55 deletions(-)

diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index b4efdf603..387fa65c5 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -92,15 +92,16 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
-    def __init__(self,
-                 *,
-                 # Deprecated. Use credentials_strategy instead.
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 product=None,
-                 product_version=None,
-                 clock: Optional[Clock] = None,
-                 **kwargs):
+    def __init__(
+            self,
+            *,
+            # Deprecated. Use credentials_strategy instead.
+            credentials_provider: Optional[CredentialsStrategy] = None,
+            credentials_strategy: Optional[CredentialsStrategy] = None,
+            product=None,
+            product_version=None,
+            clock: Optional[Clock] = None,
+            **kwargs):
         self._header_factory = None
         self._inner = {}
         self._user_agent_other_info = []
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index a79151b5a..e91e37af4 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -304,11 +304,12 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]:
         # detect Azure AD Tenant ID if it's not specified directly
         token_endpoint = cfg.oidc_endpoints.token_endpoint
         cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, '').split('/')[0]
-    inner = ClientCredentials(client_id=cfg.azure_client_id,
-                              client_secret="", # we have no (rotatable) secrets in OIDC flow
-                              token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
-                              endpoint_params=params,
-                              use_params=True)
+    inner = ClientCredentials(
+        client_id=cfg.azure_client_id,
+        client_secret="", # we have no (rotatable) secrets in OIDC flow
+        token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
+        endpoint_params=params,
+        use_params=True)
 
     def refreshed_headers() -> Dict[str, str]:
         token = inner.token()
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 0bf7f951d..3ee271778 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -133,15 +133,16 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib
 
     tasks = []
     for v in dbr_versions:
-        t = Task(task_key=f'test_{v.key.replace(".", "_")}',
-                 notebook_task=NotebookTask(notebook_path=notebook_path),
-                 new_cluster=ClusterSpec(
-                     spark_version=v.key,
-                     num_workers=1,
-                     instance_pool_id=instance_pool_id,
-                     # GCP uses "custom" data security mode by default, which does not support UC.
-                     data_security_mode=DataSecurityMode.SINGLE_USER),
-                 libraries=[library])
+        t = Task(
+            task_key=f'test_{v.key.replace(".", "_")}',
+            notebook_task=NotebookTask(notebook_path=notebook_path),
+            new_cluster=ClusterSpec(
+                spark_version=v.key,
+                num_workers=1,
+                instance_pool_id=instance_pool_id,
+                # GCP uses "custom" data security mode by default, which does not support UC.
+                data_security_mode=DataSecurityMode.SINGLE_USER),
+            libraries=[library])
         tasks.append(t)
 
     waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py
index 8fd5f8820..768752a75 100644
--- a/tests/integration/test_jobs.py
+++ b/tests/integration/test_jobs.py
@@ -17,18 +17,19 @@ def test_submitting_jobs(w, random, env_or_skip):
     with w.dbfs.open(py_on_dbfs, write=True, overwrite=True) as f:
         f.write(b'import time; time.sleep(10); print("Hello, World!")')
 
-    waiter = w.jobs.submit(run_name=f'py-sdk-{random(8)}',
-                           tasks=[
-                               jobs.SubmitTask(
-                                   task_key='pi',
-                                   new_cluster=compute.ClusterSpec(
-                                       spark_version=w.clusters.select_spark_version(long_term_support=True),
-                                       # node_type_id=w.clusters.select_node_type(local_disk=True),
-                                       instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
-                                       num_workers=1),
-                                   spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'),
-                               )
-                           ])
+    waiter = w.jobs.submit(
+        run_name=f'py-sdk-{random(8)}',
+        tasks=[
+            jobs.SubmitTask(
+                task_key='pi',
+                new_cluster=compute.ClusterSpec(
+                    spark_version=w.clusters.select_spark_version(long_term_support=True),
+                    # node_type_id=w.clusters.select_node_type(local_disk=True),
+                    instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
+                    num_workers=1),
+                spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'),
+            )
+        ])
 
     logging.info(f'starting to poll: {waiter.run_id}')
 
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index b55f4e7f8..1e133b8fc 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -280,11 +280,13 @@ def inner(h: BaseHTTPRequestHandler):
     assert len(requests) == 2
 
 
-@pytest.mark.parametrize('chunk_size,expected_chunks,data_size',
-                         [(5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
-                          (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
-                          (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
-                          ])
+@pytest.mark.parametrize(
+    'chunk_size,expected_chunks,data_size',
+    [
+        (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
+        (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
+        (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
+    ])
 def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
     rng = random.Random(42)
     test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
diff --git a/tests/test_core.py b/tests/test_core.py
index 16a4c2ad6..1cca428cb 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -370,14 +370,20 @@ def inner(h: BaseHTTPRequestHandler):
         assert {'Authorization': 'Taker this-is-it'} == headers
 
 
-@pytest.mark.parametrize(['azure_environment', 'expected'],
-                         [('PUBLIC', ENVIRONMENTS['PUBLIC']), ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']),
-                          ('CHINA', ENVIRONMENTS['CHINA']), ('public', ENVIRONMENTS['PUBLIC']),
-                          ('usgovernment', ENVIRONMENTS['USGOVERNMENT']), ('china', ENVIRONMENTS['CHINA']),
-                          # Kept for historical compatibility
-                          ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']),
-                          ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']),
-                          ('AzureChinaCloud', ENVIRONMENTS['CHINA']), ])
+@pytest.mark.parametrize(
+    ['azure_environment', 'expected'],
+    [
+        ('PUBLIC', ENVIRONMENTS['PUBLIC']),
+        ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']),
+        ('CHINA', ENVIRONMENTS['CHINA']),
+        ('public', ENVIRONMENTS['PUBLIC']),
+        ('usgovernment', ENVIRONMENTS['USGOVERNMENT']),
+        ('china', ENVIRONMENTS['CHINA']),
+        # Kept for historical compatibility
+        ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']),
+        ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']),
+        ('AzureChinaCloud', ENVIRONMENTS['CHINA']),
+    ])
 def test_azure_environment(azure_environment, expected):
     c = Config(credentials_strategy=noop_credentials,
                azure_workspace_resource_id='...',
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index e0e368fae..13f55668c 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -47,13 +47,16 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
 
 
-@pytest.mark.parametrize("env_values, oauth_file_name", [
-    ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
-    ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
-      ], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
-])
+@pytest.mark.parametrize(
+    "env_values, oauth_file_name",
+    [
+        ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
+        ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
+    ])
 @raises(default_auth_base_error_message)
 def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
     # Guarantee that the tests defaults to env variables rather than config file.

From e8b79166503fc2daf80bc8d64df7802bb8705c0e Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Fri, 15 Nov 2024 14:00:07 +0100
Subject: [PATCH 071/136] [Fix] Rewind seekable streams before retrying (#821)

## What changes are proposed in this pull request?

This PR adapts the retry mechanism of `_BaseClient` to only retry if (i)
the request is not a stream, or (ii) the stream is seekable and can be
reset to its initial position. This fixes a bug that caused retries to
ignore parts of the request body that had already been consumed in
previous attempts.
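Conceptually, the mechanism reduces to the following sketch (simplified from `_BaseClient`, not the exact code):

```python
import io

def _is_seekable_stream(data) -> bool:
    # Only io.IOBase instances that report seekable() can be rewound safely.
    return isinstance(data, io.IOBase) and data.seekable()

body = io.BytesIO(b"payload")
initial = body.tell()            # remember where the body started
body.read()                      # a failed attempt consumes the stream
if _is_seekable_stream(body):
    body.seek(initial)           # rewind so the retry re-reads the full body
```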

## How is this tested?

Added unit tests to verify that (i) non-seekable streams are not
retried, and (ii) seekable streams are properly reset before retrying.
---
 databricks/sdk/_base_client.py |  61 +++++++++++++---
 tests/test_base_client.py      | 129 +++++++++++++++++++++++++++++++++
 2 files changed, 178 insertions(+), 12 deletions(-)

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 6424fc1bb..ed85dc470 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -1,3 +1,4 @@
+import io
 import logging
 import urllib.parse
 from datetime import timedelta
@@ -130,6 +131,14 @@ def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
         flattened = dict(flatten_dict(with_fixed_bools))
         return flattened
 
+    @staticmethod
+    def _is_seekable_stream(data) -> bool:
+        if data is None:
+            return False
+        if not isinstance(data, io.IOBase):
+            return False
+        return data.seekable()
+
     def do(self,
            method: str,
            url: str,
@@ -144,18 +153,31 @@ def do(self,
         if headers is None:
             headers = {}
         headers['User-Agent'] = self._user_agent_base
-        retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
-                            is_retryable=self._is_retryable,
-                            clock=self._clock)
-        response = retryable(self._perform)(method,
-                                            url,
-                                            query=query,
-                                            headers=headers,
-                                            body=body,
-                                            raw=raw,
-                                            files=files,
-                                            data=data,
-                                            auth=auth)
+
+        # Wrap strings and bytes in a seekable stream so that we can rewind them.
+        if isinstance(data, (str, bytes)):
+            data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
+
+        # Only retry if the request is not a stream or if the stream is seekable and
+        # we can rewind it. This is necessary to avoid bugs where the retry doesn't
+        # re-read already read data from the body.
+        if data is not None and not self._is_seekable_stream(data):
+            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
+            call = self._perform
+        else:
+            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                           is_retryable=self._is_retryable,
+                           clock=self._clock)(self._perform)
+
+        response = call(method,
+                        url,
+                        query=query,
+                        headers=headers,
+                        body=body,
+                        raw=raw,
+                        files=files,
+                        data=data,
+                        auth=auth)
 
         resp = dict()
         for header in response_headers if response_headers else []:
@@ -226,6 +248,12 @@ def _perform(self,
                  files=None,
                  data=None,
                  auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
+        # Keep track of the initial position of the stream so that we can rewind it if
+        # we need to retry the request.
+        initial_data_position = 0
+        if self._is_seekable_stream(data):
+            initial_data_position = data.tell()
+
         response = self._session.request(method,
                                          url,
                                          params=self._fix_query_string(query),
@@ -237,9 +265,18 @@ def _perform(self,
                                          stream=raw,
                                          timeout=self._http_timeout_seconds)
         self._record_request_log(response, raw=raw or data is not None or files is not None)
+
         error = self._error_parser.get_api_error(response)
         if error is not None:
+            # If the request body is a seekable stream, rewind it so that it is ready
+            # to be read again in case of a retry.
+            #
+            # TODO: This should be moved into a "before-retry" hook to avoid one
+            # unnecessary seek on the last failed retry before aborting.
+            if self._is_seekable_stream(data):
+                data.seek(initial_data_position)
             raise error from None
+
         return response
 
     def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index 1e133b8fc..4b6aaa714 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,3 +1,4 @@
+import io
 import random
 from http.server import BaseHTTPRequestHandler
 from typing import Iterator, List
@@ -316,3 +317,131 @@ def mock_iter_content(chunk_size):
     assert received_data == test_data # all data was received correctly
     assert len(content_chunks) == expected_chunks # correct number of chunks
     assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
+
+
+def test_is_seekable_stream():
+    client = _BaseClient()
+
+    # Test various input types that are not streams.
+    assert not client._is_seekable_stream(None) # None
+    assert not client._is_seekable_stream("string data") # str
+    assert not client._is_seekable_stream(b"binary data") # bytes
+    assert not client._is_seekable_stream(["list", "data"]) # list
+    assert not client._is_seekable_stream(42) # int
+
+    # Test non-seekable stream.
+    non_seekable = io.BytesIO(b"test data")
+    non_seekable.seekable = lambda: False
+    assert not client._is_seekable_stream(non_seekable)
+
+    # Test seekable streams.
+    assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO
+    assert client._is_seekable_stream(io.StringIO("test data")) # StringIO
+
+    # Test file objects.
+    with open(__file__, 'rb') as f:
+        assert client._is_seekable_stream(f) # File object
+
+    # Test custom seekable stream.
+    class CustomSeekableStream(io.IOBase):
+
+        def seekable(self):
+            return True
+
+        def seek(self, offset, whence=0):
+            return 0
+
+        def tell(self):
+            return 0
+
+    assert client._is_seekable_stream(CustomSeekableStream())
+
+
+@pytest.mark.parametrize(
+    'input_data',
+    [
+        b"0123456789", # bytes -> BytesIO
+        "0123456789", # str -> BytesIO
+        io.BytesIO(b"0123456789"), # BytesIO directly
+        io.StringIO("0123456789"), # StringIO
+    ])
+def test_reset_seekable_stream_on_retry(input_data):
+    received_data = []
+
+    # Retry two times before succeeding.
+    def inner(h: BaseHTTPRequestHandler):
+        if len(received_data) == 2:
+            h.send_response(200)
+            h.end_headers()
+        else:
+            h.send_response(429)
+            h.end_headers()
+
+        content_length = int(h.headers.get('Content-Length', 0))
+        if content_length > 0:
+            received_data.append(h.rfile.read(content_length))
+
+    with http_fixture_server(inner) as host:
+        client = _BaseClient()
+
+        # Retries should reset the stream.
+        client.do('POST', f'{host}/foo', data=input_data)
+
+        assert received_data == [b"0123456789", b"0123456789", b"0123456789"]
+
+
+def test_reset_seekable_stream_to_their_initial_position_on_retry():
+    received_data = []
+
+    # Retry two times before succeeding.
+    def inner(h: BaseHTTPRequestHandler):
+        if len(received_data) == 2:
+            h.send_response(200)
+            h.end_headers()
+        else:
+            h.send_response(429)
+            h.end_headers()
+
+        content_length = int(h.headers.get('Content-Length', 0))
+        if content_length > 0:
+            received_data.append(h.rfile.read(content_length))
+
+    input_data = io.BytesIO(b"0123456789")
+    input_data.seek(4)
+
+    with http_fixture_server(inner) as host:
+        client = _BaseClient()
+
+        # Retries should reset the stream.
+        client.do('POST', f'{host}/foo', data=input_data)
+
+        assert received_data == [b"456789", b"456789", b"456789"]
+        assert input_data.tell() == 10 # EOF
+
+
+def test_no_retry_or_reset_on_non_seekable_stream():
+    requests = []
+
+    # Always respond with a response that triggers a retry.
+    def inner(h: BaseHTTPRequestHandler):
+        content_length = int(h.headers.get('Content-Length', 0))
+        if content_length > 0:
+            requests.append(h.rfile.read(content_length))
+
+        h.send_response(429)
+        h.send_header('Retry-After', '1')
+        h.end_headers()
+
+    input_data = io.BytesIO(b"0123456789")
+    input_data.seekable = lambda: False # makes the stream appear non-seekable
+
+    with http_fixture_server(inner) as host:
+        client = _BaseClient()
+
+        # Should raise error immediately without retry.
+        with pytest.raises(DatabricksError):
+            client.do('POST', f'{host}/foo', data=input_data)
+
+        # Verify that only one request was made (no retries).
+        assert requests == [b"0123456789"]
+        assert input_data.tell() == 10 # EOF

From d516d1ee6239f78b097df5dd19452837e0375fed Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Mon, 18 Nov 2024 14:53:19 +0100
Subject: [PATCH 072/136] [Release] Release v0.38.0 (#826)

### New Features and Improvements

* Read streams by 1MB chunks by default.
([#817](https://github.com/databricks/databricks-sdk-py/pull/817)).

### Bug Fixes

* Rewind seekable streams before retrying
([#821](https://github.com/databricks/databricks-sdk-py/pull/821)).
* Properly serialize nested data classes.

### Internal Changes

* Reformat SDK with YAPF 0.43.
([#822](https://github.com/databricks/databricks-sdk-py/pull/822)).
* Update Jobs GetRun API to support paginated responses for jobs and
ForEach tasks
([#819](https://github.com/databricks/databricks-sdk-py/pull/819)).

### API Changes:

* Added `service_principal_client_id` field for
`databricks.sdk.service.apps.App`.
* Added `azure_service_principal`, `gcp_service_account_key` and
`read_only` fields for
`databricks.sdk.service.catalog.CreateCredentialRequest`.
* Added `azure_service_principal`, `read_only` and
`used_for_managed_storage` fields for
`databricks.sdk.service.catalog.CredentialInfo`.
* Added `omit_username` field for
`databricks.sdk.service.catalog.ListTablesRequest`.
* Added `azure_service_principal` and `read_only` fields for
`databricks.sdk.service.catalog.UpdateCredentialRequest`.
* Added `external_location_name`, `read_only` and `url` fields for
`databricks.sdk.service.catalog.ValidateCredentialRequest`.
* Added `is_dir` field for
`databricks.sdk.service.catalog.ValidateCredentialResponse`.
* Added `only` field for `databricks.sdk.service.jobs.RunNow`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.CreatePipeline`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.EditPipeline`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.PipelineSpec`.
* Added `private_access_settings_id` field for
`databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Changed `create_credential()` and
`generate_temporary_service_credential()` methods for
[w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html)
workspace-level service with new required argument order.
* Changed `access_connector_id` field for
`databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
* Changed `name` field for
`databricks.sdk.service.catalog.CreateCredentialRequest` to be required.
* Changed `credential_name` field for
`databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`
to be required.

OpenAPI SHA: f2385add116e3716c8a90a0b68e204deb40f996c, Date: 2024-11-15
---
 .codegen/_openapi_sha                         |   2 +-
 databricks/sdk/service/apps.py                |  14 +-
 databricks/sdk/service/billing.py             |   2 +-
 databricks/sdk/service/catalog.py             | 228 +++++++++++++++---
 databricks/sdk/service/dashboards.py          |  20 +-
 databricks/sdk/service/iam.py                 |   4 +-
 databricks/sdk/service/jobs.py                |  25 +-
 databricks/sdk/service/oauth2.py              |   1 +
 databricks/sdk/service/pipelines.py           |  97 ++++++--
 databricks/sdk/service/provisioning.py        |  15 ++
 databricks/sdk/service/settings.py            |   4 +-
 databricks/sdk/service/sharing.py             |   2 +
 databricks/sdk/service/workspace.py           |   3 +-
 docs/account/billing/budgets.rst              |   2 +-
 docs/account/iam/workspace_assignment.rst     |   2 +-
 .../account/oauth2/custom_app_integration.rst |   1 +
 docs/account/provisioning/workspaces.rst      |   7 +-
 docs/dbdataclasses/catalog.rst                |   7 +
 docs/dbdataclasses/pipelines.rst              |  29 +++
 docs/dbdataclasses/sharing.rst                |   6 +
 docs/dbdataclasses/workspace.rst              |   3 +
 docs/workspace/apps/apps.rst                  |   3 +-
 docs/workspace/catalog/tables.rst             |   5 +-
 docs/workspace/dashboards/lakeview.rst        |  10 +-
 docs/workspace/jobs/jobs.rst                  |  24 +-
 docs/workspace/pipelines/pipelines.rst        |  12 +-
 .../settings/notification_destinations.rst    |   1 +
 docs/workspace/settings/token_management.rst  |   2 +-
 docs/workspace/workspace/repos.rst            |   2 +-
 29 files changed, 429 insertions(+), 104 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 00e5d84f9..a2ba58aa5 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-5285ce76f81314f342c1702d5c2ad4ef42488781
\ No newline at end of file
+f2385add116e3716c8a90a0b68e204deb40f996c
\ No newline at end of file
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 4123ea08c..a08a7e66d 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -52,6 +52,8 @@ class App:
     resources: Optional[List[AppResource]] = None
     """Resources for the app."""
 
+    service_principal_client_id: Optional[str] = None
+
     service_principal_id: Optional[int] = None
 
     service_principal_name: Optional[str] = None
@@ -79,6 +81,8 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
         if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
+        if self.service_principal_client_id is not None:
+            body['service_principal_client_id'] = self.service_principal_client_id
         if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
         if self.service_principal_name is not None:
             body['service_principal_name'] = self.service_principal_name
@@ -100,6 +104,7 @@ def from_dict(cls, d: Dict[str, any]) -> App:
                    name=d.get('name', None),
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                    resources=_repeated_dict(d, 'resources', AppResource),
+                   service_principal_client_id=d.get('service_principal_client_id', None),
                    service_principal_id=d.get('service_principal_id', None),
                    service_principal_name=d.get('service_principal_name', None),
                    update_time=d.get('update_time', None),
@@ -798,7 +803,7 @@ def create(self, *, app: Optional[App] = None) -> Wait[App]:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
-        body = app
+        body = app.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
@@ -836,7 +841,7 @@ def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = Non
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         """
-        body = app_deployment
+        body = app_deployment.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST',
@@ -1053,12 +1058,13 @@ def update(self, name: str, *, app: Optional[App] = None) -> App:
         Updates the app with the supplied name.
         
         :param name: str
-          The name of the app.
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
         :param app: :class:`App` (optional)
         
         :returns: :class:`App`
         """
-        body = app
+        body = app.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index cfb7ba0b4..8375a2629 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -1121,7 +1121,7 @@ def get(self, budget_id: str) -> GetBudgetConfigurationResponse:
         Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          The Databricks budget configuration ID.
+          The budget configuration ID
         
         :returns: :class:`GetBudgetConfigurationResponse`
         """
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 3943608ef..d24ad54e6 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -415,7 +415,7 @@ def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
 class AzureManagedIdentity:
     """The Azure managed identity configuration."""
 
-    access_connector_id: Optional[str] = None
+    access_connector_id: str
     """The Azure resource ID of the Azure Databricks Access Connector. Use the format
     `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`."""
 
@@ -508,6 +508,8 @@ def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
 
 @dataclass
 class AzureServicePrincipal:
+    """The Azure service principal configuration."""
+
     directory_id: str
     """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application."""
 
@@ -1161,22 +1163,31 @@ def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
 
 @dataclass
 class CreateCredentialRequest:
+    name: str
+    """The credential name. The name must be unique among storage and service credentials within the
+    metastore."""
+
     aws_iam_role: Optional[AwsIamRole] = None
     """The AWS IAM role configuration"""
 
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
 
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration."""
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    name: Optional[str] = None
-    """The credential name. The name must be unique among storage and service credentials within the
-    metastore."""
+    gcp_service_account_key: Optional[GcpServiceAccountKey] = None
 
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
 
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
+
     skip_validation: Optional[bool] = None
     """Optional. Supplying true to this argument skips validation of the created set of credentials."""
 
@@ -1185,9 +1196,14 @@ def as_dict(self) -> dict:
         body = {}
         if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
         if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.comment is not None: body['comment'] = self.comment
+        if self.gcp_service_account_key:
+            body['gcp_service_account_key'] = self.gcp_service_account_key.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
@@ -1196,9 +1212,12 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
         return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    comment=d.get('comment', None),
+                   gcp_service_account_key=_from_dict(d, 'gcp_service_account_key', GcpServiceAccountKey),
                    name=d.get('name', None),
                    purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
                    skip_validation=d.get('skip_validation', None))
 
 
@@ -1796,6 +1815,9 @@ class CredentialInfo:
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
 
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration."""
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
@@ -1827,17 +1849,27 @@ class CredentialInfo:
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
 
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
+
     updated_at: Optional[int] = None
     """Time at which this credential was last modified, in epoch milliseconds."""
 
     updated_by: Optional[str] = None
     """Username of user who last modified the credential."""
 
+    used_for_managed_storage: Optional[bool] = None
+    """Whether this credential is the current metastore's root storage credential. Only applicable when
+    purpose is **STORAGE**."""
+
     def as_dict(self) -> dict:
         """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
         if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.created_at is not None: body['created_at'] = self.created_at
         if self.created_by is not None: body['created_by'] = self.created_by
@@ -1848,8 +1880,11 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.owner is not None: body['owner'] = self.owner
         if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
     @classmethod
@@ -1857,6 +1892,7 @@ def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
         return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    comment=d.get('comment', None),
                    created_at=d.get('created_at', None),
                    created_by=d.get('created_by', None),
@@ -1867,13 +1903,16 @@ def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
                    name=d.get('name', None),
                    owner=d.get('owner', None),
                    purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
                    updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
+                   updated_by=d.get('updated_by', None),
+                   used_for_managed_storage=d.get('used_for_managed_storage', None))
 
 
 class CredentialPurpose(Enum):
 
     SERVICE = 'SERVICE'
+    STORAGE = 'STORAGE'
 
 
 class CredentialType(Enum):
@@ -2751,6 +2790,35 @@ def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
         return cls(oauth_token=d.get('oauth_token', None))
 
 
+@dataclass
+class GcpServiceAccountKey:
+    """GCP long-lived credential. GCP Service Account."""
+
+    email: Optional[str] = None
+    """The email of the service account."""
+
+    private_key: Optional[str] = None
+    """The service account's RSA private key."""
+
+    private_key_id: Optional[str] = None
+    """The ID of the service account's private key."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GcpServiceAccountKey into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.email is not None: body['email'] = self.email
+        if self.private_key is not None: body['private_key'] = self.private_key
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GcpServiceAccountKey:
+        """Deserializes the GcpServiceAccountKey from a dictionary."""
+        return cls(email=d.get('email', None),
+                   private_key=d.get('private_key', None),
+                   private_key_id=d.get('private_key_id', None))
+
+
 @dataclass
 class GenerateTemporaryServiceCredentialAzureOptions:
     """Options to customize the requested temporary credential"""
@@ -2774,12 +2842,12 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzure
 
 @dataclass
 class GenerateTemporaryServiceCredentialRequest:
+    credential_name: str
+    """The name of the service credential used to generate a temporary credential"""
+
     azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
     """Options to customize the requested temporary credential"""
 
-    credential_name: Optional[str] = None
-    """The name of the service credential used to generate a temporary credential"""
-
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -5661,11 +5729,15 @@ class UpdateCredentialRequest:
     azure_managed_identity: Optional[AzureManagedIdentity] = None
     """The Azure managed identity configuration."""
 
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration."""
+
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
     force: Optional[bool] = None
-    """Force update even if there are dependent services."""
+    """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+    external locations and external tables (when purpose is **STORAGE**)."""
 
     isolation_mode: Optional[IsolationMode] = None
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
@@ -5679,6 +5751,10 @@ class UpdateCredentialRequest:
     owner: Optional[str] = None
     """Username of current owner of credential."""
 
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
+
     skip_validation: Optional[bool] = None
     """Supply true to this argument to skip validation of the updated credential."""
 
@@ -5687,12 +5763,15 @@ def as_dict(self) -> dict:
         body = {}
         if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
         if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.comment is not None: body['comment'] = self.comment
         if self.force is not None: body['force'] = self.force
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name_arg is not None: body['name_arg'] = self.name_arg
         if self.new_name is not None: body['new_name'] = self.new_name
         if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
@@ -5701,12 +5780,14 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
         """Deserializes the UpdateCredentialRequest from a dictionary."""
         return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    comment=d.get('comment', None),
                    force=d.get('force', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    name_arg=d.get('name_arg', None),
                    new_name=d.get('new_name', None),
                    owner=d.get('owner', None),
+                   read_only=d.get('read_only', None),
                    skip_validation=d.get('skip_validation', None))
 
 
@@ -6310,16 +6391,31 @@ class ValidateCredentialRequest:
     credential_name: Optional[str] = None
     """Required. The name of an existing credential or long-lived cloud credential to validate."""
 
+    external_location_name: Optional[str] = None
+    """The name of an existing external location to validate. Only applicable for storage credentials
+    (purpose is **STORAGE**)."""
+
     purpose: Optional[CredentialPurpose] = None
     """The purpose of the credential. This should only be used when the credential is specified."""
 
+    read_only: Optional[bool] = None
+    """Whether the credential is only usable for read operations. Only applicable for storage
+    credentials (purpose is **STORAGE**)."""
+
+    url: Optional[str] = None
+    """The external location url to validate. Only applicable when purpose is **STORAGE**."""
+
     def as_dict(self) -> dict:
         """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
         if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
         if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
         if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.url is not None: body['url'] = self.url
         return body
 
     @classmethod
@@ -6328,24 +6424,33 @@ def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest:
         return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                    credential_name=d.get('credential_name', None),
-                   purpose=_enum(d, 'purpose', CredentialPurpose))
+                   external_location_name=d.get('external_location_name', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
+                   url=d.get('url', None))
 
 
 @dataclass
 class ValidateCredentialResponse:
+    is_dir: Optional[bool] = None
+    """Whether the tested location is a directory in cloud storage. Only applicable for when purpose is
+    **STORAGE**."""
+
     results: Optional[List[CredentialValidationResult]] = None
     """The results of the validation check."""
 
     def as_dict(self) -> dict:
         """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.is_dir is not None: body['isDir'] = self.is_dir
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse:
         """Deserializes the ValidateCredentialResponse from a dictionary."""
-        return cls(results=_repeated_dict(d, 'results', CredentialValidationResult))
+        return cls(is_dir=d.get('isDir', None),
+                   results=_repeated_dict(d, 'results', CredentialValidationResult))
 
 
 class ValidateCredentialResult(Enum):
@@ -7405,28 +7510,41 @@ def __init__(self, api_client):
         self._api = api_client
 
     def create_credential(self,
+                          name: str,
                           *,
                           aws_iam_role: Optional[AwsIamRole] = None,
                           azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          azure_service_principal: Optional[AzureServicePrincipal] = None,
                           comment: Optional[str] = None,
-                          name: Optional[str] = None,
+                          gcp_service_account_key: Optional[GcpServiceAccountKey] = None,
                           purpose: Optional[CredentialPurpose] = None,
+                          read_only: Optional[bool] = None,
                           skip_validation: Optional[bool] = None) -> CredentialInfo:
         """Create a credential.
         
-        Creates a new credential.
+        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+        which should be either **SERVICE** or **STORAGE**.
         
+        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+        
+        :param name: str
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
         :param aws_iam_role: :class:`AwsIamRole` (optional)
           The AWS IAM role configuration
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
           The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
         :param comment: str (optional)
           Comment associated with the credential.
-        :param name: str (optional)
-          The credential name. The name must be unique among storage and service credentials within the
-          metastore.
+        :param gcp_service_account_key: :class:`GcpServiceAccountKey` (optional)
         :param purpose: :class:`CredentialPurpose` (optional)
           Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
         :param skip_validation: bool (optional)
           Optional. Supplying true to this argument skips validation of the created set of credentials.
         
@@ -7436,9 +7554,14 @@ def create_credential(self,
         if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
             body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
+        if gcp_service_account_key is not None:
+            body['gcp_service_account_key'] = gcp_service_account_key.as_dict()
         if name is not None: body['name'] = name
         if purpose is not None: body['purpose'] = purpose.value
+        if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
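A usage sketch for the updated signature, assuming the service is exposed on the workspace client as `w.credentials` and that `AwsIamRole` carries a `role_arn` field; all names and the ARN are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()

# `name` is now required; `read_only` only applies when purpose is STORAGE.
cred = w.credentials.create_credential(
    name="my-storage-cred",
    aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/my-role"),
    purpose=CredentialPurpose.STORAGE,
    read_only=True)
print(cred.name, cred.purpose)
```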
 
@@ -7448,12 +7571,14 @@ def create_credential(self,
     def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         """Delete a credential.
         
-        Deletes a credential from the metastore. The caller must be an owner of the credential.
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.
         
         :param name_arg: str
           Name of the credential.
         :param force: bool (optional)
-          Force deletion even if there are dependent services.
+          Force deletion even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
         
         
         """
@@ -7465,19 +7590,20 @@ def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)
 
     def generate_temporary_service_credential(
-            self,
-            *,
-            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
-            credential_name: Optional[str] = None) -> TemporaryCredentials:
+        self,
+        credential_name: str,
+        *,
+        azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
+    ) -> TemporaryCredentials:
         """Generate a temporary service credential.
         
         Returns a set of temporary credentials generated using the specified service credential. The caller
         must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
         
+        :param credential_name: str
+          The name of the service credential used to generate a temporary credential
         :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
           Options to customize the requested temporary credential
-        :param credential_name: str (optional)
-          The name of the service credential used to generate a temporary credential
         
         :returns: :class:`TemporaryCredentials`
         """
@@ -7495,8 +7621,8 @@ def generate_temporary_service_credential(
     def get_credential(self, name_arg: str) -> CredentialInfo:
         """Get a credential.
         
-        Gets a credential from the metastore. The caller must be a metastore admin, the owner of the
-        credential, or have any permission on the credential.
+        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+        owner of the credential, or have any permission on the credential.
         
         :param name_arg: str
           Name of the credential.
@@ -7555,15 +7681,17 @@ def update_credential(self,
                           *,
                           aws_iam_role: Optional[AwsIamRole] = None,
                           azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          azure_service_principal: Optional[AzureServicePrincipal] = None,
                           comment: Optional[str] = None,
                           force: Optional[bool] = None,
                           isolation_mode: Optional[IsolationMode] = None,
                           new_name: Optional[str] = None,
                           owner: Optional[str] = None,
+                          read_only: Optional[bool] = None,
                           skip_validation: Optional[bool] = None) -> CredentialInfo:
         """Update a credential.
         
-        Updates a credential on the metastore.
+        Updates a service or storage credential on the metastore.
         
         The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
         If the caller is a metastore admin, only the __owner__ field can be changed.
@@ -7574,16 +7702,22 @@ def update_credential(self,
           The AWS IAM role configuration
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
           The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
         :param comment: str (optional)
           Comment associated with the credential.
         :param force: bool (optional)
-          Force update even if there are dependent services.
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
         :param isolation_mode: :class:`IsolationMode` (optional)
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name of credential.
         :param owner: str (optional)
           Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
         :param skip_validation: bool (optional)
           Supply true to this argument to skip validation of the updated credential.
         
@@ -7593,11 +7727,14 @@ def update_credential(self,
         if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
         if azure_managed_identity is not None:
             body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
         if force is not None: body['force'] = force
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
         if owner is not None: body['owner'] = owner
+        if read_only is not None: body['read_only'] = read_only
         if skip_validation is not None: body['skip_validation'] = skip_validation
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -7612,14 +7749,25 @@ def validate_credential(self,
                             aws_iam_role: Optional[AwsIamRole] = None,
                             azure_managed_identity: Optional[AzureManagedIdentity] = None,
                             credential_name: Optional[str] = None,
-                            purpose: Optional[CredentialPurpose] = None) -> ValidateCredentialResponse:
+                            external_location_name: Optional[str] = None,
+                            purpose: Optional[CredentialPurpose] = None,
+                            read_only: Optional[bool] = None,
+                            url: Optional[str] = None) -> ValidateCredentialResponse:
         """Validate a credential.
         
         Validates a credential.
         
-        Either the __credential_name__ or the cloud-specific credential must be provided.
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.
         
-        The caller must be a metastore admin or the credential owner.
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ needs to be provided. If only one of them is provided, it is used for validation; if both
+        are provided, the __url__ is used for validation and __external_location_name__ is ignored when
+        checking for overlapping URLs. Either the __credential_name__ or the cloud-specific credential
+        must be provided.
+        
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
         
         :param aws_iam_role: :class:`AwsIamRole` (optional)
           The AWS IAM role configuration
@@ -7627,8 +7775,16 @@ def validate_credential(self,
           The Azure managed identity configuration.
         :param credential_name: str (optional)
           Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**).
         :param purpose: :class:`CredentialPurpose` (optional)
           The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**).
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.
         
         :returns: :class:`ValidateCredentialResponse`
         """
@@ -7637,7 +7793,10 @@ def validate_credential(self,
         if azure_managed_identity is not None:
             body['azure_managed_identity'] = azure_managed_identity.as_dict()
         if credential_name is not None: body['credential_name'] = credential_name
+        if external_location_name is not None: body['external_location_name'] = external_location_name
         if purpose is not None: body['purpose'] = purpose.value
+        if read_only is not None: body['read_only'] = read_only
+        if url is not None: body['url'] = url
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers)
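A sketch of validating a storage credential against a cloud storage URL using the new parameters; the accessor, credential name, and URL are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CredentialPurpose

w = WorkspaceClient()
resp = w.credentials.validate_credential(
    credential_name="my-storage-cred",
    purpose=CredentialPurpose.STORAGE,
    url="s3://my-bucket/some/prefix",
    read_only=True)

# `is_dir` is only populated for STORAGE validations.
print(resp.is_dir, resp.results)
```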
@@ -8640,7 +8799,7 @@ def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
           Long-running operation waiter for :class:`OnlineTable`.
           See :method:wait_get_online_table_active for more details.
         """
-        body = table
+        body = table.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
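The fix here (and in the Lakeview methods below) is that the request body must be a plain, JSON-serializable dict rather than the dataclass instance itself; `as_dict()` performs that conversion. A minimal sketch with a hypothetical table name:

```python
from databricks.sdk.service.catalog import OnlineTable

table = OnlineTable(name="main.default.my_table_online")
body = table.as_dict()  # {'name': 'main.default.my_table_online'}
```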
@@ -10128,6 +10287,7 @@ def list(self,
              max_results: Optional[int] = None,
              omit_columns: Optional[bool] = None,
              omit_properties: Optional[bool] = None,
+             omit_username: Optional[bool] = None,
              page_token: Optional[str] = None) -> Iterator[TableInfo]:
         """List tables.
         
@@ -10157,6 +10317,9 @@ def list(self,
           Whether to omit the columns of the table from the response or not.
         :param omit_properties: bool (optional)
           Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+          not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).
         
@@ -10172,6 +10335,7 @@ def list(self,
         if max_results is not None: query['max_results'] = max_results
         if omit_columns is not None: query['omit_columns'] = omit_columns
         if omit_properties is not None: query['omit_properties'] = omit_properties
+        if omit_username is not None: query['omit_username'] = omit_username
         if page_token is not None: query['page_token'] = page_token
         if schema_name is not None: query['schema_name'] = schema_name
         headers = {'Accept': 'application/json', }
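Usage sketch for the new flag; the catalog and schema names are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for table in w.tables.list(catalog_name="main",
                           schema_name="default",
                           omit_username=True):
    # owner, created_by and updated_by are omitted from each TableInfo
    print(table.full_name)
```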
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 1b02d8c89..5f9fe2c2c 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -1144,7 +1144,7 @@ def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         
         :returns: :class:`Dashboard`
         """
-        body = dashboard
+        body = dashboard.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
@@ -1159,7 +1159,7 @@ def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = N
         
         :returns: :class:`Schedule`
         """
-        body = schedule
+        body = schedule.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -1183,7 +1183,7 @@ def create_subscription(self,
         
         :returns: :class:`Subscription`
         """
-        body = subscription
+        body = subscription.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do(
@@ -1269,7 +1269,7 @@ def get_published(self, dashboard_id: str) -> PublishedDashboard:
         Get the current published dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         :returns: :class:`PublishedDashboard`
         """
@@ -1364,7 +1364,7 @@ def list_schedules(self,
         """List dashboard schedules.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
+          UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -1400,9 +1400,9 @@ def list_subscriptions(self,
         """List schedule subscriptions.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
+          UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
+          UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -1508,7 +1508,7 @@ def unpublish(self, dashboard_id: str):
         Unpublish the dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         
         """
@@ -1528,7 +1528,7 @@ def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) ->
         
         :returns: :class:`Dashboard`
         """
-        body = dashboard
+        body = dashboard.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
@@ -1552,7 +1552,7 @@ def update_schedule(self,
         
         :returns: :class:`Schedule`
         """
-        body = schedule
+        body = schedule.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PUT',
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 05d1ccce3..fc0122b2b 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1150,7 +1150,7 @@ class UpdateWorkspaceAssignments:
     """The ID of the user, service principal, or group."""
 
     workspace_id: Optional[int] = None
-    """The workspace ID for the account."""
+    """The workspace ID."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -3385,7 +3385,7 @@ def update(self,
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
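For context, a sketch of the account-level call this docstring describes; the IDs and permission are placeholders:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.iam import WorkspacePermission

a = AccountClient()
a.workspace_assignment.update(workspace_id=1234567890,
                              principal_id=987654321,
                              permissions=[WorkspacePermission.USER])
```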
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 82d3bac65..ab485b33c 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -574,8 +574,7 @@ class CreateJob:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1752,8 +1751,7 @@ class JobRunAs:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -1861,8 +1859,7 @@ class JobSettings:
     """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
     as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -3371,6 +3368,10 @@ class RunNow:
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
+    only: Optional[List[str]] = None
+    """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
+    job will be run."""
+
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""
 
@@ -3425,6 +3426,7 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.only: body['only'] = [v for v in self.only]
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
         if self.python_named_params: body['python_named_params'] = self.python_named_params
         if self.python_params: body['python_params'] = [v for v in self.python_params]
@@ -3442,6 +3444,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunNow:
                    job_id=d.get('job_id', None),
                    job_parameters=d.get('job_parameters', None),
                    notebook_params=d.get('notebook_params', None),
+                   only=d.get('only', None),
                    pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
                    python_named_params=d.get('python_named_params', None),
                    python_params=d.get('python_params', None),
@@ -5754,8 +5757,7 @@ def create(self,
           Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
           not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -6275,6 +6277,7 @@ def run_now(self,
                 jar_params: Optional[List[str]] = None,
                 job_parameters: Optional[Dict[str, str]] = None,
                 notebook_params: Optional[Dict[str, str]] = None,
+                only: Optional[List[str]] = None,
                 pipeline_params: Optional[PipelineParams] = None,
                 python_named_params: Optional[Dict[str, str]] = None,
                 python_params: Optional[List[str]] = None,
@@ -6331,6 +6334,9 @@ def run_now(self,
           
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside the job. If this field is not provided, all tasks in the job
+          will be run.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
@@ -6382,6 +6388,7 @@ def run_now(self,
         if job_id is not None: body['job_id'] = job_id
         if job_parameters is not None: body['job_parameters'] = job_parameters
         if notebook_params is not None: body['notebook_params'] = notebook_params
+        if only is not None: body['only'] = [v for v in only]
         if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
         if python_named_params is not None: body['python_named_params'] = python_named_params
         if python_params is not None: body['python_params'] = [v for v in python_params]
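A sketch of triggering a subset of tasks with the new `only` field (job ID and task keys are placeholders); `run_now` returns a waiter, and `.result()` blocks until the run finishes:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
waiter = w.jobs.run_now(job_id=123456, only=["ingest", "transform"])
run = waiter.result()  # waits for the selected tasks to complete
print(run.state)
```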
@@ -6403,6 +6410,7 @@ def run_now_and_wait(self,
                          jar_params: Optional[List[str]] = None,
                          job_parameters: Optional[Dict[str, str]] = None,
                          notebook_params: Optional[Dict[str, str]] = None,
+                         only: Optional[List[str]] = None,
                          pipeline_params: Optional[PipelineParams] = None,
                          python_named_params: Optional[Dict[str, str]] = None,
                          python_params: Optional[List[str]] = None,
@@ -6416,6 +6424,7 @@ def run_now_and_wait(self,
                             job_id=job_id,
                             job_parameters=job_parameters,
                             notebook_params=notebook_params,
+                            only=only,
                             pipeline_params=pipeline_params,
                             python_named_params=python_named_params,
                             python_params=python_params,
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 01edcdf50..7bfc8fe1a 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -666,6 +666,7 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput:
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
+          The OAuth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         """
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 26461d088..137ab3c21 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -61,7 +61,7 @@ class CreatePipeline:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -82,6 +82,9 @@ class CreatePipeline:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -122,6 +125,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -151,6 +155,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -285,7 +290,7 @@ class EditPipeline:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -309,6 +314,9 @@ class EditPipeline:
     pipeline_id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -351,6 +359,7 @@ def as_dict(self) -> dict:
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -381,6 +390,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
                    pipeline_id=d.get('pipeline_id', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -588,13 +598,13 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
 @dataclass
 class IngestionConfig:
     report: Optional[ReportSpec] = None
-    """Select tables from a specific source report."""
+    """Select a specific source report."""
 
     schema: Optional[SchemaSpec] = None
-    """Select tables from a specific source schema."""
+    """Select all tables from a specific source schema."""
 
     table: Optional[TableSpec] = None
-    """Select tables from a specific source table."""
+    """Select a specific source table."""
 
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
@@ -615,11 +625,11 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
 @dataclass
 class IngestionGatewayPipelineDefinition:
     connection_id: Optional[str] = None
-    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection this gateway
-    pipeline uses to communicate with the source."""
+    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
+    gateway pipeline uses to communicate with the source."""
 
     connection_name: Optional[str] = None
-    """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
+    """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
     source."""
 
     gateway_storage_catalog: Optional[str] = None
@@ -658,12 +668,12 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
 @dataclass
 class IngestionPipelineDefinition:
     connection_name: Optional[str] = None
-    """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the
-    source. Specify either ingestion_gateway_id or connection_name."""
+    """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with
+    the source. This is used with connectors for applications like Salesforce, Workday, and so on."""
 
     ingestion_gateway_id: Optional[str] = None
-    """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate
-    with the source. Specify either ingestion_gateway_id or connection_name."""
+    """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
+    with the source database. This is used with connectors to databases like SQL Server."""
 
     objects: Optional[List[IngestionConfig]] = None
     """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
@@ -1450,7 +1460,7 @@ class PipelineSpec:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -1471,6 +1481,9 @@ class PipelineSpec:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -1509,6 +1522,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -1536,6 +1550,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -1674,6 +1689,50 @@ def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
                    table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
 
 
+@dataclass
+class RestartWindow:
+    start_hour: int
+    """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+    Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
+
+    days_of_week: Optional[RestartWindowDaysOfWeek] = None
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified, all days of the week will be used."""
+
+    time_zone_id: Optional[str] = None
+    """Time zone id of restart window. See
+    https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+    for details. If not specified, UTC will be used."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.days_of_week is not None: body['days_of_week'] = self.days_of_week.value
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
+        """Deserializes the RestartWindow from a dictionary."""
+        return cls(days_of_week=_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
+                   start_hour=d.get('start_hour', None),
+                   time_zone_id=d.get('time_zone_id', None))
+
+
+class RestartWindowDaysOfWeek(Enum):
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified, all days of the week will be used."""
+
+    FRIDAY = 'FRIDAY'
+    MONDAY = 'MONDAY'
+    SATURDAY = 'SATURDAY'
+    SUNDAY = 'SUNDAY'
+    THURSDAY = 'THURSDAY'
+    TUESDAY = 'TUESDAY'
+    WEDNESDAY = 'WEDNESDAY'
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
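A sketch of attaching the new restart window to a pipeline; the pipeline ID is a placeholder. Note that in this revision `days_of_week` takes a single enum value, not a list:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (RestartWindow,
                                              RestartWindowDaysOfWeek)

w = WorkspaceClient()
window = RestartWindow(start_hour=2,
                       days_of_week=RestartWindowDaysOfWeek.SUNDAY,
                       time_zone_id="America/Los_Angeles")
w.pipelines.update(pipeline_id="0123-456789-abcdef01", restart_window=window)
```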
@@ -2211,6 +2270,7 @@ def create(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               restart_window: Optional[RestartWindow] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -2247,7 +2307,7 @@ def create(self,
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -2261,6 +2321,8 @@ def create(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -2296,6 +2358,7 @@ def create(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
@@ -2629,6 +2692,7 @@ def update(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               restart_window: Optional[RestartWindow] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -2668,7 +2732,7 @@ def update(self,
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -2682,6 +2746,8 @@ def update(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -2717,6 +2783,7 @@ def update(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py
index b1d825d1a..d108f7984 100755
--- a/databricks/sdk/service/provisioning.py
+++ b/databricks/sdk/service/provisioning.py
@@ -1245,6 +1245,10 @@ class UpdateWorkspaceRequest:
     customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC
     to a customer-managed VPC by updating the workspace to add a network configuration ID."""
 
+    private_access_settings_id: Optional[str] = None
+    """The ID of the workspace's private access settings configuration object. This parameter is
+    available only for updating failed workspaces."""
+
     storage_configuration_id: Optional[str] = None
     """The ID of the workspace's storage configuration object. This parameter is available only for
     updating failed workspaces."""
@@ -1267,6 +1271,8 @@ def as_dict(self) -> dict:
         if self.network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = self.network_connectivity_config_id
         if self.network_id is not None: body['network_id'] = self.network_id
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
             body['storage_configuration_id'] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
@@ -1284,6 +1290,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest:
                                                                   None),
                    network_connectivity_config_id=d.get('network_connectivity_config_id', None),
                    network_id=d.get('network_id', None),
+                   private_access_settings_id=d.get('private_access_settings_id', None),
                    storage_configuration_id=d.get('storage_configuration_id', None),
                    storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
                    workspace_id=d.get('workspace_id', None))
@@ -2706,6 +2713,7 @@ def update(self,
                managed_services_customer_managed_key_id: Optional[str] = None,
                network_connectivity_config_id: Optional[str] = None,
                network_id: Optional[str] = None,
+               private_access_settings_id: Optional[str] = None,
                storage_configuration_id: Optional[str] = None,
                storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
         """Update workspace configuration.
@@ -2824,6 +2832,9 @@ def update(self,
           The ID of the workspace's network configuration object. Used only if you already use a
           customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
           customer-managed VPC by updating the workspace to add a network configuration ID.
+        :param private_access_settings_id: str (optional)
+          The ID of the workspace's private access settings configuration object. This parameter is available
+          only for updating failed workspaces.
         :param storage_configuration_id: str (optional)
           The ID of the workspace's storage configuration object. This parameter is available only for
           updating failed workspaces.
@@ -2844,6 +2855,8 @@ def update(self,
         if network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = network_connectivity_config_id
         if network_id is not None: body['network_id'] = network_id
+        if private_access_settings_id is not None:
+            body['private_access_settings_id'] = private_access_settings_id
         if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
         if storage_customer_managed_key_id is not None:
             body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
@@ -2867,6 +2880,7 @@ def update_and_wait(
         managed_services_customer_managed_key_id: Optional[str] = None,
         network_connectivity_config_id: Optional[str] = None,
         network_id: Optional[str] = None,
+        private_access_settings_id: Optional[str] = None,
         storage_configuration_id: Optional[str] = None,
         storage_customer_managed_key_id: Optional[str] = None,
         timeout=timedelta(minutes=20)) -> Workspace:
@@ -2876,6 +2890,7 @@ def update_and_wait(
                            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
                            network_connectivity_config_id=network_connectivity_config_id,
                            network_id=network_id,
+                           private_access_settings_id=private_access_settings_id,
                            storage_configuration_id=storage_configuration_id,
                            storage_customer_managed_key_id=storage_customer_managed_key_id,
                            workspace_id=workspace_id).result(timeout=timeout)
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index 607cc3085..d6294b261 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -2943,6 +2943,7 @@ class UpdateNotificationDestinationRequest:
     """The display name for the notification destination."""
 
     id: Optional[str] = None
+    """UUID identifying notification destination."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
@@ -4670,6 +4671,7 @@ def update(self,
         required in the request body.
         
         :param id: str
+          UUID identifying notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
@@ -4984,7 +4986,7 @@ def delete(self, token_id: str):
         Deletes a token, specified by its ID.
         
         :param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.
         
         
         """
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 091fa9e82..13cba2ccf 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -984,6 +984,8 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
 class SharedDataObjectDataObjectType(Enum):
     """The type of the data object."""
 
+    FEATURE_SPEC = 'FEATURE_SPEC'
+    FUNCTION = 'FUNCTION'
     MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
     MODEL = 'MODEL'
     NOTEBOOK_FILE = 'NOTEBOOK_FILE'
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 01c463a0d..29380d4f3 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -684,6 +684,7 @@ class ImportFormat(Enum):
     DBC = 'DBC'
     HTML = 'HTML'
     JUPYTER = 'JUPYTER'
+    RAW = 'RAW'
     R_MARKDOWN = 'R_MARKDOWN'
     SOURCE = 'SOURCE'
 
@@ -1799,7 +1800,7 @@ def delete(self, repo_id: int):
         Deletes the specified repo.
         
         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID for the corresponding repo to delete.
         
         
         """
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index edba0a733..43c77d00b 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -115,7 +115,7 @@
         Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          The Databricks budget configuration ID.
+          The budget configuration ID.
         
         :returns: :class:`GetBudgetConfigurationResponse`
         
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 6230b8199..697f0a5da 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -92,7 +92,7 @@
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 0dcc3d8e0..4192b2109 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -49,6 +49,7 @@
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
+          The OAuth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index fa1d130b1..ad8a75942 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -229,7 +229,7 @@
         :returns: Iterator over :class:`Workspace`
         
 
-    .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
+    .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
 
         Usage:
@@ -372,6 +372,9 @@
           The ID of the workspace's network configuration object. Used only if you already use a
           customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
           customer-managed VPC by updating the workspace to add a network configuration ID.
+        :param private_access_settings_id: str (optional)
+          The ID of the workspace's private access settings configuration object. This parameter is available
+          only for updating failed workspaces.
         :param storage_configuration_id: str (optional)
           The ID of the workspace's storage configuration object. This parameter is available only for
           updating failed workspaces.
@@ -384,7 +387,7 @@
           See :method:wait_get_workspace_running for more details.
         
 
-    .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
+    .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
 
     .. py:method:: wait_get_workspace_running(workspace_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Workspace], None]]) -> Workspace
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 9f5fef3bc..19b245b25 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -455,6 +455,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SERVICE
       :value: "SERVICE"
 
+   .. py:attribute:: STORAGE
+      :value: "STORAGE"
+
 .. py:class:: CredentialType
 
    The type of credential.
@@ -715,6 +718,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GcpServiceAccountKey
+   :members:
+   :undoc-members:
+
 .. autoclass:: GenerateTemporaryServiceCredentialAzureOptions
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index 9f419f160..f82cd73c2 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -269,6 +269,35 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: RestartWindow
+   :members:
+   :undoc-members:
+
+.. py:class:: RestartWindowDaysOfWeek
+
+   Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified all days of the week will be used.
+
+   .. py:attribute:: FRIDAY
+      :value: "FRIDAY"
+
+   .. py:attribute:: MONDAY
+      :value: "MONDAY"
+
+   .. py:attribute:: SATURDAY
+      :value: "SATURDAY"
+
+   .. py:attribute:: SUNDAY
+      :value: "SUNDAY"
+
+   .. py:attribute:: THURSDAY
+      :value: "THURSDAY"
+
+   .. py:attribute:: TUESDAY
+      :value: "TUESDAY"
+
+   .. py:attribute:: WEDNESDAY
+      :value: "WEDNESDAY"
+
 .. autoclass:: SchemaSpec
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index cd4c2dcea..650811e08 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -262,6 +262,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of the data object.
 
+   .. py:attribute:: FEATURE_SPEC
+      :value: "FEATURE_SPEC"
+
+   .. py:attribute:: FUNCTION
+      :value: "FUNCTION"
+
    .. py:attribute:: MATERIALIZED_VIEW
       :value: "MATERIALIZED_VIEW"
 
diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst
index 9ff3eb66b..bd0785db4 100644
--- a/docs/dbdataclasses/workspace.rst
+++ b/docs/dbdataclasses/workspace.rst
@@ -157,6 +157,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: JUPYTER
       :value: "JUPYTER"
 
+   .. py:attribute:: RAW
+      :value: "RAW"
+
    .. py:attribute:: R_MARKDOWN
       :value: "R_MARKDOWN"
 
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index a24941242..40791a143 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -188,7 +188,8 @@
         Updates the app with the supplied name.
         
         :param name: str
-          The name of the app.
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
         :param app: :class:`App` (optional)
         
         :returns: :class:`App`
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 4cb458b46..15cfb1cac 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -100,7 +100,7 @@
         :returns: :class:`TableInfo`
         
 
-    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
+    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
 
         Usage:
@@ -151,6 +151,9 @@
           Whether to omit the columns of the table from the response or not.
         :param omit_properties: bool (optional)
           Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+          not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).
         
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index 0fe55542a..b8dceeb9e 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -93,7 +93,7 @@
         Get the current published dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         :returns: :class:`PublishedDashboard`
         
@@ -147,7 +147,7 @@
         List dashboard schedules.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
+          UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -162,9 +162,9 @@
         List schedule subscriptions.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
+          UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
+          UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -226,7 +226,7 @@
         Unpublish the dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         
         
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 0c6d51439..e9e63bb20 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -2,7 +2,7 @@
 ================
 .. currentmodule:: databricks.sdk.service.jobs
 
-.. py:class:: JobsAPI
+.. py:class:: JobsExt
 
     The Jobs API allows you to create, edit, and delete jobs.
     
@@ -221,8 +221,7 @@
           Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
           not specified, the job/pipeline runs as the user who created the job/pipeline.
           
-          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -382,7 +381,7 @@
         :returns: :class:`JobPermissions`
         
 
-    .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run
+    .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run
 
 
         Usage:
@@ -414,10 +413,9 @@
             # cleanup
             w.jobs.delete_run(run_id=run.run_id)
 
-        Get a single job run.
-        
-        Retrieve the metadata of a run.
         
+        This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
+        it will paginate through all pages and aggregate the results.
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
         :param include_history: bool (optional)
@@ -425,9 +423,8 @@
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
-          the GetJob response.
-        
+          To list the next page or the previous page of job tasks, set this field to the value of the
+          `next_page_token` or `prev_page_token` returned in the GetJob response.
         :returns: :class:`Run`
         
 
@@ -792,7 +789,7 @@
         
         
 
-    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
+    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
 
         Usage:
@@ -876,6 +873,9 @@
           
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
+          will be run.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
@@ -921,7 +921,7 @@
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         
 
-    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
+    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
 
     .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index 39b5c9d77..1ba875740 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -15,7 +15,7 @@
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
     data quality and specify how to handle records that fail those expectations.
 
-    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
+    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
 
         Usage:
@@ -79,7 +79,7 @@
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -93,6 +93,8 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -377,7 +379,7 @@
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
 
-    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
 
 
         Usage:
@@ -457,7 +459,7 @@
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -471,6 +473,8 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
index 29d947f55..8fb2d0c3c 100644
--- a/docs/workspace/settings/notification_destinations.rst
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -65,6 +65,7 @@
         required in the request body.
         
         :param id: str
+          UUID identifying notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index 9c938ce3e..50dbe1328 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -53,7 +53,7 @@
         Deletes a token, specified by its ID.
         
         :param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.
         
         
         
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index 3e826a064..5f3e3e290 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -62,7 +62,7 @@
         Deletes the specified repo.
         
         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID for the corresponding repo to delete.
         
         
         

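A minimal sketch of the new `only` parameter on `run_now` documented above, assuming a job whose tasks include the placeholder task keys `ingest` and `report` (the job ID is a placeholder too):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Run only the named tasks of the job; tasks not listed are skipped.
run = w.jobs.run_now(job_id=123, only=["ingest", "report"]).result()
print(run.state)
```
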
From 197b5f9f3723158ea82389ed061329239c3fbdba Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Mon, 18 Nov 2024 16:23:41 +0100
Subject: [PATCH 073/136] [Internal] Bump release number to 0.38.0 (#828)

## What changes are proposed in this pull request?

Note: this PR is the real v0.38.0 release. It is the same as PR #826 but
with the right version number.

### New Features and Improvements

* Read streams by 1MB chunks by default.
([https://github.com/databricks/databricks-sdk-py/pull/817](https://github.com/databricks/databricks-sdk-py/pull/817)).

### Bug Fixes

* Rewind seekable streams before retrying
([https://github.com/databricks/databricks-sdk-py/pull/821](https://github.com/databricks/databricks-sdk-py/pull/821)).
 * Properly serialize nested data classes.

### Internal Changes

* Reformat SDK with YAPF 0.43.
([https://github.com/databricks/databricks-sdk-py/pull/822](https://github.com/databricks/databricks-sdk-py/pull/822)).
* Update Jobs GetRun API to support paginated responses for jobs and
ForEach tasks
([https://github.com/databricks/databricks-sdk-py/pull/819](https://github.com/databricks/databricks-sdk-py/pull/819)).

### API Changes:

* Added `service_principal_client_id` field for
`databricks.sdk.service.apps.App`.
* Added `azure_service_principal`, `gcp_service_account_key` and
`read_only` fields for
`databricks.sdk.service.catalog.CreateCredentialRequest`.
* Added `azure_service_principal`, `read_only` and
`used_for_managed_storage` fields for
`databricks.sdk.service.catalog.CredentialInfo`.
* Added `omit_username` field for
`databricks.sdk.service.catalog.ListTablesRequest`.
* Added `azure_service_principal` and `read_only` fields for
`databricks.sdk.service.catalog.UpdateCredentialRequest`.
* Added `external_location_name`, `read_only` and `url` fields for
`databricks.sdk.service.catalog.ValidateCredentialRequest`.
* Added `is_dir` field for
`databricks.sdk.service.catalog.ValidateCredentialResponse`.
 * Added `only` field for `databricks.sdk.service.jobs.RunNow`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.CreatePipeline`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.EditPipeline`.
* Added `restart_window` field for
`databricks.sdk.service.pipelines.PipelineSpec`.
* Added `private_access_settings_id` field for
`databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
* Changed `create_credential()` and
`generate_temporary_service_credential()` methods for
[w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html)
workspace-level service with new required argument order.
* Changed `access_connector_id` field for
`databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
* Changed `access_connector_id` field for
`databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
* Changed `name` field for
`databricks.sdk.service.catalog.CreateCredentialRequest` to be required.
* Changed `credential_name` field for
`databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`
to be required.

OpenAPI SHA: f2385add116e3716c8a90a0b68e204deb40f996c, Date: 2024-11-15
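
As a sketch of the new `private_access_settings_id` parameter listed above, a
failed workspace could be updated to attach a private access settings object
(both IDs below are placeholders):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# The parameter is available only when updating failed workspaces.
workspace = a.workspaces.update_and_wait(
    workspace_id=1234567890,
    private_access_settings_id="pas-0123456789abcdef",
)
```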
---
 databricks/sdk/version.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 8935b5b5d..457618b15 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.37.0'
+__version__ = '0.38.0'

From 74f9a4f22991ba459f1d4bafe95cd602a3cd9530 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Wed, 20 Nov 2024 15:00:05 +0100
Subject: [PATCH 074/136] [Internal] Fix a couple of typos in open_ai_client.py
 (#829)

## What changes are proposed in this pull request?

This PR fixes a couple of typos in `open_ai_client.py`.
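
For reference, these helpers come with the `openai` extra. A minimal sketch of
calling them, assuming the mixin is exposed on `w.serving_endpoints` and that
the model name below is a placeholder:

```python
# Requires: pip install 'databricks-sdk[openai]'
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# OpenAI client pointed at this workspace's serving endpoints.
openai_client = w.serving_endpoints.get_open_ai_client()

# LangChain chat client bound to one endpoint (placeholder model name).
chat = w.serving_endpoints.get_langchain_chat_open_ai_client(
    model="databricks-meta-llama-3-1-70b-instruct")
```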

## How is this tested?

N/A
---
 databricks/sdk/mixins/open_ai_client.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
index f7a8af02d..a86827128 100644
--- a/databricks/sdk/mixins/open_ai_client.py
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -29,7 +29,7 @@ def get_open_ai_client(self):
             from openai import OpenAI
         except Exception:
             raise ImportError(
-                "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`"
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]`"
             )
 
         return OpenAI(
@@ -42,7 +42,7 @@ def get_langchain_chat_open_ai_client(self, model):
             from langchain_openai import ChatOpenAI
         except Exception:
             raise ImportError(
-                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7"
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]` and ensure you are using python>3.7"
             )
 
         return ChatOpenAI(

From 980166030c688e08a52d0f55389abd766f1d928d Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Wed, 20 Nov 2024 15:43:35 +0100
Subject: [PATCH 075/136] [Fix] Update Changelog file (#830)

## What changes are proposed in this pull request?

Update Changelog file.
This file was not updated during the last release because updating it is a
manual step and it was overlooked.
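
One of the entries below is the new `restart_window` field on pipelines. A
minimal sketch of setting it, assuming `RestartWindow` takes a required
`start_hour` and an optional `time_zone_id`, and that the pipeline ID is a
placeholder (a real `update` call would resend the full pipeline definition):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import RestartWindow

w = WorkspaceClient()

# Allow automatic restarts in a five-hour window starting at 02:00 UTC.
w.pipelines.update(
    pipeline_id="0123-456789-abcdef",  # placeholder
    restart_window=RestartWindow(start_hour=2, time_zone_id="UTC"),
)
```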

## How is this tested?

N/A
---
 CHANGELOG.md | 38 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 409fce709..d3e7aac0b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,43 @@
 # Version changelog
 
+## [Release] Release v0.38.0
+
+### New Features and Improvements
+
+ * Read streams by 1MB chunks by default. ([#817](https://github.com/databricks/databricks-sdk-py/pull/817)).
+
+### Bug Fixes
+
+ * Rewind seekable streams before retrying ([#821](https://github.com/databricks/databricks-sdk-py/pull/821)).
+ * Properly serialize nested data classes. 
+
+### Internal Changes
+
+ * Reformat SDK with YAPF 0.43. ([#822](https://github.com/databricks/databricks-sdk-py/pull/822)).
+ * Update Jobs GetRun API to support paginated responses for jobs and ForEach tasks ([#819](https://github.com/databricks/databricks-sdk-py/pull/819)).
+
+### API Changes:
+
+ * Added `service_principal_client_id` field for `databricks.sdk.service.apps.App`.
+ * Added `azure_service_principal`, `gcp_service_account_key` and `read_only` fields for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+ * Added `azure_service_principal`, `read_only` and `used_for_managed_storage` fields for `databricks.sdk.service.catalog.CredentialInfo`.
+ * Added `omit_username` field for `databricks.sdk.service.catalog.ListTablesRequest`.
+ * Added `azure_service_principal` and `read_only` fields for `databricks.sdk.service.catalog.UpdateCredentialRequest`.
+ * Added `external_location_name`, `read_only` and `url` fields for `databricks.sdk.service.catalog.ValidateCredentialRequest`.
+ * Added `is_dir` field for `databricks.sdk.service.catalog.ValidateCredentialResponse`.
+ * Added `only` field for `databricks.sdk.service.jobs.RunNow`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+ * Added `private_access_settings_id` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
+ * Changed `create_credential()` and `generate_temporary_service_credential()` methods for [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service with new required argument order.
+ * Changed `access_connector_id` field for `databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
+ * Changed `access_connector_id` field for `databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
+ * Changed `name` field for `databricks.sdk.service.catalog.CreateCredentialRequest` to be required.
+ * Changed `credential_name` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest` to be required.
+
+OpenAPI SHA: f2385add116e3716c8a90a0b68e204deb40f996c, Date: 2024-11-15
+
 ## [Release] Release v0.37.0
 
 ### Bug Fixes

From b23571730435ba95afb0e3a8593419b9140664b9 Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi <88379306+tanmay-db@users.noreply.github.com>
Date: Tue, 10 Dec 2024 16:36:10 +0100
Subject: [PATCH 076/136] [Internal] Update SDK to OpenAPI spec (#834)

## What changes are proposed in this pull request?
Update SDK to OpenAPI spec
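
Among other changes, the regeneration adds an `as_shallow_dict` method next to
`as_dict` on the generated dataclasses. A minimal sketch of the difference,
with the enum value and constructor fields assumed for illustration:

```python
from databricks.sdk.service.apps import ApplicationState, ApplicationStatus

status = ApplicationStatus(message="ok", state=ApplicationState.RUNNING)

# as_dict() serializes recursively; enums are unwrapped to their values.
print(status.as_dict())          # {'message': 'ok', 'state': 'RUNNING'}

# as_shallow_dict() keeps immediate attributes as-is; the enum stays an enum.
print(status.as_shallow_dict())  # {'message': 'ok', 'state': <ApplicationState.RUNNING: 'RUNNING'>}
```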

## How is this tested?
N/A
---
 .codegen/_openapi_sha                         |    2 +-
 .gitattributes                                |    1 +
 databricks/sdk/__init__.py                    |   21 +
 databricks/sdk/service/apps.py                |  175 ++
 databricks/sdk/service/billing.py             |  247 +++
 databricks/sdk/service/catalog.py             | 1855 ++++++++++++++++-
 databricks/sdk/service/cleanrooms.py          | 1281 ++++++++++++
 databricks/sdk/service/compute.py             | 1494 ++++++++++++-
 databricks/sdk/service/dashboards.py          |  327 ++-
 databricks/sdk/service/files.py               |  164 +-
 databricks/sdk/service/iam.py                 |  351 ++++
 databricks/sdk/service/jobs.py                | 1272 ++++++++++-
 databricks/sdk/service/marketplace.py         |  688 ++++++
 databricks/sdk/service/ml.py                  | 1040 ++++++++-
 databricks/sdk/service/oauth2.py              |  175 ++
 databricks/sdk/service/pipelines.py           |  520 +++++
 databricks/sdk/service/provisioning.py        |  387 ++++
 databricks/sdk/service/serving.py             |  615 ++++++
 databricks/sdk/service/settings.py            | 1187 ++++++++++-
 databricks/sdk/service/sharing.py             |  328 ++-
 databricks/sdk/service/sql.py                 | 1188 ++++++++++-
 databricks/sdk/service/vectorsearch.py        |  290 +++
 databricks/sdk/service/workspace.py           |  451 ++++
 docs/dbdataclasses/catalog.rst                |   13 +-
 docs/dbdataclasses/cleanrooms.rst             |  155 ++
 docs/dbdataclasses/dashboards.rst             |   61 +
 docs/dbdataclasses/index.rst                  |    1 +
 docs/dbdataclasses/jobs.rst                   |   75 +
 docs/dbdataclasses/settings.rst               |   85 +
 docs/dbdataclasses/sharing.rst                |    9 +-
 .../cleanrooms/clean_room_assets.rst          |   94 +
 .../cleanrooms/clean_room_task_runs.rst       |   25 +
 docs/workspace/cleanrooms/clean_rooms.rst     |   94 +
 docs/workspace/cleanrooms/index.rst           |   12 +
 docs/workspace/files/files.rst                |    4 +-
 docs/workspace/index.rst                      |    1 +
 docs/workspace/jobs/jobs.rst                  |    4 +-
 ...aibi_dashboard_embedding_access_policy.rst |   16 +
 ...i_dashboard_embedding_approved_domains.rst |   17 +
 docs/workspace/sql/dashboards.rst             |    4 +-
 40 files changed, 14629 insertions(+), 100 deletions(-)
 create mode 100755 databricks/sdk/service/cleanrooms.py
 create mode 100644 docs/dbdataclasses/cleanrooms.rst
 create mode 100644 docs/workspace/cleanrooms/clean_room_assets.rst
 create mode 100644 docs/workspace/cleanrooms/clean_room_task_runs.rst
 create mode 100644 docs/workspace/cleanrooms/clean_rooms.rst
 create mode 100644 docs/workspace/cleanrooms/index.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index a2ba58aa5..68cd2f4be 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-f2385add116e3716c8a90a0b68e204deb40f996c
\ No newline at end of file
+7016dcbf2e011459416cf408ce21143bcc4b3a25
\ No newline at end of file
diff --git a/.gitattributes b/.gitattributes
index c8e5b2f0b..a0bfc0940 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -4,6 +4,7 @@ databricks/sdk/errors/platform.py linguist-generated=true
 databricks/sdk/service/apps.py linguist-generated=true
 databricks/sdk/service/billing.py linguist-generated=true
 databricks/sdk/service/catalog.py linguist-generated=true
+databricks/sdk/service/cleanrooms.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
 databricks/sdk/service/dashboards.py linguist-generated=true
 databricks/sdk/service/files.py linguist-generated=true
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 4f4689af2..beb3fd7bb 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -28,6 +28,9 @@
                                             TableConstraintsAPI, TablesAPI,
                                             TemporaryTableCredentialsAPI,
                                             VolumesAPI, WorkspaceBindingsAPI)
+from databricks.sdk.service.cleanrooms import (CleanRoomAssetsAPI,
+                                               CleanRoomsAPI,
+                                               CleanRoomTaskRunsAPI)
 from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             CommandExecutionAPI,
                                             GlobalInitScriptsAPI,
@@ -176,6 +179,9 @@ def __init__(self,
         self._apps = AppsAPI(self._api_client)
         self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
         self._catalogs = CatalogsAPI(self._api_client)
+        self._clean_room_assets = CleanRoomAssetsAPI(self._api_client)
+        self._clean_room_task_runs = CleanRoomTaskRunsAPI(self._api_client)
+        self._clean_rooms = CleanRoomsAPI(self._api_client)
         self._cluster_policies = ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
         self._command_execution = CommandExecutionAPI(self._api_client)
@@ -305,6 +311,21 @@ def catalogs(self) -> CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
+    @property
+    def clean_room_assets(self) -> CleanRoomAssetsAPI:
+        """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
+        return self._clean_room_assets
+
+    @property
+    def clean_room_task_runs(self) -> CleanRoomTaskRunsAPI:
+        """Clean room task runs are the executions of notebooks in a clean room."""
+        return self._clean_room_task_runs
+
+    @property
+    def clean_rooms(self) -> CleanRoomsAPI:
+        """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
+        return self._clean_rooms
+
     @property
     def cluster_policies(self) -> ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index a08a7e66d..eee49a212 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -91,6 +91,30 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the App into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active_deployment: body['active_deployment'] = self.active_deployment
+        if self.app_status: body['app_status'] = self.app_status
+        if self.compute_status: body['compute_status'] = self.compute_status
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.default_source_code_path is not None:
+            body['default_source_code_path'] = self.default_source_code_path
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment
+        if self.resources: body['resources'] = self.resources
+        if self.service_principal_client_id is not None:
+            body['service_principal_client_id'] = self.service_principal_client_id
+        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.updater is not None: body['updater'] = self.updater
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> App:
         """Deserializes the App from a dictionary."""
@@ -136,6 +160,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
         """Deserializes the AppAccessControlRequest from a dictionary."""
@@ -173,6 +207,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
         """Deserializes the AppAccessControlResponse from a dictionary."""
@@ -226,6 +271,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts
+        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
+        if self.mode is not None: body['mode'] = self.mode
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.status: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
         """Deserializes the AppDeployment from a dictionary."""
@@ -250,6 +308,12 @@ def as_dict(self) -> dict:
         if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
         """Deserializes the AppDeploymentArtifacts from a dictionary."""
@@ -285,6 +349,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
         """Deserializes the AppDeploymentStatus from a dictionary."""
@@ -308,6 +379,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermission:
         """Deserializes the AppPermission from a dictionary."""
@@ -340,6 +419,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissions:
         """Deserializes the AppPermissions from a dictionary."""
@@ -362,6 +449,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription:
         """Deserializes the AppPermissionsDescription from a dictionary."""
@@ -384,6 +478,13 @@ def as_dict(self) -> dict:
         if self.app_name is not None: body['app_name'] = self.app_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.app_name is not None: body['app_name'] = self.app_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
         """Deserializes the AppPermissionsRequest from a dictionary."""
@@ -418,6 +519,17 @@ def as_dict(self) -> dict:
         if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.job: body['job'] = self.job
+        if self.name is not None: body['name'] = self.name
+        if self.secret: body['secret'] = self.secret
+        if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint
+        if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResource:
         """Deserializes the AppResource from a dictionary."""
@@ -445,6 +557,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceJob:
         """Deserializes the AppResourceJob from a dictionary."""
@@ -479,6 +598,14 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.permission is not None: body['permission'] = self.permission
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret:
         """Deserializes the AppResourceSecret from a dictionary."""
@@ -511,6 +638,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint:
         """Deserializes the AppResourceServingEndpoint from a dictionary."""
@@ -541,6 +675,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse:
         """Deserializes the AppResourceSqlWarehouse from a dictionary."""
@@ -578,6 +719,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus:
         """Deserializes the ApplicationStatus from a dictionary."""
@@ -610,6 +758,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
         """Deserializes the ComputeStatus from a dictionary."""
@@ -627,6 +782,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse:
         """Deserializes the GetAppPermissionLevelsResponse from a dictionary."""
@@ -648,6 +809,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_deployments: body['app_deployments'] = self.app_deployments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
         """Deserializes the ListAppDeploymentsResponse from a dictionary."""
@@ -669,6 +837,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
         """Deserializes the ListAppsResponse from a dictionary."""
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index 8375a2629..62f596d0b 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -34,6 +34,15 @@ def as_dict(self) -> dict:
         if self.target is not None: body['target'] = self.target
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configuration_id is not None:
+            body['action_configuration_id'] = self.action_configuration_id
+        if self.action_type is not None: body['action_type'] = self.action_type
+        if self.target is not None: body['target'] = self.target
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration:
         """Deserializes the ActionConfiguration from a dictionary."""
@@ -83,6 +92,18 @@ def as_dict(self) -> dict:
         if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configurations: body['action_configurations'] = self.action_configurations
+        if self.alert_configuration_id is not None:
+            body['alert_configuration_id'] = self.alert_configuration_id
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
+        if self.time_period is not None: body['time_period'] = self.time_period
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration:
         """Deserializes the AlertConfiguration from a dictionary."""
@@ -149,6 +170,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration:
         """Deserializes the BudgetConfiguration from a dictionary."""
@@ -178,6 +212,13 @@ def as_dict(self) -> dict:
         if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tags: body['tags'] = self.tags
+        if self.workspace_id: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter:
         """Deserializes the BudgetConfigurationFilter from a dictionary."""
@@ -198,6 +239,13 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operator is not None: body['operator'] = self.operator
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause:
         """Deserializes the BudgetConfigurationFilterClause from a dictionary."""
@@ -223,6 +271,13 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause:
         """Deserializes the BudgetConfigurationFilterTagClause from a dictionary."""
@@ -242,6 +297,13 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operator is not None: body['operator'] = self.operator
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause:
         """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary."""
@@ -265,6 +327,13 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest:
         """Deserializes the CreateBillingUsageDashboardRequest from a dictionary."""
@@ -283,6 +352,12 @@ def as_dict(self) -> dict:
         if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse:
         """Deserializes the CreateBillingUsageDashboardResponse from a dictionary."""
@@ -316,6 +391,15 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget:
         """Deserializes the CreateBudgetConfigurationBudget from a dictionary."""
@@ -341,6 +425,13 @@ def as_dict(self) -> dict:
         if self.target is not None: body['target'] = self.target
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_type is not None: body['action_type'] = self.action_type
+        if self.target is not None: body['target'] = self.target
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary."""
@@ -378,6 +469,16 @@ def as_dict(self) -> dict:
         if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configurations: body['action_configurations'] = self.action_configurations
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
+        if self.time_period is not None: body['time_period'] = self.time_period
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary."""
@@ -400,6 +501,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest:
         """Deserializes the CreateBudgetConfigurationRequest from a dictionary."""
@@ -417,6 +524,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse:
         """Deserializes the CreateBudgetConfigurationResponse from a dictionary."""
@@ -509,6 +622,21 @@ def as_dict(self) -> dict:
         if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_name is not None: body['config_name'] = self.config_name
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
+        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
+        if self.log_type is not None: body['log_type'] = self.log_type
+        if self.output_format is not None: body['output_format'] = self.output_format
+        if self.status is not None: body['status'] = self.status
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams:
         """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary."""
@@ -531,6 +659,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
         """Deserializes the DeleteBudgetConfigurationResponse from a dictionary."""
@@ -563,6 +696,12 @@ def as_dict(self) -> dict:
         if self.contents: body['contents'] = self.contents
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
@@ -584,6 +723,13 @@ def as_dict(self) -> dict:
         if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse:
         """Deserializes the GetBillingUsageDashboardResponse from a dictionary."""
@@ -600,6 +746,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse:
         """Deserializes the GetBudgetConfigurationResponse from a dictionary."""
@@ -621,6 +773,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budgets: body['budgets'] = self.budgets
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
         """Deserializes the ListBudgetConfigurationsResponse from a dictionary."""
@@ -744,6 +903,26 @@ def as_dict(self) -> dict:
         if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.config_id is not None: body['config_id'] = self.config_id
+        if self.config_name is not None: body['config_name'] = self.config_name
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
+        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
+        if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status
+        if self.log_type is not None: body['log_type'] = self.log_type
+        if self.output_format is not None: body['output_format'] = self.output_format
+        if self.status is not None: body['status'] = self.status
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryConfiguration:
         """Deserializes the LogDeliveryConfiguration from a dictionary."""
@@ -796,6 +975,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time
+        if self.last_successful_attempt_time is not None:
+            body['last_successful_attempt_time'] = self.last_successful_attempt_time
+        if self.message is not None: body['message'] = self.message
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryStatus:
         """Deserializes the LogDeliveryStatus from a dictionary."""
@@ -846,6 +1035,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse:
         """Deserializes the PatchStatusResponse from a dictionary."""
@@ -884,6 +1078,17 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget:
         """Deserializes the UpdateBudgetConfigurationBudget from a dictionary."""
@@ -909,6 +1114,13 @@ def as_dict(self) -> dict:
         if self.budget_id is not None: body['budget_id'] = self.budget_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        if self.budget_id is not None: body['budget_id'] = self.budget_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest:
         """Deserializes the UpdateBudgetConfigurationRequest from a dictionary."""
@@ -927,6 +1139,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse:
         """Deserializes the UpdateBudgetConfigurationResponse from a dictionary."""
@@ -952,6 +1170,14 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration_id is not None:
+            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest:
         """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
@@ -976,6 +1202,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration:
+            body['log_delivery_configuration'] = self.log_delivery_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedCreateLogDeliveryConfiguration:
         """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary."""
@@ -994,6 +1227,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration:
+            body['log_delivery_configuration'] = self.log_delivery_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfiguration:
         """Deserializes the WrappedLogDeliveryConfiguration from a dictionary."""
@@ -1012,6 +1252,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedLogDeliveryConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configurations:
+            body['log_delivery_configurations'] = self.log_delivery_configurations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfigurations:
         """Deserializes the WrappedLogDeliveryConfigurations from a dictionary."""
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index d24ad54e6..0798bb5b6 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -28,6 +28,12 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastore:
         """Deserializes the AccountsCreateMetastore from a dictionary."""
@@ -52,6 +58,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastoreAssignment:
         """Deserializes the AccountsCreateMetastoreAssignment from a dictionary."""
@@ -74,6 +88,13 @@ def as_dict(self) -> dict:
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateStorageCredential:
         """Deserializes the AccountsCreateStorageCredential from a dictionary."""
@@ -91,6 +112,12 @@ def as_dict(self) -> dict:
         if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreAssignment:
         """Deserializes the AccountsMetastoreAssignment from a dictionary."""
@@ -107,6 +134,12 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreInfo:
         """Deserializes the AccountsMetastoreInfo from a dictionary."""
@@ -123,6 +156,12 @@ def as_dict(self) -> dict:
         if self.credential_info: body['credential_info'] = self.credential_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsStorageCredentialInfo:
         """Deserializes the AccountsStorageCredentialInfo from a dictionary."""
@@ -143,6 +182,13 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastore:
         """Deserializes the AccountsUpdateMetastore from a dictionary."""
@@ -168,6 +214,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastoreAssignment:
         """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary."""
@@ -195,6 +249,15 @@ def as_dict(self) -> dict:
             body['storage_credential_name'] = self.storage_credential_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateStorageCredential:
         """Deserializes the AccountsUpdateStorageCredential from a dictionary."""
@@ -226,6 +289,15 @@ def as_dict(self) -> dict:
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactAllowlistInfo:
         """Deserializes the ArtifactAllowlistInfo from a dictionary."""
@@ -250,6 +322,13 @@ def as_dict(self) -> dict:
         if self.match_type is not None: body['match_type'] = self.match_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact is not None: body['artifact'] = self.artifact
+        if self.match_type is not None: body['match_type'] = self.match_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactMatcher:
         """Deserializes the ArtifactMatcher from a dictionary."""
@@ -272,6 +351,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AssignResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
         """Deserializes the AssignResponse from a dictionary."""
@@ -305,6 +389,15 @@ def as_dict(self) -> dict:
         if self.session_token is not None: body['session_token'] = self.session_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
@@ -336,6 +429,14 @@ def as_dict(self) -> dict:
         if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRole:
         """Deserializes the AwsIamRole from a dictionary."""
@@ -355,6 +456,12 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleRequest:
         """Deserializes the AwsIamRoleRequest from a dictionary."""
@@ -381,6 +488,14 @@ def as_dict(self) -> dict:
         if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleResponse:
         """Deserializes the AwsIamRoleResponse from a dictionary."""
@@ -405,6 +520,12 @@ def as_dict(self) -> dict:
         if self.aad_token is not None: body['aad_token'] = self.aad_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
         """Deserializes the AzureActiveDirectoryToken from a dictionary."""
@@ -439,6 +560,14 @@ def as_dict(self) -> dict:
         if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentity:
         """Deserializes the AzureManagedIdentity from a dictionary."""
@@ -467,6 +596,13 @@ def as_dict(self) -> dict:
         if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentityRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityRequest:
         """Deserializes the AzureManagedIdentityRequest from a dictionary."""
@@ -498,6 +634,14 @@ def as_dict(self) -> dict:
         if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentityResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
         """Deserializes the AzureManagedIdentityResponse from a dictionary."""
@@ -508,7 +652,7 @@ def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
 
 @dataclass
 class AzureServicePrincipal:
-    """The Azure service principal configuration."""
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
 
     directory_id: str
     """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application."""
@@ -527,6 +671,14 @@ def as_dict(self) -> dict:
         if self.directory_id is not None: body['directory_id'] = self.directory_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.application_id is not None: body['application_id'] = self.application_id
+        if self.client_secret is not None: body['client_secret'] = self.client_secret
+        if self.directory_id is not None: body['directory_id'] = self.directory_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal:
         """Deserializes the AzureServicePrincipal from a dictionary."""
@@ -549,6 +701,12 @@ def as_dict(self) -> dict:
         if self.sas_token is not None: body['sas_token'] = self.sas_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
         """Deserializes the AzureUserDelegationSas from a dictionary."""
@@ -563,6 +721,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRefreshResponse:
         """Deserializes the CancelRefreshResponse from a dictionary."""
@@ -677,6 +840,37 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
+        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
         """Deserializes the CatalogInfo from a dictionary."""
@@ -760,6 +954,14 @@ def as_dict(self) -> dict:
         if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudflareApiToken:
         """Deserializes the CloudflareApiToken from a dictionary."""
@@ -794,7 +996,6 @@ class ColumnInfo:
     """Full data type specification, JSON-serialized."""
 
     type_name: Optional[ColumnTypeName] = None
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     type_precision: Optional[int] = None
     """Digits of precision; required for DecimalTypes."""
@@ -822,6 +1023,23 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.mask: body['mask'] = self.mask
+        if self.name is not None: body['name'] = self.name
+        if self.nullable is not None: body['nullable'] = self.nullable
+        if self.partition_index is not None: body['partition_index'] = self.partition_index
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -856,6 +1074,13 @@ def as_dict(self) -> dict:
         if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnMask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.using_column_names: body['using_column_names'] = self.using_column_names
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
         """Deserializes the ColumnMask from a dictionary."""
@@ -864,7 +1089,6 @@ def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
 
 
 class ColumnTypeName(Enum):
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     ARRAY = 'ARRAY'
     BINARY = 'BINARY'
@@ -972,6 +1196,30 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_type is not None: body['credential_type'] = self.credential_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConnectionInfo:
         """Deserializes the ConnectionInfo from a dictionary."""
@@ -1056,6 +1304,16 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContinuousUpdateStatus:
         """Deserializes the ContinuousUpdateStatus from a dictionary."""
@@ -1106,6 +1364,19 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCatalog:
         """Deserializes the CreateCatalog from a dictionary."""
@@ -1150,6 +1421,17 @@ def as_dict(self) -> dict:
         if self.read_only is not None: body['read_only'] = self.read_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateConnection into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.read_only is not None: body['read_only'] = self.read_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
         """Deserializes the CreateConnection from a dictionary."""
@@ -1174,12 +1456,13 @@ class CreateCredentialRequest:
     """The Azure managed identity configuration."""
 
     azure_service_principal: Optional[AzureServicePrincipal] = None
-    """The Azure service principal configuration."""
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
 
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
-    gcp_service_account_key: Optional[GcpServiceAccountKey] = None
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
 
     purpose: Optional[CredentialPurpose] = None
     """Indicates the purpose of the credential."""
@@ -1199,14 +1482,29 @@ def as_dict(self) -> dict:
         if self.azure_service_principal:
             body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.comment is not None: body['comment'] = self.comment
-        if self.gcp_service_account_key:
-            body['gcp_service_account_key'] = self.gcp_service_account_key.as_dict()
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.name is not None: body['name'] = self.name
         if self.purpose is not None: body['purpose'] = self.purpose.value
         if self.read_only is not None: body['read_only'] = self.read_only
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
@@ -1214,7 +1512,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    comment=d.get('comment', None),
-                   gcp_service_account_key=_from_dict(d, 'gcp_service_account_key', GcpServiceAccountKey),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
                    name=d.get('name', None),
                    purpose=_enum(d, 'purpose', CredentialPurpose),
                    read_only=d.get('read_only', None),
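
Beyond the serializers, this hunk completes the swap of the key-based `gcp_service_account_key` field for `databricks_gcp_service_account`: instead of uploading a service account's RSA private key, callers reference a Databricks-created Google Cloud service account and read back its email and key ID. A sketch of the new request shape, assuming these classes live in `databricks.sdk.service.catalog` as in the published SDK (the credential name is illustrative):

```python
from databricks.sdk.service.catalog import (CreateCredentialRequest,
                                            CredentialPurpose,
                                            DatabricksGcpServiceAccount)

# An empty DatabricksGcpServiceAccount marks the credential as a
# Databricks-managed GCP service account; its fields (email, key ID) are
# presumably filled in by the server on the way back.
req = CreateCredentialRequest(name='gcp-storage-cred',
                              purpose=CredentialPurpose.STORAGE,
                              databricks_gcp_service_account=DatabricksGcpServiceAccount())

# Round-trip through the serializers added in this patch.
assert 'databricks_gcp_service_account' in req.as_dict()
restored = CreateCredentialRequest.from_dict(req.as_dict())
assert restored.purpose is CredentialPurpose.STORAGE
```
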
@@ -1266,6 +1565,20 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
@@ -1373,6 +1686,32 @@ def as_dict(self) -> dict:
         if self.sql_path is not None: body['sql_path'] = self.sql_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFunction into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.input_params: body['input_params'] = self.input_params
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params
+        if self.routine_body is not None: body['routine_body'] = self.routine_body
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunction:
         """Deserializes the CreateFunction from a dictionary."""
@@ -1416,6 +1755,12 @@ def as_dict(self) -> dict:
         if self.function_info: body['function_info'] = self.function_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_info: body['function_info'] = self.function_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunctionRequest:
         """Deserializes the CreateFunctionRequest from a dictionary."""
@@ -1467,6 +1812,14 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastore:
         """Deserializes the CreateMetastore from a dictionary."""
@@ -1495,6 +1848,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastoreAssignment:
         """Deserializes the CreateMetastoreAssignment from a dictionary."""
@@ -1574,10 +1935,31 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
-        """Deserializes the CreateMonitor from a dictionary."""
-        return cls(assets_dir=d.get('assets_dir', None),
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.skip_builtin_dashboard is not None:
+            body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
+        """Deserializes the CreateMonitor from a dictionary."""
+        return cls(assets_dir=d.get('assets_dir', None),
                    baseline_table_name=d.get('baseline_table_name', None),
                    custom_metrics=_repeated_dict(d, 'custom_metrics', MonitorMetric),
                    data_classification_config=_from_dict(d, 'data_classification_config',
@@ -1621,6 +2003,16 @@ def as_dict(self) -> dict:
         if self.storage_location is not None: body['storage_location'] = self.storage_location
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegisteredModelRequest:
         """Deserializes the CreateRegisteredModelRequest from a dictionary."""
@@ -1639,6 +2031,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -1672,6 +2069,16 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.properties: body['properties'] = self.properties
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateSchema:
         """Deserializes the CreateSchema from a dictionary."""
@@ -1727,6 +2134,21 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageCredential:
         """Deserializes the CreateStorageCredential from a dictionary."""
@@ -1759,6 +2181,13 @@ def as_dict(self) -> dict:
         if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.constraint: body['constraint'] = self.constraint
+        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTableConstraint:
         """Deserializes the CreateTableConstraint from a dictionary."""
@@ -1796,6 +2225,17 @@ def as_dict(self) -> dict:
         if self.volume_type is not None: body['volume_type'] = self.volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
         """Deserializes the CreateVolumeRequestContent from a dictionary."""
@@ -1816,7 +2256,7 @@ class CredentialInfo:
     """The Azure managed identity configuration."""
 
     azure_service_principal: Optional[AzureServicePrincipal] = None
-    """The Azure service principal configuration."""
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
 
     comment: Optional[str] = None
     """Comment associated with the credential."""
@@ -1827,6 +2267,9 @@ class CredentialInfo:
     created_by: Optional[str] = None
     """Username of credential creator."""
 
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
     full_name: Optional[str] = None
     """The full name of the credential."""
 
@@ -1873,6 +2316,8 @@ def as_dict(self) -> dict:
         if self.comment is not None: body['comment'] = self.comment
         if self.created_at is not None: body['created_at'] = self.created_at
         if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.full_name is not None: body['full_name'] = self.full_name
         if self.id is not None: body['id'] = self.id
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
@@ -1887,6 +2332,31 @@ def as_dict(self) -> dict:
             body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
@@ -1896,6 +2366,8 @@ def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
                    comment=d.get('comment', None),
                    created_at=d.get('created_at', None),
                    created_by=d.get('created_by', None),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
                    full_name=d.get('full_name', None),
                    id=d.get('id', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
@@ -1937,6 +2409,13 @@ def as_dict(self) -> dict:
         if self.result is not None: body['result'] = self.result.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialValidationResult:
         """Deserializes the CredentialValidationResult from a dictionary."""
@@ -1956,6 +2435,12 @@ def as_dict(self) -> dict:
         if self.workspaces: body['workspaces'] = [v for v in self.workspaces]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CurrentWorkspaceBindings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspaces: body['workspaces'] = self.workspaces
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings:
         """Deserializes the CurrentWorkspaceBindings from a dictionary."""
@@ -1990,6 +2475,45 @@ class DataSourceFormat(Enum):
     WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'
 
 
+@dataclass
+class DatabricksGcpServiceAccount:
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
+    credential_id: Optional[str] = None
+    """The Databricks internal ID that represents this managed identity. This field is only used to
+    persist the credential_id once it is fetched from the credentials manager - as we only use the
+    protobuf serializer to store credentials, this ID gets persisted to the database"""
+
+    email: Optional[str] = None
+    """The email of the service account."""
+
+    private_key_id: Optional[str] = None
+    """The ID that represents the private key for this Service Account"""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccount:
+        """Deserializes the DatabricksGcpServiceAccount from a dictionary."""
+        return cls(credential_id=d.get('credential_id', None),
+                   email=d.get('email', None),
+                   private_key_id=d.get('private_key_id', None))
+
+
 @dataclass
 class DatabricksGcpServiceAccountRequest:
 
@@ -1998,6 +2522,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest:
         """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
@@ -2019,6 +2548,13 @@ def as_dict(self) -> dict:
         if self.email is not None: body['email'] = self.email
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountResponse:
         """Deserializes the DatabricksGcpServiceAccountResponse from a dictionary."""
@@ -2033,6 +2569,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse:
         """Deserializes the DeleteAliasResponse from a dictionary."""
@@ -2047,6 +2588,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialResponse:
         """Deserializes the DeleteCredentialResponse from a dictionary."""
@@ -2061,6 +2607,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -2081,6 +2632,12 @@ def as_dict(self) -> dict:
         if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaRuntimePropertiesKvPairs:
         """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary."""
@@ -2105,6 +2662,13 @@ def as_dict(self) -> dict:
         if self.table: body['table'] = self.table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function: body['function'] = self.function
+        if self.table: body['table'] = self.table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dependency:
         """Deserializes the Dependency from a dictionary."""
@@ -2125,6 +2689,12 @@ def as_dict(self) -> dict:
         if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DependencyList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dependencies: body['dependencies'] = self.dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DependencyList:
         """Deserializes the DependencyList from a dictionary."""
@@ -2139,6 +2709,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableResponse:
         """Deserializes the DisableResponse from a dictionary."""
@@ -2157,6 +2732,12 @@ def as_dict(self) -> dict:
             body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePermissionsList:
         """Deserializes the EffectivePermissionsList from a dictionary."""
@@ -2185,6 +2766,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
+        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePredictiveOptimizationFlag:
         """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary."""
@@ -2223,6 +2812,14 @@ def as_dict(self) -> dict:
         if self.privilege is not None: body['privilege'] = self.privilege.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
+        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
+        if self.privilege is not None: body['privilege'] = self.privilege
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilege:
         """Deserializes the EffectivePrivilege from a dictionary."""
@@ -2246,6 +2843,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilegeAssignment:
         """Deserializes the EffectivePrivilegeAssignment from a dictionary."""
@@ -2269,6 +2873,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnableResponse:
         """Deserializes the EnableResponse from a dictionary."""
@@ -2288,6 +2897,12 @@ def as_dict(self) -> dict:
         if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EncryptionDetails:
         """Deserializes the EncryptionDetails from a dictionary."""
@@ -2371,6 +2986,28 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo:
         """Deserializes the ExternalLocationInfo from a dictionary."""
@@ -2415,6 +3052,14 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FailedStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FailedStatus:
         """Deserializes the FailedStatus from a dictionary."""
@@ -2445,6 +3090,15 @@ def as_dict(self) -> dict:
         if self.parent_table is not None: body['parent_table'] = self.parent_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.child_columns: body['child_columns'] = self.child_columns
+        if self.name is not None: body['name'] = self.name
+        if self.parent_columns: body['parent_columns'] = self.parent_columns
+        if self.parent_table is not None: body['parent_table'] = self.parent_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForeignKeyConstraint:
         """Deserializes the ForeignKeyConstraint from a dictionary."""
@@ -2468,6 +3122,12 @@ def as_dict(self) -> dict:
         if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionDependency:
         """Deserializes the FunctionDependency from a dictionary."""
@@ -2604,6 +3264,41 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.function_id is not None: body['function_id'] = self.function_id
+        if self.input_params: body['input_params'] = self.input_params
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params
+        if self.routine_body is not None: body['routine_body'] = self.routine_body
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionInfo:
         """Deserializes the FunctionInfo from a dictionary."""
@@ -2678,7 +3373,6 @@ class FunctionParameterInfo:
     """Full data type spec, SQL/catalogString text."""
 
     type_name: ColumnTypeName
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     position: int
     """Ordinal position of column (starting at position 0)."""
@@ -2724,6 +3418,23 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
+        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode
+        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfo:
         """Deserializes the FunctionParameterInfo from a dictionary."""
@@ -2752,6 +3463,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
@@ -2784,44 +3501,21 @@ def as_dict(self) -> dict:
         if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
-        """Deserializes the GcpOauthToken from a dictionary."""
-        return cls(oauth_token=d.get('oauth_token', None))
-
-
-@dataclass
-class GcpServiceAccountKey:
-    """GCP long-lived credential. GCP Service Account."""
-
-    email: Optional[str] = None
-    """The email of the service account."""
-
-    private_key: Optional[str] = None
-    """The service account's RSA private key."""
-
-    private_key_id: Optional[str] = None
-    """The ID of the service account's private key."""
-
-    def as_dict(self) -> dict:
-        """Serializes the GcpServiceAccountKey into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.email is not None: body['email'] = self.email
-        if self.private_key is not None: body['private_key'] = self.private_key
-        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> GcpServiceAccountKey:
-        """Deserializes the GcpServiceAccountKey from a dictionary."""
-        return cls(email=d.get('email', None),
-                   private_key=d.get('private_key', None),
-                   private_key_id=d.get('private_key_id', None))
+    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
+        """Deserializes the GcpOauthToken from a dictionary."""
+        return cls(oauth_token=d.get('oauth_token', None))
 
 
 @dataclass
 class GenerateTemporaryServiceCredentialAzureOptions:
-    """Options to customize the requested temporary credential"""
+    """The Azure cloud options to customize the requested temporary credential"""
 
     resources: Optional[List[str]] = None
     """The resources to which the temporary Azure credential should apply. These resources are the
@@ -2834,25 +3528,70 @@ def as_dict(self) -> dict:
         if self.resources: body['resources'] = [v for v in self.resources]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resources: body['resources'] = self.resources
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzureOptions:
         """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary."""
         return cls(resources=d.get('resources', None))
 
 
+@dataclass
+class GenerateTemporaryServiceCredentialGcpOptions:
+    """The GCP cloud options to customize the requested temporary credential"""
+
+    scopes: Optional[List[str]] = None
+    """The scopes to which the temporary GCP credential should apply. These resources are the scopes
+    that are passed to the token provider (see
+    https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.scopes: body['scopes'] = [v for v in self.scopes]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialGcpOptions:
+        """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary."""
+        return cls(scopes=d.get('scopes', None))
+
+
 @dataclass
 class GenerateTemporaryServiceCredentialRequest:
     credential_name: str
     """The name of the service credential used to generate a temporary credential"""
 
     azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
-    """Options to customize the requested temporary credential"""
+    """The Azure cloud options to customize the requested temporary credential"""
+
+    gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
+    """The GCP cloud options to customize the requested temporary credential"""
 
     def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.azure_options: body['azure_options'] = self.azure_options.as_dict()
         if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_options: body['azure_options'] = self.azure_options
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.gcp_options: body['gcp_options'] = self.gcp_options
         return body
 
     @classmethod
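
With this hunk, `GenerateTemporaryServiceCredentialRequest` gains a per-cloud options block for GCP alongside the existing Azure one, and only the populated block is serialized into the request body. A sketch of a GCP-scoped request, assuming the same import path as above (the scope URL is just Google's broad cloud-platform scope, used here as an example):

```python
from databricks.sdk.service.catalog import (
    GenerateTemporaryServiceCredentialGcpOptions,
    GenerateTemporaryServiceCredentialRequest)

req = GenerateTemporaryServiceCredentialRequest(
    credential_name='my-gcp-credential',
    gcp_options=GenerateTemporaryServiceCredentialGcpOptions(
        scopes=['https://www.googleapis.com/auth/cloud-platform']))

# The unset azure_options block is skipped; only gcp_options is serialized.
assert req.as_dict() == {
    'credential_name': 'my-gcp-credential',
    'gcp_options': {'scopes': ['https://www.googleapis.com/auth/cloud-platform']},
}
```
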
@@ -2860,7 +3599,8 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialReque
         """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary."""
         return cls(azure_options=_from_dict(d, 'azure_options',
                                             GenerateTemporaryServiceCredentialAzureOptions),
-                   credential_name=d.get('credential_name', None))
+                   credential_name=d.get('credential_name', None),
+                   gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions))
 
 
 @dataclass
@@ -2880,6 +3620,13 @@ def as_dict(self) -> dict:
         if self.table_id is not None: body['table_id'] = self.table_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operation is not None: body['operation'] = self.operation
+        if self.table_id is not None: body['table_id'] = self.table_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
         """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
@@ -2929,6 +3676,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
         """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
@@ -3042,6 +3801,38 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.default_data_access_config_id is not None:
+            body['default_data_access_config_id'] = self.default_data_access_config_id
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        if self.storage_root_credential_name is not None:
+            body['storage_root_credential_name'] = self.storage_root_credential_name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse:
         """Deserializes the GetMetastoreSummaryResponse from a dictionary."""
@@ -3086,6 +3877,12 @@ def as_dict(self) -> dict:
         if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quota_info: body['quota_info'] = self.quota_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
         """Deserializes the GetQuotaResponse from a dictionary."""
@@ -3110,6 +3907,12 @@ def as_dict(self) -> dict:
         if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspace_ids: body['workspace_ids'] = self.workspace_ids
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountMetastoreAssignmentsResponse:
         """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary."""
@@ -3128,6 +3931,12 @@ def as_dict(self) -> dict:
             body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountStorageCredentialsResponse:
         """Deserializes the ListAccountStorageCredentialsResponse from a dictionary."""
@@ -3150,6 +3959,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalogs: body['catalogs'] = self.catalogs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCatalogsResponse:
         """Deserializes the ListCatalogsResponse from a dictionary."""
@@ -3173,6 +3989,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connections: body['connections'] = self.connections
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse:
         """Deserializes the ListConnectionsResponse from a dictionary."""
@@ -3195,6 +4018,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credentials: body['credentials'] = self.credentials
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
@@ -3219,6 +4049,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_locations: body['external_locations'] = self.external_locations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExternalLocationsResponse:
         """Deserializes the ListExternalLocationsResponse from a dictionary."""
@@ -3242,6 +4079,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.functions: body['functions'] = self.functions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFunctionsResponse:
         """Deserializes the ListFunctionsResponse from a dictionary."""
@@ -3260,6 +4104,12 @@ def as_dict(self) -> dict:
         if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastores: body['metastores'] = self.metastores
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListMetastoresResponse:
         """Deserializes the ListMetastoresResponse from a dictionary."""
@@ -3281,6 +4131,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse:
         """Deserializes the ListModelVersionsResponse from a dictionary."""
@@ -3304,6 +4161,13 @@ def as_dict(self) -> dict:
         if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.quotas: body['quotas'] = self.quotas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse:
         """Deserializes the ListQuotasResponse from a dictionary."""
@@ -3326,6 +4190,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegisteredModelsResponse:
         """Deserializes the ListRegisteredModelsResponse from a dictionary."""
@@ -3349,6 +4220,13 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchemasResponse:
         """Deserializes the ListSchemasResponse from a dictionary."""
@@ -3372,6 +4250,13 @@ def as_dict(self) -> dict:
             body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse:
         """Deserializes the ListStorageCredentialsResponse from a dictionary."""
@@ -3395,6 +4280,13 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse:
         """Deserializes the ListSystemSchemasResponse from a dictionary."""
@@ -3418,6 +4310,13 @@ def as_dict(self) -> dict:
         if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.tables: body['tables'] = self.tables
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTableSummariesResponse:
         """Deserializes the ListTableSummariesResponse from a dictionary."""
@@ -3441,6 +4340,13 @@ def as_dict(self) -> dict:
         if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.tables: body['tables'] = self.tables
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTablesResponse:
         """Deserializes the ListTablesResponse from a dictionary."""
@@ -3464,6 +4370,13 @@ def as_dict(self) -> dict:
         if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.volumes: body['volumes'] = self.volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVolumesResponseContent:
         """Deserializes the ListVolumesResponseContent from a dictionary."""
@@ -3496,6 +4409,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MetastoreAssignment:
         """Deserializes the MetastoreAssignment from a dictionary."""
@@ -3577,7 +4498,39 @@ def as_dict(self) -> dict:
         if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
             body[
                 'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
-        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        if self.storage_root_credential_name is not None:
+            body['storage_root_credential_name'] = self.storage_root_credential_name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.default_data_access_config_id is not None:
+            body['default_data_access_config_id'] = self.default_data_access_config_id
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
         if self.external_access_enabled is not None:
             body['external_access_enabled'] = self.external_access_enabled
         if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
@@ -3715,6 +4668,31 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aliases: body['aliases'] = self.aliases
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.id is not None: body['id'] = self.id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version_dependencies:
+            body['model_version_dependencies'] = self.model_version_dependencies
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo:
         """Deserializes the ModelVersionInfo from a dictionary."""
@@ -3771,6 +4749,15 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorCronSchedule:
         """Deserializes the MonitorCronSchedule from a dictionary."""
@@ -3797,6 +4784,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDataClassificationConfig:
         """Deserializes the MonitorDataClassificationConfig from a dictionary."""
@@ -3815,6 +4808,12 @@ def as_dict(self) -> dict:
         if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email_addresses: body['email_addresses'] = self.email_addresses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDestination:
         """Deserializes the MonitorDestination from a dictionary."""
@@ -3866,6 +4865,18 @@ def as_dict(self) -> dict:
         if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.granularities: body['granularities'] = self.granularities
+        if self.label_col is not None: body['label_col'] = self.label_col
+        if self.model_id_col is not None: body['model_id_col'] = self.model_id_col
+        if self.prediction_col is not None: body['prediction_col'] = self.prediction_col
+        if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col
+        if self.problem_type is not None: body['problem_type'] = self.problem_type
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInferenceLog:
         """Deserializes the MonitorInferenceLog from a dictionary."""
@@ -3977,6 +4988,33 @@ def as_dict(self) -> dict:
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.drift_metrics_table_name is not None:
+            body['drift_metrics_table_name'] = self.drift_metrics_table_name
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.latest_monitor_failure_msg is not None:
+            body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.profile_metrics_table_name is not None:
+            body['profile_metrics_table_name'] = self.profile_metrics_table_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.status is not None: body['status'] = self.status
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInfo:
         """Deserializes the MonitorInfo from a dictionary."""
@@ -4048,6 +5086,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.input_columns: body['input_columns'] = self.input_columns
+        if self.name is not None: body['name'] = self.name
+        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorMetric:
         """Deserializes the MonitorMetric from a dictionary."""
@@ -4088,6 +5136,14 @@ def as_dict(self) -> dict:
             body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_new_classification_tag_detected:
+            body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorNotifications:
         """Deserializes the MonitorNotifications from a dictionary."""
@@ -4127,6 +5183,17 @@ def as_dict(self) -> dict:
         if self.trigger is not None: body['trigger'] = self.trigger.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.message is not None: body['message'] = self.message
+        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.state is not None: body['state'] = self.state
+        if self.trigger is not None: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshInfo:
         """Deserializes the MonitorRefreshInfo from a dictionary."""
@@ -4166,6 +5233,12 @@ def as_dict(self) -> dict:
         if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.refreshes: body['refreshes'] = self.refreshes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshListResponse:
         """Deserializes the MonitorRefreshListResponse from a dictionary."""
@@ -4180,6 +5253,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorSnapshot:
         """Deserializes the MonitorSnapshot from a dictionary."""
@@ -4207,6 +5285,13 @@ def as_dict(self) -> dict:
         if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.granularities: body['granularities'] = self.granularities
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorTimeSeries:
         """Deserializes the MonitorTimeSeries from a dictionary."""
@@ -4224,6 +5309,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NamedTableConstraint:
         """Deserializes the NamedTableConstraint from a dictionary."""
@@ -4262,6 +5353,17 @@ def as_dict(self) -> dict:
             body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec
+        if self.status: body['status'] = self.status
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTable:
         """Deserializes the OnlineTable from a dictionary."""
@@ -4317,6 +5419,19 @@ def as_dict(self) -> dict:
         if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns
+        if self.run_continuously: body['run_continuously'] = self.run_continuously
+        if self.run_triggered: body['run_triggered'] = self.run_triggered
+        if self.source_table_full_name is not None:
+            body['source_table_full_name'] = self.source_table_full_name
+        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpec:
         """Deserializes the OnlineTableSpec from a dictionary."""
@@ -4338,6 +5453,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecContinuousSchedulingPolicy:
         """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
@@ -4352,6 +5472,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
         """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
@@ -4413,6 +5538,17 @@ def as_dict(self) -> dict:
             body['triggered_update_status'] = self.triggered_update_status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status
+        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state
+        if self.failed_status: body['failed_status'] = self.failed_status
+        if self.message is not None: body['message'] = self.message
+        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status
+        if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableStatus:
         """Deserializes the OnlineTableStatus from a dictionary."""
@@ -4443,6 +5579,14 @@ def as_dict(self) -> dict:
         if self.remove: body['remove'] = [v.value for v in self.remove]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add: body['add'] = self.add
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = self.remove
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
@@ -4463,6 +5607,12 @@ def as_dict(self) -> dict:
             body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsList:
         """Deserializes the PermissionsList from a dictionary."""
@@ -4502,6 +5652,19 @@ def as_dict(self) -> dict:
         if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.estimated_completion_time_seconds is not None:
+            body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+        if self.latest_version_currently_processing is not None:
+            body['latest_version_currently_processing'] = self.latest_version_currently_processing
+        if self.sync_progress_completion is not None:
+            body['sync_progress_completion'] = self.sync_progress_completion
+        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineProgress:
         """Deserializes the PipelineProgress from a dictionary."""
@@ -4527,6 +5690,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.child_columns: body['child_columns'] = self.child_columns
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrimaryKeyConstraint:
         """Deserializes the PrimaryKeyConstraint from a dictionary."""
@@ -4545,6 +5715,7 @@ class Privilege(Enum):
     CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
     CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
     CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
     CREATE_FUNCTION = 'CREATE_FUNCTION'
     CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
     CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
@@ -4596,6 +5767,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.value for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
@@ -4617,6 +5795,12 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo:
         """Deserializes the ProvisioningInfo from a dictionary."""
@@ -4648,6 +5832,13 @@ def as_dict(self) -> dict:
             body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus:
         """Deserializes the ProvisioningStatus from a dictionary."""
@@ -4687,6 +5878,17 @@ def as_dict(self) -> dict:
         if self.quota_name is not None: body['quota_name'] = self.quota_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
+        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type
+        if self.quota_count is not None: body['quota_count'] = self.quota_count
+        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
+        if self.quota_name is not None: body['quota_name'] = self.quota_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
         """Deserializes the QuotaInfo from a dictionary."""
@@ -4720,6 +5922,14 @@ def as_dict(self) -> dict:
         if self.session_token is not None: body['session_token'] = self.session_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the R2Credentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
         """Deserializes the R2Credentials from a dictionary."""
@@ -4744,6 +5954,13 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
         """Deserializes the RegenerateDashboardRequest from a dictionary."""
@@ -4765,6 +5982,13 @@ def as_dict(self) -> dict:
         if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
         """Deserializes the RegenerateDashboardResponse from a dictionary."""
@@ -4788,6 +6012,13 @@ def as_dict(self) -> dict:
         if self.version_num is not None: body['version_num'] = self.version_num
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alias_name is not None: body['alias_name'] = self.alias_name
+        if self.version_num is not None: body['version_num'] = self.version_num
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAlias:
         """Deserializes the RegisteredModelAlias from a dictionary."""
@@ -4858,6 +6089,25 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aliases: body['aliases'] = self.aliases
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelInfo:
         """Deserializes the RegisteredModelInfo from a dictionary."""
@@ -4960,6 +6210,31 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.schema_id is not None: body['schema_id'] = self.schema_id
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
         """Deserializes the SchemaInfo from a dictionary."""
@@ -5024,6 +6299,13 @@ def as_dict(self) -> dict:
         if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
+        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetArtifactAllowlist:
         """Deserializes the SetArtifactAllowlist from a dictionary."""
@@ -5050,6 +6332,14 @@ def as_dict(self) -> dict:
         if self.version_num is not None: body['version_num'] = self.version_num
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alias is not None: body['alias'] = self.alias
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version_num is not None: body['version_num'] = self.version_num
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetRegisteredModelAliasRequest:
         """Deserializes the SetRegisteredModelAliasRequest from a dictionary."""
@@ -5075,6 +6365,13 @@ def as_dict(self) -> dict:
         if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.algorithm is not None: body['algorithm'] = self.algorithm
+        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SseEncryptionDetails:
         """Deserializes the SseEncryptionDetails from a dictionary."""
@@ -5170,6 +6467,31 @@ def as_dict(self) -> dict:
             body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo:
         """Deserializes the StorageCredentialInfo from a dictionary."""
@@ -5211,6 +6533,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.schema is not None: body['schema'] = self.schema
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SystemSchemaInfo:
         """Deserializes the SystemSchemaInfo from a dictionary."""
@@ -5247,6 +6576,14 @@ def as_dict(self) -> dict:
         if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint
+        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint
+        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableConstraint:
         """Deserializes the TableConstraint from a dictionary."""
@@ -5269,6 +6606,12 @@ def as_dict(self) -> dict:
         if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableDependency:
         """Deserializes the TableDependency from a dictionary."""
@@ -5286,6 +6629,12 @@ def as_dict(self) -> dict:
         if self.table_exists is not None: body['table_exists'] = self.table_exists
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableExistsResponse:
         """Deserializes the TableExistsResponse from a dictionary."""
@@ -5438,6 +6787,48 @@ def as_dict(self) -> dict:
         if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.columns: body['columns'] = self.columns
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_access_configuration_id is not None:
+            body['data_access_configuration_id'] = self.data_access_configuration_id
+        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format
+        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs:
+            body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.properties: body['properties'] = self.properties
+        if self.row_filter: body['row_filter'] = self.row_filter
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.table_constraints: body['table_constraints'] = self.table_constraints
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.table_type is not None: body['table_type'] = self.table_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.view_definition is not None: body['view_definition'] = self.view_definition
+        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableInfo:
         """Deserializes the TableInfo from a dictionary."""
@@ -5500,6 +6891,13 @@ def as_dict(self) -> dict:
         if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.input_column_names: body['input_column_names'] = self.input_column_names
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableRowFilter:
         """Deserializes the TableRowFilter from a dictionary."""
@@ -5521,6 +6919,13 @@ def as_dict(self) -> dict:
         if self.table_type is not None: body['table_type'] = self.table_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.table_type is not None: body['table_type'] = self.table_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSummary:
         """Deserializes the TableSummary from a dictionary."""
@@ -5562,6 +6967,14 @@ def as_dict(self) -> dict:
         if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials:
         """Deserializes the TemporaryCredentials from a dictionary."""
@@ -5596,6 +7009,15 @@ def as_dict(self) -> dict:
             body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggeredUpdateStatus:
         """Deserializes the TriggeredUpdateStatus from a dictionary."""
@@ -5612,6 +7034,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnassignResponse:
         """Deserializes the UnassignResponse from a dictionary."""
@@ -5626,6 +7053,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
         """Deserializes the UpdateAssignmentResponse from a dictionary."""
@@ -5676,6 +7108,19 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog:
         """Deserializes the UpdateCatalog from a dictionary."""
@@ -5712,6 +7157,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
         """Deserializes the UpdateConnection from a dictionary."""
@@ -5730,11 +7184,14 @@ class UpdateCredentialRequest:
     """The Azure managed identity configuration."""
 
     azure_service_principal: Optional[AzureServicePrincipal] = None
-    """The Azure service principal configuration."""
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
 
     comment: Optional[str] = None
     """Comment associated with the credential."""
 
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
     force: Optional[bool] = None
     """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
     external locations and external tables (when purpose is **STORAGE**)."""
@@ -5766,6 +7223,8 @@ def as_dict(self) -> dict:
         if self.azure_service_principal:
             body['azure_service_principal'] = self.azure_service_principal.as_dict()
         if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
         if self.force is not None: body['force'] = self.force
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name_arg is not None: body['name_arg'] = self.name_arg
@@ -5775,6 +7234,24 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
         """Deserializes the UpdateCredentialRequest from a dictionary."""
@@ -5782,6 +7259,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
                    azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
                    azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
                    comment=d.get('comment', None),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
                    force=d.get('force', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    name_arg=d.get('name_arg', None),
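
Note that `from_dict` (updated just above to deserialize `databricks_gcp_service_account` via `_from_dict`) consumes the nested-dict form that `as_dict` produces; the shallow form keeps dataclass instances, so it is presumably intended for in-memory inspection rather than a serialize/deserialize round trip. A sketch under that assumption, with hypothetical names standing in for the SDK's types:

```python
# Hedged sketch: the deep as_dict() output round-trips through from_dict(),
# while as_shallow_dict() output holds live objects. Hypothetical names.
from dataclasses import dataclass
from typing import Dict, Optional


@dataclass
class Account:
    email: Optional[str] = None

    def as_dict(self) -> dict:
        body = {}
        if self.email is not None: body['email'] = self.email
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'Account':
        return cls(email=d.get('email', None))


@dataclass
class Request:
    account: Optional[Account] = None

    def as_dict(self) -> dict:
        body = {}
        if self.account: body['account'] = self.account.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        body = {}
        if self.account: body['account'] = self.account  # live object
        return body

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'Request':
        acct = d.get('account', None)
        return cls(account=Account.from_dict(acct) if acct else None)


req = Request(account=Account(email='x@y.z'))
assert Request.from_dict(req.as_dict()) == req          # deep form round-trips
assert req.as_shallow_dict()['account'] is req.account  # shallow keeps the object
```
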
@@ -5851,6 +7330,24 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation:
         """Deserializes the UpdateExternalLocation from a dictionary."""
@@ -5885,6 +7382,13 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateFunction:
         """Deserializes the UpdateFunction from a dictionary."""
@@ -5936,6 +7440,24 @@ def as_dict(self) -> dict:
             body['storage_root_credential_id'] = self.storage_root_credential_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+        if self.id is not None: body['id'] = self.id
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore:
         """Deserializes the UpdateMetastore from a dictionary."""
@@ -5970,6 +7492,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastoreAssignment:
         """Deserializes the UpdateMetastoreAssignment from a dictionary."""
@@ -6004,6 +7534,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
@@ -6074,6 +7612,24 @@ def as_dict(self) -> dict:
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMonitor:
         """Deserializes the UpdateMonitor from a dictionary."""
@@ -6111,6 +7667,14 @@ def as_dict(self) -> dict:
         if self.securable_type is not None: body['securable_type'] = self.securable_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePermissions:
         """Deserializes the UpdatePermissions from a dictionary."""
@@ -6142,6 +7706,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegisteredModelRequest:
         """Deserializes the UpdateRegisteredModelRequest from a dictionary."""
@@ -6159,6 +7732,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -6197,6 +7775,18 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSchema:
         """Deserializes the UpdateSchema from a dictionary."""
@@ -6269,6 +7859,25 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStorageCredential:
         """Deserializes the UpdateStorageCredential from a dictionary."""
@@ -6312,6 +7921,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent:
         """Deserializes the UpdateVolumeRequestContent from a dictionary."""
@@ -6340,6 +7958,14 @@ def as_dict(self) -> dict:
         if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces
+        if self.name is not None: body['name'] = self.name
+        if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings:
         """Deserializes the UpdateWorkspaceBindings from a dictionary."""
@@ -6371,6 +7997,15 @@ def as_dict(self) -> dict:
         if self.securable_type is not None: body['securable_type'] = self.securable_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add: body['add'] = self.add
+        if self.remove: body['remove'] = self.remove
+        if self.securable_name is not None: body['securable_name'] = self.securable_name
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
         """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary."""
@@ -6418,6 +8053,19 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest:
         """Deserializes the ValidateCredentialRequest from a dictionary."""
@@ -6446,6 +8094,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_dir is not None: body['isDir'] = self.is_dir
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse:
         """Deserializes the ValidateCredentialResponse from a dictionary."""
@@ -6508,6 +8163,23 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredential:
         """Deserializes the ValidateStorageCredential from a dictionary."""
@@ -6539,6 +8211,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_dir is not None: body['isDir'] = self.is_dir
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredentialResponse:
         """Deserializes the ValidateStorageCredentialResponse from a dictionary."""
@@ -6564,6 +8243,14 @@ def as_dict(self) -> dict:
         if self.result is not None: body['result'] = self.result.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidationResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.operation is not None: body['operation'] = self.operation
+        if self.result is not None: body['result'] = self.result
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidationResult:
         """Deserializes the ValidationResult from a dictionary."""
@@ -6663,6 +8350,28 @@ def as_dict(self) -> dict:
         if self.volume_type is not None: body['volume_type'] = self.volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.volume_id is not None: body['volume_id'] = self.volume_id
+        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumeInfo:
         """Deserializes the VolumeInfo from a dictionary."""
@@ -6704,6 +8413,13 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.binding_type is not None: body['binding_type'] = self.binding_type
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBinding:
         """Deserializes the WorkspaceBinding from a dictionary."""
@@ -6735,6 +8451,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bindings: body['bindings'] = self.bindings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse:
         """Deserializes the WorkspaceBindingsResponse from a dictionary."""
@@ -7502,9 +9225,9 @@ class CredentialsAPI:
     tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
     groups can access the credential.
     
-    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
     privilege. The user who creates the credential can delegate ownership to another user or group to manage
-    permissions on it"""
+    permissions on it."""
 
     def __init__(self, api_client):
         self._api = api_client
@@ -7516,7 +9239,7 @@ def create_credential(self,
                           azure_managed_identity: Optional[AzureManagedIdentity] = None,
                           azure_service_principal: Optional[AzureServicePrincipal] = None,
                           comment: Optional[str] = None,
-                          gcp_service_account_key: Optional[GcpServiceAccountKey] = None,
+                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
                           purpose: Optional[CredentialPurpose] = None,
                           read_only: Optional[bool] = None,
                           skip_validation: Optional[bool] = None) -> CredentialInfo:
@@ -7536,10 +9259,11 @@ def create_credential(self,
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
           The Azure managed identity configuration.
         :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration.
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
         :param comment: str (optional)
           Comment associated with the credential.
-        :param gcp_service_account_key: :class:`GcpServiceAccountKey` (optional)
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
         :param purpose: :class:`CredentialPurpose` (optional)
           Indicates the purpose of the credential.
         :param read_only: bool (optional)
@@ -7557,8 +9281,8 @@ def create_credential(self,
         if azure_service_principal is not None:
             body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
-        if gcp_service_account_key is not None:
-            body['gcp_service_account_key'] = gcp_service_account_key.as_dict()
+        if databricks_gcp_service_account is not None:
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if name is not None: body['name'] = name
         if purpose is not None: body['purpose'] = purpose.value
         if read_only is not None: body['read_only'] = read_only
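+        # Editor's note, not generated code: a hedged usage sketch, assuming the
+        # WorkspaceClient exposes this service as `w.credentials` and that
+        # CredentialPurpose defines SERVICE:
+        #
+        #   cred = w.credentials.create_credential(
+        #       name='my-gcp-cred',
+        #       purpose=CredentialPurpose.SERVICE,
+        #       databricks_gcp_service_account=DatabricksGcpServiceAccount(),
+        #       comment='created via SDK')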
@@ -7590,10 +9314,11 @@ def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
         self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)
 
     def generate_temporary_service_credential(
-        self,
-        credential_name: str,
-        *,
-        azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
+            self,
+            credential_name: str,
+            *,
+            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
+            gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
     ) -> TemporaryCredentials:
         """Generate a temporary service credential.
         
@@ -7603,13 +9328,16 @@ def generate_temporary_service_credential(
         :param credential_name: str
           The name of the service credential used to generate a temporary credential
         :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
-          Options to customize the requested temporary credential
+          The Azure cloud options to customize the requested temporary credential
+        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+          The GCP cloud options to customize the requested temporary credential
         
         :returns: :class:`TemporaryCredentials`
         """
         body = {}
         if azure_options is not None: body['azure_options'] = azure_options.as_dict()
         if credential_name is not None: body['credential_name'] = credential_name
+        if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict()
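+        # Editor's note, not generated code: a hedged sketch of requesting a
+        # temporary credential with GCP options (the `scopes` field on
+        # GenerateTemporaryServiceCredentialGcpOptions is an assumption):
+        #
+        #   tmp = w.credentials.generate_temporary_service_credential(
+        #       credential_name='my-gcp-cred',
+        #       gcp_options=GenerateTemporaryServiceCredentialGcpOptions(
+        #           scopes=['https://www.googleapis.com/auth/cloud-platform']))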
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -7683,6 +9411,7 @@ def update_credential(self,
                           azure_managed_identity: Optional[AzureManagedIdentity] = None,
                           azure_service_principal: Optional[AzureServicePrincipal] = None,
                           comment: Optional[str] = None,
+                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
                           force: Optional[bool] = None,
                           isolation_mode: Optional[IsolationMode] = None,
                           new_name: Optional[str] = None,
@@ -7703,9 +9432,11 @@ def update_credential(self,
         :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
           The Azure managed identity configuration.
         :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration.
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
         :param comment: str (optional)
           Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
         :param force: bool (optional)
           Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
           external locations and external tables (when purpose is **STORAGE**).
@@ -7730,6 +9461,8 @@ def update_credential(self,
         if azure_service_principal is not None:
             body['azure_service_principal'] = azure_service_principal.as_dict()
         if comment is not None: body['comment'] = comment
+        if databricks_gcp_service_account is not None:
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
         if force is not None: body['force'] = force
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
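+        # Editor's note, not generated code: a hedged sketch of switching an
+        # existing credential to a Databricks-created GCP service account
+        # (assumes the `w.credentials` accessor name):
+        #
+        #   w.credentials.update_credential(
+        #       name_arg='my-gcp-cred',
+        #       databricks_gcp_service_account=DatabricksGcpServiceAccount(),
+        #       skip_validation=True)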
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
new file mode 100755
index 000000000..393c68a0b
--- /dev/null
+++ b/databricks/sdk/service/cleanrooms.py
@@ -0,0 +1,1281 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+from __future__ import annotations
+
+import logging
+from dataclasses import dataclass
+from enum import Enum
+from typing import Dict, Iterator, List, Optional
+
+from ._internal import _enum, _from_dict, _repeated_dict
+
+_LOG = logging.getLogger('databricks.sdk')
+
+from databricks.sdk.service import catalog, jobs, settings, sharing
+
+# all definitions in this file are in alphabetical order
+
+
+@dataclass
+class CleanRoom:
+    access_restricted: Optional[CleanRoomAccessRestricted] = None
+    """Whether clean room access is restricted due to [CSP]
+    
+    [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html"""
+
+    comment: Optional[str] = None
+
+    created_at: Optional[int] = None
+    """When the clean room was created, in epoch milliseconds."""
+
+    local_collaborator_alias: Optional[str] = None
+    """The alias of the collaborator tied to the local clean room."""
+
+    name: Optional[str] = None
+    """The name of the clean room. It should follow [UC securable naming requirements].
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    output_catalog: Optional[CleanRoomOutputCatalog] = None
+    """Output catalog of the clean room. It is an output only field. Output catalog is manipulated
+    using the separate CreateCleanRoomOutputCatalog API."""
+
+    owner: Optional[str] = None
+    """This is Databricks username of the owner of the local clean room securable for permission
+    management."""
+
+    remote_detailed_info: Optional[CleanRoomRemoteDetail] = None
+    """Central clean room details. During creation, users need to specify cloud_vendor, region, and
+    collaborators.global_metastore_id. This field will not be filled in the ListCleanRooms call."""
+
+    status: Optional[CleanRoomStatusEnum] = None
+    """Clean room status."""
+
+    updated_at: Optional[int] = None
+    """When the clean room was last updated, in epoch milliseconds."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.local_collaborator_alias is not None:
+            body['local_collaborator_alias'] = self.local_collaborator_alias
+        if self.name is not None: body['name'] = self.name
+        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
+        if self.owner is not None: body['owner'] = self.owner
+        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
+        if self.status is not None: body['status'] = self.status.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoom into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.local_collaborator_alias is not None:
+            body['local_collaborator_alias'] = self.local_collaborator_alias
+        if self.name is not None: body['name'] = self.name
+        if self.output_catalog: body['output_catalog'] = self.output_catalog
+        if self.owner is not None: body['owner'] = self.owner
+        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info
+        if self.status is not None: body['status'] = self.status
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoom:
+        """Deserializes the CleanRoom from a dictionary."""
+        return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted),
+                   comment=d.get('comment', None),
+                   created_at=d.get('created_at', None),
+                   local_collaborator_alias=d.get('local_collaborator_alias', None),
+                   name=d.get('name', None),
+                   output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog),
+                   owner=d.get('owner', None),
+                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail),
+                   status=_enum(d, 'status', CleanRoomStatusEnum),
+                   updated_at=d.get('updated_at', None))
+
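+# Editor's note, not generated code: a minimal round-trip sketch. from_dict()
+# rebuilds nested types via _from_dict and enums via _enum (assuming
+# CleanRoomStatusEnum defines an ACTIVE member):
+#
+#   room = CleanRoom.from_dict({'name': 'demo_room', 'status': 'ACTIVE'})
+#   assert room.status is CleanRoomStatusEnum.ACTIVE
+#   assert room.as_dict() == {'name': 'demo_room', 'status': 'ACTIVE'}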
+
+class CleanRoomAccessRestricted(Enum):
+
+    CSP_MISMATCH = 'CSP_MISMATCH'
+    NO_RESTRICTION = 'NO_RESTRICTION'
+
+
+@dataclass
+class CleanRoomAsset:
+    """Metadata of the clean room asset"""
+
+    added_at: Optional[int] = None
+    """When the asset is added to the clean room, in epoch milliseconds."""
+
+    asset_type: Optional[CleanRoomAssetAssetType] = None
+    """The type of the asset."""
+
+    foreign_table: Optional[CleanRoomAssetForeignTable] = None
+    """Foreign table details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **FOREIGN_TABLE**"""
+
+    foreign_table_local_details: Optional[CleanRoomAssetForeignTableLocalDetails] = None
+    """Local details for a foreign that are only available to its owner. Present if and only if
+    **asset_type** is **FOREIGN_TABLE**"""
+
+    name: Optional[str] = None
+    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
+    the name displayed in the clean room UI.
+    
+    For UC securable assets (tables, volumes, etc.), the format is
+    *shared_catalog*.*shared_schema*.*asset_name*
+    
+    For notebooks, the name is the notebook file name."""
+
+    notebook: Optional[CleanRoomAssetNotebook] = None
+    """Notebook details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **NOTEBOOK_FILE**"""
+
+    owner_collaborator_alias: Optional[str] = None
+    """The alias of the collaborator who owns this asset"""
+
+    status: Optional[CleanRoomAssetStatusEnum] = None
+    """Status of the asset"""
+
+    table: Optional[CleanRoomAssetTable] = None
+    """Table details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **TABLE**"""
+
+    table_local_details: Optional[CleanRoomAssetTableLocalDetails] = None
+    """Local details for a table that are only available to its owner. Present if and only if
+    **asset_type** is **TABLE**"""
+
+    view: Optional[CleanRoomAssetView] = None
+    """View details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **VIEW**"""
+
+    view_local_details: Optional[CleanRoomAssetViewLocalDetails] = None
+    """Local details for a view that are only available to its owner. Present if and only if
+    **asset_type** is **VIEW**"""
+
+    volume_local_details: Optional[CleanRoomAssetVolumeLocalDetails] = None
+    """Local details for a volume that are only available to its owner. Present if and only if
+    **asset_type** is **VOLUME**"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.asset_type is not None: body['asset_type'] = self.asset_type.value
+        if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict()
+        if self.foreign_table_local_details:
+            body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.notebook: body['notebook'] = self.notebook.as_dict()
+        if self.owner_collaborator_alias is not None:
+            body['owner_collaborator_alias'] = self.owner_collaborator_alias
+        if self.status is not None: body['status'] = self.status.value
+        if self.table: body['table'] = self.table.as_dict()
+        if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict()
+        if self.view: body['view'] = self.view.as_dict()
+        if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict()
+        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.asset_type is not None: body['asset_type'] = self.asset_type
+        if self.foreign_table: body['foreign_table'] = self.foreign_table
+        if self.foreign_table_local_details:
+            body['foreign_table_local_details'] = self.foreign_table_local_details
+        if self.name is not None: body['name'] = self.name
+        if self.notebook: body['notebook'] = self.notebook
+        if self.owner_collaborator_alias is not None:
+            body['owner_collaborator_alias'] = self.owner_collaborator_alias
+        if self.status is not None: body['status'] = self.status
+        if self.table: body['table'] = self.table
+        if self.table_local_details: body['table_local_details'] = self.table_local_details
+        if self.view: body['view'] = self.view
+        if self.view_local_details: body['view_local_details'] = self.view_local_details
+        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAsset:
+        """Deserializes the CleanRoomAsset from a dictionary."""
+        return cls(added_at=d.get('added_at', None),
+                   asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType),
+                   foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable),
+                   foreign_table_local_details=_from_dict(d, 'foreign_table_local_details',
+                                                          CleanRoomAssetForeignTableLocalDetails),
+                   name=d.get('name', None),
+                   notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook),
+                   owner_collaborator_alias=d.get('owner_collaborator_alias', None),
+                   status=_enum(d, 'status', CleanRoomAssetStatusEnum),
+                   table=_from_dict(d, 'table', CleanRoomAssetTable),
+                   table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails),
+                   view=_from_dict(d, 'view', CleanRoomAssetView),
+                   view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails),
+                   volume_local_details=_from_dict(d, 'volume_local_details',
+                                                   CleanRoomAssetVolumeLocalDetails))
+
+
+class CleanRoomAssetAssetType(Enum):
+
+    FOREIGN_TABLE = 'FOREIGN_TABLE'
+    NOTEBOOK_FILE = 'NOTEBOOK_FILE'
+    TABLE = 'TABLE'
+    VIEW = 'VIEW'
+    VOLUME = 'VOLUME'
+
+
+@dataclass
+class CleanRoomAssetForeignTable:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the foreign table"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTable:
+        """Deserializes the CleanRoomAssetForeignTable from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
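+# Editor's note, not generated code: _repeated_dict deserializes each element
+# of a JSON list into the given type. Assuming catalog.ColumnInfo has a `name`
+# field:
+#
+#   ft = CleanRoomAssetForeignTable.from_dict({'columns': [{'name': 'id'}]})
+#   ft.columns[0]   # a catalog.ColumnInfo instance
+#   ft.as_dict()    # {'columns': [{'name': 'id'}]}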
+
+@dataclass
+class CleanRoomAssetForeignTableLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the foreign table in its owner's local metastore, in the format of
+    *catalog*.*schema*.*foreign_table_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTableLocalDetails:
+        """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomAssetNotebook:
+    etag: Optional[str] = None
+    """Server generated checksum that represents the notebook version."""
+
+    notebook_content: Optional[str] = None
+    """Base 64 representation of the notebook contents. This is the same format as returned by
+    :method:workspace/export with the format of **HTML**."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
+        """Deserializes the CleanRoomAssetNotebook from a dictionary."""
+        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))
+
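+# Editor's note, not generated code: notebook_content is base64-encoded HTML,
+# the same format returned by :method:workspace/export. A decoding sketch:
+#
+#   import base64
+#   html = base64.b64decode(asset.notebook.notebook_content).decode('utf-8')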
+
+class CleanRoomAssetStatusEnum(Enum):
+
+    ACTIVE = 'ACTIVE'
+    PERMISSION_DENIED = 'PERMISSION_DENIED'
+
+
+@dataclass
+class CleanRoomAssetTable:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the table"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTable:
+        """Deserializes the CleanRoomAssetTable from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
+
+@dataclass
+class CleanRoomAssetTableLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the table in its owner's local metastore, in the format of
+    *catalog*.*schema*.*table_name*"""
+
+    partitions: Optional[List[sharing.PartitionSpecificationPartition]] = None
+    """Partition filtering specification for a shared table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.partitions: body['partitions'] = self.partitions
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTableLocalDetails:
+        """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None),
+                   partitions=_repeated_dict(d, 'partitions', sharing.PartitionSpecificationPartition))
+
+
+@dataclass
+class CleanRoomAssetView:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the view"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetView:
+        """Deserializes the CleanRoomAssetView from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
+
+@dataclass
+class CleanRoomAssetViewLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the view in its owner's local metastore, in the format of
+    *catalog*.*schema*.*view_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetViewLocalDetails:
+        """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomAssetVolumeLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the volume in its owner's local metastore, in the format of
+    *catalog*.*schema*.*volume_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetVolumeLocalDetails:
+        """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomCollaborator:
+    """Publicly visible clean room collaborator."""
+
+    collaborator_alias: Optional[str] = None
+    """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
+    this clean room, and used to derive multiple values internally such as catalog alias and clean
+    room name for single metastore clean rooms. It should follow [UC securable naming requirements].
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    display_name: Optional[str] = None
+    """Generated display name for the collaborator. In the case of a single metastore clean room, it is
+    the clean room name. For cross-metastore clean rooms, it is the organization name of the metastore.
+    It is not restricted to these values and could change in the future."""
+
+    global_metastore_id: Optional[str] = None
+    """The global Unity Catalog metastore id of the collaborator. The identifier is of format
+    cloud:region:metastore-uuid."""
+
+    invite_recipient_email: Optional[str] = None
+    """Email of the user who is receiving the clean room "invitation". It should be empty for the
+    creator of the clean room, and non-empty for the invitees of the clean room. It is only returned
+    in the output when the clean room creator calls GET."""
+
+    invite_recipient_workspace_id: Optional[int] = None
+    """Workspace ID of the user who is receiving the clean room "invitation". Must be specified if
+    invite_recipient_email is specified. It should be empty when the collaborator is the creator of
+    the clean room."""
+
+    organization_name: Optional[str] = None
+    """[Organization name](:method:metastores/list#metastores-delta_sharing_organization_name)
+    configured in the metastore"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.invite_recipient_email is not None:
+            body['invite_recipient_email'] = self.invite_recipient_email
+        if self.invite_recipient_workspace_id is not None:
+            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
+        if self.organization_name is not None: body['organization_name'] = self.organization_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.invite_recipient_email is not None:
+            body['invite_recipient_email'] = self.invite_recipient_email
+        if self.invite_recipient_workspace_id is not None:
+            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
+        if self.organization_name is not None: body['organization_name'] = self.organization_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator:
+        """Deserializes the CleanRoomCollaborator from a dictionary."""
+        return cls(collaborator_alias=d.get('collaborator_alias', None),
+                   display_name=d.get('display_name', None),
+                   global_metastore_id=d.get('global_metastore_id', None),
+                   invite_recipient_email=d.get('invite_recipient_email', None),
+                   invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None),
+                   organization_name=d.get('organization_name', None))
+
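+# Editor's note, not generated code: a hedged example of the identifier format
+# described above; global_metastore_id follows cloud:region:metastore-uuid
+# (all values below are made up):
+#
+#   collab = CleanRoomCollaborator(
+#       collaborator_alias='partner_a',
+#       global_metastore_id='aws:us-west-2:11111111-2222-3333-4444-555555555555')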
+
+@dataclass
+class CleanRoomNotebookTaskRun:
+    """Stores information about a single task run."""
+
+    collaborator_job_run_info: Optional[CollaboratorJobRunInfo] = None
+    """Job run info of the task in the runner's local workspace. This field is only included in the
+    LIST API. if the task was run within the same workspace the API is being called. If the task run
+    was in a different workspace under the same metastore, only the workspace_id is included."""
+
+    notebook_job_run_state: Optional[jobs.CleanRoomTaskRunState] = None
+    """State of the task run."""
+
+    notebook_name: Optional[str] = None
+    """Asset name of the notebook executed in this task run."""
+
+    output_schema_expiration_time: Optional[int] = None
+    """Expiration time of the output schema of the task run (if any), in epoch milliseconds."""
+
+    output_schema_name: Optional[str] = None
+    """Name of the output schema associated with the clean rooms notebook task run."""
+
+    run_duration: Optional[int] = None
+    """Duration of the task run, in milliseconds."""
+
+    start_time: Optional[int] = None
+    """When the task run started, in epoch milliseconds."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_job_run_info:
+            body['collaborator_job_run_info'] = self.collaborator_job_run_info.as_dict()
+        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict()
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.output_schema_expiration_time is not None:
+            body['output_schema_expiration_time'] = self.output_schema_expiration_time
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info
+        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.output_schema_expiration_time is not None:
+            body['output_schema_expiration_time'] = self.output_schema_expiration_time
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookTaskRun:
+        """Deserializes the CleanRoomNotebookTaskRun from a dictionary."""
+        return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info',
+                                                        CollaboratorJobRunInfo),
+                   notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState),
+                   notebook_name=d.get('notebook_name', None),
+                   output_schema_expiration_time=d.get('output_schema_expiration_time', None),
+                   output_schema_name=d.get('output_schema_name', None),
+                   run_duration=d.get('run_duration', None),
+                   start_time=d.get('start_time', None))
+
+
+@dataclass
+class CleanRoomOutputCatalog:
+    catalog_name: Optional[str] = None
+    """The name of the output catalog in UC. It should follow [UC securable naming requirements]. The
+    field will always exist if status is CREATED.
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    status: Optional[CleanRoomOutputCatalogOutputCatalogStatus] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.status is not None: body['status'] = self.status.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomOutputCatalog:
+        """Deserializes the CleanRoomOutputCatalog from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus))
+
+
+class CleanRoomOutputCatalogOutputCatalogStatus(Enum):
+
+    CREATED = 'CREATED'
+    NOT_CREATED = 'NOT_CREATED'
+    NOT_ELIGIBLE = 'NOT_ELIGIBLE'
+
+
+@dataclass
+class CleanRoomRemoteDetail:
+    """Publicly visible central clean room details."""
+
+    central_clean_room_id: Optional[str] = None
+    """Central clean room ID."""
+
+    cloud_vendor: Optional[str] = None
+    """Cloud vendor (aws,azure,gcp) of the central clean room."""
+
+    collaborators: Optional[List[CleanRoomCollaborator]] = None
+    """Collaborators in the central clean room. There should one and only one collaborator in the list
+    that satisfies the owner condition:
+    
+    1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom).
+    
+    2. Its invite_recipient_email is empty."""
+
+    compliance_security_profile: Optional[ComplianceSecurityProfile] = None
+    """The compliance security profile used to process regulated data following compliance standards."""
+
+    creator: Optional[CleanRoomCollaborator] = None
+    """Collaborator who creates the clean room."""
+
+    egress_network_policy: Optional[settings.EgressNetworkPolicy] = None
+    """Egress network policy to apply to the central clean room workspace."""
+
+    region: Optional[str] = None
+    """Region of the central clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
+        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
+        if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators]
+        if self.compliance_security_profile:
+            body['compliance_security_profile'] = self.compliance_security_profile.as_dict()
+        if self.creator: body['creator'] = self.creator.as_dict()
+        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict()
+        if self.region is not None: body['region'] = self.region
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
+        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
+        if self.collaborators: body['collaborators'] = self.collaborators
+        if self.compliance_security_profile:
+            body['compliance_security_profile'] = self.compliance_security_profile
+        if self.creator: body['creator'] = self.creator
+        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy
+        if self.region is not None: body['region'] = self.region
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomRemoteDetail:
+        """Deserializes the CleanRoomRemoteDetail from a dictionary."""
+        return cls(central_clean_room_id=d.get('central_clean_room_id', None),
+                   cloud_vendor=d.get('cloud_vendor', None),
+                   collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator),
+                   compliance_security_profile=_from_dict(d, 'compliance_security_profile',
+                                                          ComplianceSecurityProfile),
+                   creator=_from_dict(d, 'creator', CleanRoomCollaborator),
+                   egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy),
+                   region=d.get('region', None))
+
+
+class CleanRoomStatusEnum(Enum):
+
+    ACTIVE = 'ACTIVE'
+    DELETED = 'DELETED'
+    FAILED = 'FAILED'
+    PROVISIONING = 'PROVISIONING'
+
+
+@dataclass
+class CollaboratorJobRunInfo:
+    collaborator_alias: Optional[str] = None
+    """Alias of the collaborator that triggered the task run."""
+
+    collaborator_job_id: Optional[int] = None
+    """Job ID of the task run in the collaborator's workspace."""
+
+    collaborator_job_run_id: Optional[int] = None
+    """Job run ID of the task run in the collaborator's workspace."""
+
+    collaborator_task_run_id: Optional[int] = None
+    """Task run ID of the task run in the collaborator's workspace."""
+
+    collaborator_workspace_id: Optional[int] = None
+    """ID of the collaborator's workspace that triggered the task run."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_job_run_id is not None:
+            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+        if self.collaborator_task_run_id is not None:
+            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+        if self.collaborator_workspace_id is not None:
+            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_job_run_id is not None:
+            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+        if self.collaborator_task_run_id is not None:
+            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+        if self.collaborator_workspace_id is not None:
+            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CollaboratorJobRunInfo:
+        """Deserializes the CollaboratorJobRunInfo from a dictionary."""
+        return cls(collaborator_alias=d.get('collaborator_alias', None),
+                   collaborator_job_id=d.get('collaborator_job_id', None),
+                   collaborator_job_run_id=d.get('collaborator_job_run_id', None),
+                   collaborator_task_run_id=d.get('collaborator_task_run_id', None),
+                   collaborator_workspace_id=d.get('collaborator_workspace_id', None))
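+
+# Illustrative sketch (comment only, not part of the generated API): the
+# difference between as_dict() and as_shallow_dict(). as_dict() recursively
+# serializes nested dataclasses into plain dicts, while as_shallow_dict()
+# keeps nested objects as-is; from_dict() inverts as_dict().
+#
+#     info = CollaboratorJobRunInfo(collaborator_alias='analyst',
+#                                   collaborator_job_id=42)
+#     run = CleanRoomNotebookTaskRun(collaborator_job_run_info=info,
+#                                    notebook_name='report')
+#     run.as_dict()['collaborator_job_run_info']          # plain dict
+#     run.as_shallow_dict()['collaborator_job_run_info']  # the dataclass itself
+#     CleanRoomNotebookTaskRun.from_dict(run.as_dict())   # round-trips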
+
+
+@dataclass
+class ComplianceSecurityProfile:
+    """The compliance security profile used to process regulated data following compliance standards."""
+
+    compliance_standards: Optional[List[settings.ComplianceStandard]] = None
+    """The list of compliance standards that the compliance security profile is configured to enforce."""
+
+    is_enabled: Optional[bool] = None
+    """Whether the compliance security profile is enabled."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.compliance_standards:
+            body['compliance_standards'] = [v.as_dict() for v in self.compliance_standards]
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
+        """Deserializes the ComplianceSecurityProfile from a dictionary."""
+        return cls(compliance_standards=_repeated_dict(d, 'compliance_standards',
+                                                       settings.ComplianceStandard),
+                   is_enabled=d.get('is_enabled', None))
+
+
+@dataclass
+class CreateCleanRoomOutputCatalogResponse:
+    output_catalog: Optional[CleanRoomOutputCatalog] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.output_catalog: body['output_catalog'] = self.output_catalog
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoomOutputCatalogResponse:
+        """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary."""
+        return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog))
+
+
+@dataclass
+class DeleteCleanRoomAssetResponse:
+    """Response for delete clean room request. Using an empty message since the generic Empty proto
+    does not extend UnshadedMessageMarker."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCleanRoomAssetResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCleanRoomAssetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCleanRoomAssetResponse:
+        """Deserializes the DeleteCleanRoomAssetResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class ListCleanRoomAssetsResponse:
+    assets: Optional[List[CleanRoomAsset]] = None
+    """Assets in the clean room."""
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.assets: body['assets'] = [v.as_dict() for v in self.assets]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets: body['assets'] = self.assets
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomAssetsResponse:
+        """Deserializes the ListCleanRoomAssetsResponse from a dictionary."""
+        return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset),
+                   next_page_token=d.get('next_page_token', None))
+
+
+@dataclass
+class ListCleanRoomNotebookTaskRunsResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    runs: Optional[List[CleanRoomNotebookTaskRun]] = None
+    """Name of the clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomNotebookTaskRunsResponse:
+        """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun))
+
+
+@dataclass
+class ListCleanRoomsResponse:
+    clean_rooms: Optional[List[CleanRoom]] = None
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_rooms: body['clean_rooms'] = self.clean_rooms
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
+        """Deserializes the ListCleanRoomsResponse from a dictionary."""
+        return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom),
+                   next_page_token=d.get('next_page_token', None))
+
+
+@dataclass
+class UpdateCleanRoomRequest:
+    clean_room: Optional[CleanRoom] = None
+
+    name: Optional[str] = None
+    """Name of the clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room: body['clean_room'] = self.clean_room.as_dict()
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room: body['clean_room'] = self.clean_room
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoomRequest:
+        """Deserializes the UpdateCleanRoomRequest from a dictionary."""
+        return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None))
+
+
+class CleanRoomAssetsAPI:
+    """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the
+    clean room."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, clean_room_name: str, *, asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
+        """Create an asset.
+        
+        Create a clean room asset: share an asset such as a notebook or table into the clean room. For each UC
+        asset that is added through this method, the clean room owner must also have enough privilege on the
+        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+        access the asset. Typically, you should use a group as the clean room owner.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+        body = asset.as_dict() if asset is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/assets',
+                           body=body,
+                           headers=headers)
+        return CleanRoomAsset.from_dict(res)
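+
+    # Usage sketch (illustrative; the `w.clean_room_assets` attribute, the asset
+    # field names, and the NOTEBOOK_FILE enum member are assumed, not confirmed
+    # by this diff). Shares a notebook into a clean room through a configured
+    # WorkspaceClient `w`:
+    #
+    #     asset = CleanRoomAsset(name='analysis_notebook',
+    #                            asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE)
+    #     created = w.clean_room_assets.create('demo-clean-room', asset=asset)
+    #     print(created.name)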
+
+    def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str):
+        """Delete an asset.
+        
+        Delete a clean room asset: unshare/remove the asset from the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
+                     headers=headers)
+
+    def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType,
+            asset_full_name: str) -> CleanRoomAsset:
+        """Get an asset.
+        
+        Get the details of a clean room asset by its type and full name.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
+            headers=headers)
+        return CleanRoomAsset.from_dict(res)
+
+    def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]:
+        """List assets.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param page_token: str (optional)
+          Opaque pagination token to go to the next page based on the previous query.
+        
+        :returns: Iterator over :class:`CleanRoomAsset`
+        """
+
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/clean-rooms/{clean_room_name}/assets',
+                                query=query,
+                                headers=headers)
+            if 'assets' in json:
+                for v in json['assets']:
+                    yield CleanRoomAsset.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
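+
+    # Usage sketch (illustrative, assuming the client exposes this API as
+    # `w.clean_room_assets`): list() returns a lazy iterator that follows
+    # next_page_token automatically, so callers never page by hand.
+    #
+    #     for asset in w.clean_room_assets.list('demo-clean-room'):
+    #         print(asset.name)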
+
+    def update(self,
+               clean_room_name: str,
+               asset_type: CleanRoomAssetAssetType,
+               name: str,
+               *,
+               asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
+        """Update an asset.
+        
+        Update a clean room asset. For example, updating the content of a notebook; changing the shared
+        partitions of a table; etc.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param name: str
+          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+          name displayed in the clean room UI.
+          
+          For UC securable assets (tables, volumes, etc.), the format is
+          *shared_catalog*.*shared_schema*.*asset_name*
+          
+          For notebooks, the name is the notebook file name.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+        body = asset.as_dict() if asset is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}',
+                           body=body,
+                           headers=headers)
+        return CleanRoomAsset.from_dict(res)
+
+
+class CleanRoomTaskRunsAPI:
+    """Clean room task runs are the executions of notebooks in a clean room."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def list(self,
+             clean_room_name: str,
+             *,
+             notebook_name: Optional[str] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]:
+        """List notebook task runs.
+        
+        List all the historical notebook task runs in a clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param notebook_name: str (optional)
+          Notebook name
+        :param page_size: int (optional)
+          The maximum number of task runs to return
+        :param page_token: str (optional)
+          Opaque pagination token to go to the next page based on the previous query.
+        
+        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        """
+
+        query = {}
+        if notebook_name is not None: query['notebook_name'] = notebook_name
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/clean-rooms/{clean_room_name}/runs',
+                                query=query,
+                                headers=headers)
+            if 'runs' in json:
+                for v in json['runs']:
+                    yield CleanRoomNotebookTaskRun.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
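+
+    # Usage sketch (illustrative, assuming the client exposes this API as
+    # `w.clean_room_task_runs`): fetch the run history of one notebook,
+    # 20 runs per request.
+    #
+    #     runs = w.clean_room_task_runs.list('demo-clean-room',
+    #                                        notebook_name='analysis_notebook',
+    #                                        page_size=20)
+    #     for run in runs:
+    #         print(run.notebook_name, run.start_time, run.run_duration)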
+
+
+class CleanRoomsAPI:
+    """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
+    environment where multiple parties can work together on sensitive enterprise data without direct access to
+    each other’s data."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
+        """Create a clean room.
+        
+        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+        name field inside the clean_room field can be used to poll the clean room status, using the
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a
+        PROVISIONING state. The clean room will be usable once it enters an ACTIVE state.
+        
+        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+        
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        """
+        body = clean_room.as_dict() if clean_room is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/clean-rooms', body=body, headers=headers)
+        return CleanRoom.from_dict(res)
+
+    def create_output_catalog(
+            self,
+            clean_room_name: str,
+            *,
+            output_catalog: Optional[CleanRoomOutputCatalog] = None) -> CreateCleanRoomOutputCatalogResponse:
+        """Create an output catalog.
+        
+        Create the output catalog of the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+        
+        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
+        """
+        body = output_catalog.as_dict() if output_catalog is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs',
+                           body=body,
+                           headers=headers)
+        return CreateCleanRoomOutputCatalogResponse.from_dict(res)
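+
+    # Usage sketch (illustrative, assuming the client exposes this API as
+    # `w.clean_rooms`): attach an output catalog so collaborators can persist
+    # results; the catalog name must satisfy UC securable naming requirements.
+    #
+    #     catalog = CleanRoomOutputCatalog(catalog_name='demo_output')
+    #     resp = w.clean_rooms.create_output_catalog('demo-clean-room',
+    #                                                output_catalog=catalog)
+    #     print(resp.output_catalog.status)  # CREATED once the catalog exists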
+
+    def delete(self, name: str):
+        """Delete a clean room.
+        
+        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+        but it will be in a DELETED state and no operations other than deletion can be performed on it.
+        
+        :param name: str
+          Name of the clean room.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/clean-rooms/{name}', headers=headers)
+
+    def get(self, name: str) -> CleanRoom:
+        """Get a clean room.
+        
+        Get the details of a clean room given its name.
+        
+        :param name: str
+        
+        :returns: :class:`CleanRoom`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/clean-rooms/{name}', headers=headers)
+        return CleanRoom.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[CleanRoom]:
+        """List clean rooms.
+        
+        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+        returned.
+        
+        :param page_size: int (optional)
+          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+        :param page_token: str (optional)
+          Opaque pagination token to go to the next page based on the previous query.
+        
+        :returns: Iterator over :class:`CleanRoom`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/clean-rooms', query=query, headers=headers)
+            if 'clean_rooms' in json:
+                for v in json['clean_rooms']:
+                    yield CleanRoom.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
+        """Update a clean room.
+        
+        Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+        privilege, or be a metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        :param name: str
+          Name of the clean room.
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        """
+        body = {}
+        if clean_room is not None: body['clean_room'] = clean_room.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/clean-rooms/{name}', body=body, headers=headers)
+        return CleanRoom.from_dict(res)
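+
+# End-to-end usage sketch (illustrative; the `w.clean_rooms` attribute and the
+# CleanRoom `name`/`status` field names are assumed, not confirmed by this
+# diff). Creation is asynchronous, so poll get() until the clean room leaves
+# PROVISIONING before using it:
+#
+#     import time
+#
+#     from databricks.sdk import WorkspaceClient
+#
+#     w = WorkspaceClient()
+#     created = w.clean_rooms.create(clean_room=CleanRoom(name='demo-clean-room'))
+#     while w.clean_rooms.get(created.name).status == CleanRoomStatusEnum.PROVISIONING:
+#         time.sleep(10)
+#     w.clean_rooms.delete(created.name)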
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 17567ab62..d8be32003 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -55,6 +55,16 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.is_meta_instance_profile is not None:
+            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddInstanceProfile:
         """Deserializes the AddInstanceProfile from a dictionary."""
@@ -72,6 +82,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddResponse:
         """Deserializes the AddResponse from a dictionary."""
@@ -90,6 +105,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Adlsgen2Info:
         """Deserializes the Adlsgen2Info from a dictionary."""
@@ -113,6 +134,13 @@ def as_dict(self) -> dict:
         if self.min_workers is not None: body['min_workers'] = self.min_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoScale into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.max_workers is not None: body['max_workers'] = self.max_workers
+        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoScale:
         """Deserializes the AutoScale from a dictionary."""
@@ -216,6 +244,22 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count
+        if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops
+        if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size
+        if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
+        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.spot_bid_price_percent is not None:
+            body['spot_bid_price_percent'] = self.spot_bid_price_percent
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsAttributes:
         """Deserializes the AwsAttributes from a dictionary."""
@@ -275,6 +319,15 @@ def as_dict(self) -> dict:
         if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
+        if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info
+        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureAttributes:
         """Deserializes the AzureAttributes from a dictionary."""
@@ -310,6 +363,14 @@ def as_dict(self) -> dict:
         if self.context_id is not None: body['contextId'] = self.context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelCommand into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.command_id is not None: body['commandId'] = self.command_id
+        if self.context_id is not None: body['contextId'] = self.context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelCommand:
         """Deserializes the CancelCommand from a dictionary."""
@@ -326,6 +387,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelResponse:
         """Deserializes the CancelResponse from a dictionary."""
@@ -347,6 +413,13 @@ def as_dict(self) -> dict:
         if self.owner_username is not None: body['owner_username'] = self.owner_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwner:
         """Deserializes the ChangeClusterOwner from a dictionary."""
@@ -361,6 +434,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChangeClusterOwnerResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwnerResponse:
         """Deserializes the ChangeClusterOwnerResponse from a dictionary."""
@@ -382,6 +460,13 @@ def as_dict(self) -> dict:
         if self.notebooks is not None: body['notebooks'] = self.notebooks
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jobs is not None: body['jobs'] = self.jobs
+        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClientsTypes:
         """Deserializes the ClientsTypes from a dictionary."""
@@ -399,6 +484,12 @@ def as_dict(self) -> dict:
         if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloneCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloneCluster:
         """Deserializes the CloneCluster from a dictionary."""
@@ -415,6 +506,12 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = [v.value for v in self.status]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudProviderNodeInfo:
         """Deserializes the CloudProviderNodeInfo from a dictionary."""
@@ -451,6 +548,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlRequest:
         """Deserializes the ClusterAccessControlRequest from a dictionary."""
@@ -488,6 +595,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlResponse:
         """Deserializes the ClusterAccessControlResponse from a dictionary."""
@@ -666,6 +784,38 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
         """Deserializes the ClusterAttributes from a dictionary."""
@@ -716,6 +866,14 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance:
         """Deserializes the ClusterCompliance from a dictionary."""
@@ -1008,6 +1166,59 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status
+        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver: body['driver'] = self.driver
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.executors: body['executors'] = self.executors
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
+        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
+        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.spec: body['spec'] = self.spec
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state is not None: body['state'] = self.state
+        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
+        if self.termination_reason: body['termination_reason'] = self.termination_reason
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
         """Deserializes the ClusterDetails from a dictionary."""
@@ -1086,6 +1297,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details
+        if self.details: body['details'] = self.details
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterEvent:
         """Deserializes the ClusterEvent from a dictionary."""
@@ -1111,6 +1332,13 @@ def as_dict(self) -> dict:
         if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterLibraryStatuses into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.library_statuses: body['library_statuses'] = self.library_statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLibraryStatuses:
         """Deserializes the ClusterLibraryStatuses from a dictionary."""
@@ -1137,6 +1365,13 @@ def as_dict(self) -> dict:
         if self.s3: body['s3'] = self.s3.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbfs: body['dbfs'] = self.dbfs
+        if self.s3: body['s3'] = self.s3
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
@@ -1160,6 +1395,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermission:
         """Deserializes the ClusterPermission from a dictionary."""
@@ -1193,6 +1436,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissions:
         """Deserializes the ClusterPermissions from a dictionary."""
@@ -1215,6 +1466,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsDescription:
         """Deserializes the ClusterPermissionsDescription from a dictionary."""
@@ -1237,6 +1495,13 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsRequest:
         """Deserializes the ClusterPermissionsRequest from a dictionary."""
@@ -1268,6 +1533,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlRequest:
         """Deserializes the ClusterPolicyAccessControlRequest from a dictionary."""
@@ -1305,6 +1580,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlResponse:
         """Deserializes the ClusterPolicyAccessControlResponse from a dictionary."""
@@ -1332,6 +1618,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermission:
         """Deserializes the ClusterPolicyPermission from a dictionary."""
@@ -1363,6 +1657,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissions:
         """Deserializes the ClusterPolicyPermissions from a dictionary."""
@@ -1386,6 +1688,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsDescription:
         """Deserializes the ClusterPolicyPermissionsDescription from a dictionary."""
@@ -1408,6 +1717,13 @@ def as_dict(self) -> dict:
         if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest:
         """Deserializes the ClusterPolicyPermissionsRequest from a dictionary."""
@@ -1442,6 +1758,14 @@ def as_dict(self) -> dict:
         if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange:
         """Deserializes the ClusterSettingsChange from a dictionary."""
@@ -1473,6 +1797,13 @@ def as_dict(self) -> dict:
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSize into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSize:
         """Deserializes the ClusterSize from a dictionary."""
@@ -1682,6 +2013,42 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
@@ -1736,6 +2103,15 @@ def as_dict(self) -> dict:
         if self.language is not None: body['language'] = self.language.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Command into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.command is not None: body['command'] = self.command
+        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.language is not None: body['language'] = self.language
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Command:
         """Deserializes the Command from a dictionary."""
@@ -1771,6 +2147,14 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.results: body['results'] = self.results
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommandStatusResponse:
         """Deserializes the CommandStatusResponse from a dictionary."""
@@ -1799,6 +2183,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContextStatusResponse:
         """Deserializes the ContextStatusResponse from a dictionary."""
@@ -2000,14 +2391,51 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
-        """Deserializes the CreateCluster from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.clone_from: body['clone_from'] = self.clone_from
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
+        """Deserializes the CreateCluster from a dictionary."""
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
+                   autoscale=_from_dict(d, 'autoscale', AutoScale),
+                   autotermination_minutes=d.get('autotermination_minutes', None),
+                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
+                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
                    clone_from=_from_dict(d, 'clone_from', CloneCluster),
                    cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
                    cluster_name=d.get('cluster_name', None),
@@ -2043,6 +2471,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateClusterResponse:
         """Deserializes the CreateClusterResponse from a dictionary."""
@@ -2063,6 +2497,13 @@ def as_dict(self) -> dict:
         if self.language is not None: body['language'] = self.language.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.language is not None: body['language'] = self.language
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateContext:
         """Deserializes the CreateContext from a dictionary."""
@@ -2152,6 +2593,25 @@ def as_dict(self) -> dict:
             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePool:
         """Deserializes the CreateInstancePool from a dictionary."""
@@ -2181,6 +2641,12 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse:
         """Deserializes the CreateInstancePoolResponse from a dictionary."""
@@ -2238,6 +2704,19 @@ def as_dict(self) -> dict:
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicy:
         """Deserializes the CreatePolicy from a dictionary."""
@@ -2261,6 +2740,12 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicyResponse:
         """Deserializes the CreatePolicyResponse from a dictionary."""
@@ -2278,6 +2763,12 @@ def as_dict(self) -> dict:
         if self.script_id is not None: body['script_id'] = self.script_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.script_id is not None: body['script_id'] = self.script_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -2294,6 +2785,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Created into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Created:
         """Deserializes the Created from a dictionary."""
@@ -2323,6 +2820,15 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.event_type is not None: body['event_type'] = self.event_type
+        if self.executor_failures is not None: body['executor_failures'] = self.executor_failures
+        if self.host_id is not None: body['host_id'] = self.host_id
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneEventDetails:
         """Deserializes the DataPlaneEventDetails from a dictionary."""
@@ -2379,6 +2885,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbfsStorageInfo:
         """Deserializes the DbfsStorageInfo from a dictionary."""
@@ -2396,6 +2908,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCluster:
         """Deserializes the DeleteCluster from a dictionary."""
@@ -2410,6 +2928,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteClusterResponse:
         """Deserializes the DeleteClusterResponse from a dictionary."""
@@ -2427,6 +2950,12 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePool:
         """Deserializes the DeleteInstancePool from a dictionary."""
@@ -2441,6 +2970,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePoolResponse:
         """Deserializes the DeleteInstancePoolResponse from a dictionary."""
@@ -2458,6 +2992,12 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePolicy:
         """Deserializes the DeletePolicy from a dictionary."""
@@ -2472,6 +3012,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePolicyResponse:
         """Deserializes the DeletePolicyResponse from a dictionary."""
@@ -2486,6 +3031,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -2505,6 +3055,13 @@ def as_dict(self) -> dict:
         if self.context_id is not None: body['contextId'] = self.context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DestroyContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.context_id is not None: body['contextId'] = self.context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DestroyContext:
         """Deserializes the DestroyContext from a dictionary."""
@@ -2519,6 +3076,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DestroyResponse:
         """Deserializes the DestroyResponse from a dictionary."""
@@ -2567,6 +3129,16 @@ def as_dict(self) -> dict:
         if self.disk_type: body['disk_type'] = self.disk_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DiskSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disk_count is not None: body['disk_count'] = self.disk_count
+        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
+        if self.disk_type: body['disk_type'] = self.disk_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskSpec:
         """Deserializes the DiskSpec from a dictionary."""
@@ -2591,6 +3163,14 @@ def as_dict(self) -> dict:
         if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DiskType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_disk_volume_type is not None:
+            body['azure_disk_volume_type'] = self.azure_disk_volume_type
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskType:
         """Deserializes the DiskType from a dictionary."""
@@ -2625,6 +3205,13 @@ def as_dict(self) -> dict:
         if self.username is not None: body['username'] = self.username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.password is not None: body['password'] = self.password
+        if self.username is not None: body['username'] = self.username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerBasicAuth:
         """Deserializes the DockerBasicAuth from a dictionary."""
@@ -2645,6 +3232,13 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DockerImage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.basic_auth: body['basic_auth'] = self.basic_auth
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerImage:
         """Deserializes the DockerImage from a dictionary."""
@@ -2852,6 +3446,43 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditCluster:
         """Deserializes the EditCluster from a dictionary."""
@@ -2893,6 +3524,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditClusterResponse:
         """Deserializes the EditClusterResponse from a dictionary."""
@@ -2948,6 +3584,19 @@ def as_dict(self) -> dict:
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditInstancePool:
         """Deserializes the EditInstancePool from a dictionary."""
@@ -2968,6 +3617,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditInstancePoolResponse:
         """Deserializes the EditInstancePoolResponse from a dictionary."""
@@ -3029,6 +3683,20 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPolicy:
         """Deserializes the EditPolicy from a dictionary."""
@@ -3050,6 +3718,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPolicyResponse:
         """Deserializes the EditPolicyResponse from a dictionary."""
@@ -3064,6 +3737,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditResponse:
         """Deserializes the EditResponse from a dictionary."""
@@ -3086,6 +3764,13 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest:
         """Deserializes the EnforceClusterComplianceRequest from a dictionary."""
@@ -3109,6 +3794,13 @@ def as_dict(self) -> dict:
         if self.has_changes is not None: body['has_changes'] = self.has_changes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse:
         """Deserializes the EnforceClusterComplianceResponse from a dictionary."""
@@ -3141,6 +3833,13 @@ def as_dict(self) -> dict:
         if self.dependencies: body['dependencies'] = [v for v in self.dependencies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Environment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client is not None: body['client'] = self.client
+        if self.dependencies: body['dependencies'] = self.dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Environment:
         """Deserializes the Environment from a dictionary."""
@@ -3238,6 +3937,32 @@ def as_dict(self) -> dict:
         if self.user is not None: body['user'] = self.user
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attributes: body['attributes'] = self.attributes
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_size: body['cluster_size'] = self.cluster_size
+        if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus
+        if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers
+        if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message
+        if self.enable_termination_for_node_blocklisted is not None:
+            body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
+        if self.free_space is not None: body['free_space'] = self.free_space
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.job_run_name is not None: body['job_run_name'] = self.job_run_name
+        if self.previous_attributes: body['previous_attributes'] = self.previous_attributes
+        if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size
+        if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size
+        if self.reason: body['reason'] = self.reason
+        if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus
+        if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers
+        if self.user is not None: body['user'] = self.user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EventDetails:
         """Deserializes the EventDetails from a dictionary."""
@@ -3348,6 +4073,19 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+        if self.use_preemptible_executors is not None:
+            body['use_preemptible_executors'] = self.use_preemptible_executors
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpAttributes:
         """Deserializes the GcpAttributes from a dictionary."""
@@ -3379,6 +4117,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo:
         """Deserializes the GcsStorageInfo from a dictionary."""
@@ -3403,6 +4147,13 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse:
         """Deserializes the GetClusterComplianceResponse from a dictionary."""
@@ -3420,6 +4171,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPermissionLevelsResponse:
         """Deserializes the GetClusterPermissionLevelsResponse from a dictionary."""
@@ -3437,6 +4194,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPolicyPermissionLevelsResponse:
         """Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary."""
@@ -3482,6 +4245,18 @@ def as_dict(self) -> dict:
         if self.start_time is not None: body['start_time'] = self.start_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetEvents into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.event_types: body['event_types'] = self.event_types
+        if self.limit is not None: body['limit'] = self.limit
+        if self.offset is not None: body['offset'] = self.offset
+        if self.order is not None: body['order'] = self.order
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetEvents:
         """Deserializes the GetEvents from a dictionary."""
@@ -3521,6 +4296,14 @@ def as_dict(self) -> dict:
         if self.total_count is not None: body['total_count'] = self.total_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.events: body['events'] = self.events
+        if self.next_page: body['next_page'] = self.next_page
+        if self.total_count is not None: body['total_count'] = self.total_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetEventsResponse:
         """Deserializes the GetEventsResponse from a dictionary."""
@@ -3640,6 +4423,30 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        if self.state is not None: body['state'] = self.state
+        if self.stats: body['stats'] = self.stats
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePool:
         """Deserializes the GetInstancePool from a dictionary."""
@@ -3674,6 +4481,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePoolPermissionLevelsResponse:
         """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary."""
@@ -3692,6 +4505,12 @@ def as_dict(self) -> dict:
         if self.versions: body['versions'] = [v.as_dict() for v in self.versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.versions: body['versions'] = self.versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSparkVersionsResponse:
         """Deserializes the GetSparkVersionsResponse from a dictionary."""
@@ -3729,6 +4548,15 @@ def as_dict(self) -> dict:
         if self.script is not None: body['script'] = self.script
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptCreateRequest:
         """Deserializes the GlobalInitScriptCreateRequest from a dictionary."""
@@ -3778,6 +4606,19 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetails:
         """Deserializes the GlobalInitScriptDetails from a dictionary."""
@@ -3835,6 +4676,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetailsWithContent:
         """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary."""
@@ -3884,6 +4739,16 @@ def as_dict(self) -> dict:
         if self.script_id is not None: body['script_id'] = self.script_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        if self.script_id is not None: body['script_id'] = self.script_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptUpdateRequest:
         """Deserializes the GlobalInitScriptUpdateRequest from a dictionary."""
@@ -3913,6 +4778,14 @@ def as_dict(self) -> dict:
         if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster: body['cluster'] = self.cluster
+        if self.global_: body['global'] = self.global_
+        if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptEventDetails:
         """Deserializes the InitScriptEventDetails from a dictionary."""
@@ -3941,6 +4814,15 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptExecutionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.execution_duration_seconds is not None:
+            body['execution_duration_seconds'] = self.execution_duration_seconds
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptExecutionDetails:
         """Deserializes the InitScriptExecutionDetails from a dictionary."""
@@ -4003,6 +4885,18 @@ def as_dict(self) -> dict:
         if self.workspace: body['workspace'] = self.workspace.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.abfss: body['abfss'] = self.abfss
+        if self.dbfs: body['dbfs'] = self.dbfs
+        if self.file: body['file'] = self.file
+        if self.gcs: body['gcs'] = self.gcs
+        if self.s3: body['s3'] = self.s3
+        if self.volumes: body['volumes'] = self.volumes
+        if self.workspace: body['workspace'] = self.workspace
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfo:
         """Deserializes the InitScriptInfo from a dictionary."""
@@ -4030,6 +4924,13 @@ def as_dict(self) -> dict:
         if self.script: body['script'] = self.script.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.execution_details: body['execution_details'] = self.execution_details
+        if self.script: body['script'] = self.script
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfoAndExecutionDetails:
         """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary."""
@@ -4052,6 +4953,13 @@ def as_dict(self) -> dict:
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.libraries: body['libraries'] = self.libraries
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallLibraries:
         """Deserializes the InstallLibraries from a dictionary."""
@@ -4066,6 +4974,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallLibrariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallLibrariesResponse:
         """Deserializes the InstallLibrariesResponse from a dictionary."""
@@ -4096,6 +5009,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlRequest:
         """Deserializes the InstancePoolAccessControlRequest from a dictionary."""
@@ -4133,6 +5056,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlResponse:
         """Deserializes the InstancePoolAccessControlResponse from a dictionary."""
@@ -4254,6 +5188,30 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        if self.state is not None: body['state'] = self.state
+        if self.stats: body['stats'] = self.stats
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAndStats:
         """Deserializes the InstancePoolAndStats from a dictionary."""
@@ -4313,6 +5271,15 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.spot_bid_price_percent is not None:
+            body['spot_bid_price_percent'] = self.spot_bid_price_percent
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAwsAttributes:
         """Deserializes the InstancePoolAwsAttributes from a dictionary."""
@@ -4348,6 +5315,13 @@ def as_dict(self) -> dict:
         if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAzureAttributes:
         """Deserializes the InstancePoolAzureAttributes from a dictionary."""
@@ -4399,6 +5373,14 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability
+        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolGcpAttributes:
         """Deserializes the InstancePoolGcpAttributes from a dictionary."""
@@ -4424,6 +5406,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermission:
         """Deserializes the InstancePoolPermission from a dictionary."""
@@ -4456,6 +5446,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissions:
         """Deserializes the InstancePoolPermissions from a dictionary."""
@@ -4479,6 +5477,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsDescription:
         """Deserializes the InstancePoolPermissionsDescription from a dictionary."""
@@ -4501,6 +5506,13 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsRequest:
         """Deserializes the InstancePoolPermissionsRequest from a dictionary."""
@@ -4540,6 +5552,15 @@ def as_dict(self) -> dict:
         if self.used_count is not None: body['used_count'] = self.used_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.idle_count is not None: body['idle_count'] = self.idle_count
+        if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count
+        if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count
+        if self.used_count is not None: body['used_count'] = self.used_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStats:
         """Deserializes the InstancePoolStats from a dictionary."""
@@ -4563,6 +5584,12 @@ def as_dict(self) -> dict:
             body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStatus:
         """Deserializes the InstancePoolStatus from a dictionary."""
@@ -4598,6 +5625,15 @@ def as_dict(self) -> dict:
             body['is_meta_instance_profile'] = self.is_meta_instance_profile
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.is_meta_instance_profile is not None:
+            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstanceProfile:
         """Deserializes the InstanceProfile from a dictionary."""
@@ -4660,6 +5696,18 @@ def as_dict(self) -> dict:
         if self.whl is not None: body['whl'] = self.whl
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Library into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cran: body['cran'] = self.cran
+        if self.egg is not None: body['egg'] = self.egg
+        if self.jar is not None: body['jar'] = self.jar
+        if self.maven: body['maven'] = self.maven
+        if self.pypi: body['pypi'] = self.pypi
+        if self.requirements is not None: body['requirements'] = self.requirements
+        if self.whl is not None: body['whl'] = self.whl
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Library:
         """Deserializes the Library from a dictionary."""
@@ -4698,6 +5746,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_library_for_all_clusters is not None:
+            body['is_library_for_all_clusters'] = self.is_library_for_all_clusters
+        if self.library: body['library'] = self.library
+        if self.messages: body['messages'] = self.messages
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LibraryFullStatus:
         """Deserializes the LibraryFullStatus from a dictionary."""
@@ -4731,6 +5789,12 @@ def as_dict(self) -> dict:
         if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.statuses: body['statuses'] = self.statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllClusterLibraryStatusesResponse:
         """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary."""
@@ -4752,6 +5816,13 @@ def as_dict(self) -> dict:
         if self.zones: body['zones'] = [v for v in self.zones]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_zone is not None: body['default_zone'] = self.default_zone
+        if self.zones: body['zones'] = self.zones
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse:
         """Deserializes the ListAvailableZonesResponse from a dictionary."""
@@ -4779,6 +5850,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clusters: body['clusters'] = self.clusters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse:
         """Deserializes the ListClusterCompliancesResponse from a dictionary."""
@@ -4810,6 +5889,15 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_sources: body['cluster_sources'] = self.cluster_sources
+        if self.cluster_states: body['cluster_states'] = self.cluster_states
+        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy:
         """Deserializes the ListClustersFilterBy from a dictionary."""
@@ -4840,6 +5928,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clusters: body['clusters'] = self.clusters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse:
         """Deserializes the ListClustersResponse from a dictionary."""
@@ -4864,6 +5960,13 @@ def as_dict(self) -> dict:
         if self.field is not None: body['field'] = self.field.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersSortBy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.direction is not None: body['direction'] = self.direction
+        if self.field is not None: body['field'] = self.field
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy:
         """Deserializes the ListClustersSortBy from a dictionary."""
@@ -4896,6 +5999,12 @@ def as_dict(self) -> dict:
         if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scripts: body['scripts'] = self.scripts
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGlobalInitScriptsResponse:
         """Deserializes the ListGlobalInitScriptsResponse from a dictionary."""
@@ -4912,6 +6021,12 @@ def as_dict(self) -> dict:
         if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pools: body['instance_pools'] = self.instance_pools
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstancePools:
         """Deserializes the ListInstancePools from a dictionary."""
@@ -4929,6 +6044,12 @@ def as_dict(self) -> dict:
         if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_profiles: body['instance_profiles'] = self.instance_profiles
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstanceProfilesResponse:
         """Deserializes the ListInstanceProfilesResponse from a dictionary."""
@@ -4946,6 +6067,12 @@ def as_dict(self) -> dict:
         if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.node_types: body['node_types'] = self.node_types
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNodeTypesResponse:
         """Deserializes the ListNodeTypesResponse from a dictionary."""
@@ -4963,6 +6090,12 @@ def as_dict(self) -> dict:
         if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policies: body['policies'] = self.policies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse:
         """Deserializes the ListPoliciesResponse from a dictionary."""
@@ -4985,6 +6118,13 @@ def as_dict(self) -> dict:
         if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policy_families: body['policy_families'] = self.policy_families
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPolicyFamiliesResponse:
         """Deserializes the ListPolicyFamiliesResponse from a dictionary."""
@@ -5016,6 +6156,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LocalFileInfo:
         """Deserializes the LocalFileInfo from a dictionary."""
@@ -5039,6 +6185,15 @@ def as_dict(self) -> dict:
             body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_analytics_primary_key is not None:
+            body['log_analytics_primary_key'] = self.log_analytics_primary_key
+        if self.log_analytics_workspace_id is not None:
+            body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogAnalyticsInfo:
         """Deserializes the LogAnalyticsInfo from a dictionary."""
@@ -5063,6 +6218,13 @@ def as_dict(self) -> dict:
         if self.last_exception is not None: body['last_exception'] = self.last_exception
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
+        if self.last_exception is not None: body['last_exception'] = self.last_exception
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogSyncStatus:
         """Deserializes the LogSyncStatus from a dictionary."""
@@ -5092,6 +6254,14 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.coordinates is not None: body['coordinates'] = self.coordinates
+        if self.exclusions: body['exclusions'] = self.exclusions
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MavenLibrary:
         """Deserializes the MavenLibrary from a dictionary."""
@@ -5123,6 +6293,17 @@ def as_dict(self) -> dict:
         if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
+        if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb
+        if self.local_disks is not None: body['local_disks'] = self.local_disks
+        if self.local_nvme_disk_size_gb is not None:
+            body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb
+        if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NodeInstanceType:
         """Deserializes the NodeInstanceType from a dictionary."""
@@ -5217,6 +6398,34 @@ def as_dict(self) -> dict:
         if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NodeType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.category is not None: body['category'] = self.category
+        if self.description is not None: body['description'] = self.description
+        if self.display_order is not None: body['display_order'] = self.display_order
+        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
+        if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated
+        if self.is_encrypted_in_transit is not None:
+            body['is_encrypted_in_transit'] = self.is_encrypted_in_transit
+        if self.is_graviton is not None: body['is_graviton'] = self.is_graviton
+        if self.is_hidden is not None: body['is_hidden'] = self.is_hidden
+        if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled
+        if self.memory_mb is not None: body['memory_mb'] = self.memory_mb
+        if self.node_info: body['node_info'] = self.node_info
+        if self.node_instance_type: body['node_instance_type'] = self.node_instance_type
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_cores is not None: body['num_cores'] = self.num_cores
+        if self.num_gpus is not None: body['num_gpus'] = self.num_gpus
+        if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable
+        if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable
+        if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags
+        if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes
+        if self.support_port_forwarding is not None:
+            body['support_port_forwarding'] = self.support_port_forwarding
+        if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NodeType:
         """Deserializes the NodeType from a dictionary."""
@@ -5256,6 +6465,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PendingInstanceError:
         """Deserializes the PendingInstanceError from a dictionary."""
@@ -5273,6 +6489,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteCluster:
         """Deserializes the PermanentDeleteCluster from a dictionary."""
@@ -5287,6 +6509,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermanentDeleteClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteClusterResponse:
         """Deserializes the PermanentDeleteClusterResponse from a dictionary."""
@@ -5304,6 +6531,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PinCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PinCluster:
         """Deserializes the PinCluster from a dictionary."""
@@ -5318,6 +6551,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PinClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse:
         """Deserializes the PinClusterResponse from a dictionary."""
@@ -5395,6 +6633,23 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Policy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.is_default is not None: body['is_default'] = self.is_default
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Policy:
         """Deserializes the Policy from a dictionary."""
@@ -5436,6 +6691,15 @@ def as_dict(self) -> dict:
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PolicyFamily:
         """Deserializes the PolicyFamily from a dictionary."""
@@ -5461,6 +6725,13 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonPyPiLibrary:
         """Deserializes the PythonPyPiLibrary from a dictionary."""
@@ -5482,6 +6753,13 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RCranLibrary:
         """Deserializes the RCranLibrary from a dictionary."""
@@ -5499,6 +6777,12 @@ def as_dict(self) -> dict:
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveInstanceProfile:
         """Deserializes the RemoveInstanceProfile from a dictionary."""
@@ -5513,6 +6797,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveResponse:
         """Deserializes the RemoveResponse from a dictionary."""
@@ -5546,6 +6835,14 @@ def as_dict(self) -> dict:
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResizeCluster:
         """Deserializes the ResizeCluster from a dictionary."""
@@ -5562,6 +6859,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResizeClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResizeClusterResponse:
         """Deserializes the ResizeClusterResponse from a dictionary."""
@@ -5583,6 +6885,13 @@ def as_dict(self) -> dict:
         if self.restart_user is not None: body['restart_user'] = self.restart_user
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.restart_user is not None: body['restart_user'] = self.restart_user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartCluster:
         """Deserializes the RestartCluster from a dictionary."""
@@ -5597,6 +6906,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartClusterResponse:
         """Deserializes the RestartClusterResponse from a dictionary."""
@@ -5656,6 +6970,21 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Results into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.data: body['data'] = self.data
+        if self.file_name is not None: body['fileName'] = self.file_name
+        if self.file_names: body['fileNames'] = self.file_names
+        if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema
+        if self.pos is not None: body['pos'] = self.pos
+        if self.result_type is not None: body['resultType'] = self.result_type
+        if self.schema: body['schema'] = self.schema
+        if self.summary is not None: body['summary'] = self.summary
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Results:
         """Deserializes the Results from a dictionary."""
@@ -5732,6 +7061,18 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.canned_acl is not None: body['canned_acl'] = self.canned_acl
+        if self.destination is not None: body['destination'] = self.destination
+        if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption
+        if self.encryption_type is not None: body['encryption_type'] = self.encryption_type
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.kms_key is not None: body['kms_key'] = self.kms_key
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> S3StorageInfo:
         """Deserializes the S3StorageInfo from a dictionary."""
@@ -5788,6 +7129,18 @@ def as_dict(self) -> dict:
         if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkNode into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes
+        if self.node_id is not None: body['node_id'] = self.node_id
+        if self.private_ip is not None: body['private_ip'] = self.private_ip
+        if self.public_dns is not None: body['public_dns'] = self.public_dns
+        if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNode:
         """Deserializes the SparkNode from a dictionary."""
@@ -5811,6 +7164,12 @@ def as_dict(self) -> dict:
         if self.is_spot is not None: body['is_spot'] = self.is_spot
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_spot is not None: body['is_spot'] = self.is_spot
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNodeAwsAttributes:
         """Deserializes the SparkNodeAwsAttributes from a dictionary."""
@@ -5835,6 +7194,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkVersion into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkVersion:
         """Deserializes the SparkVersion from a dictionary."""
@@ -5852,6 +7218,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartCluster:
         """Deserializes the StartCluster from a dictionary."""
@@ -5866,6 +7238,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartClusterResponse:
         """Deserializes the StartClusterResponse from a dictionary."""
@@ -5904,6 +7281,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.parameters: body['parameters'] = self.parameters
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
@@ -6020,6 +7405,13 @@ def as_dict(self) -> dict:
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.libraries: body['libraries'] = self.libraries
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UninstallLibraries:
         """Deserializes the UninstallLibraries from a dictionary."""
@@ -6034,6 +7426,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UninstallLibrariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UninstallLibrariesResponse:
         """Deserializes the UninstallLibrariesResponse from a dictionary."""
@@ -6051,6 +7448,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpinCluster:
         """Deserializes the UnpinCluster from a dictionary."""
@@ -6065,6 +7468,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpinClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse:
         """Deserializes the UnpinClusterResponse from a dictionary."""
@@ -6093,6 +7501,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster: body['cluster'] = self.cluster
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCluster:
         """Deserializes the UpdateCluster from a dictionary."""
@@ -6285,6 +7701,40 @@ def as_dict(self) -> dict:
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
         """Deserializes the UpdateClusterResource from a dictionary."""
@@ -6324,6 +7774,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse:
         """Deserializes the UpdateClusterResponse from a dictionary."""
@@ -6338,6 +7793,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -6355,6 +7815,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumesStorageInfo:
         """Deserializes the VolumesStorageInfo from a dictionary."""
@@ -6372,6 +7838,12 @@ def as_dict(self) -> dict:
         if self.clients: body['clients'] = self.clients.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkloadType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clients: body['clients'] = self.clients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkloadType:
         """Deserializes the WorkloadType from a dictionary."""
@@ -6389,6 +7861,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceStorageInfo:
         """Deserializes the WorkspaceStorageInfo from a dictionary."""
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 5f9fe2c2c..da908cb2d 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -42,6 +42,14 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
@@ -105,6 +113,21 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.path is not None: body['path'] = self.path
+        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
@@ -125,6 +148,27 @@ class DashboardView(Enum):
     DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC'
 
 
+class DataType(Enum):
+
+    DATA_TYPE_ARRAY = 'DATA_TYPE_ARRAY'
+    DATA_TYPE_BIG_INT = 'DATA_TYPE_BIG_INT'
+    DATA_TYPE_BINARY = 'DATA_TYPE_BINARY'
+    DATA_TYPE_BOOLEAN = 'DATA_TYPE_BOOLEAN'
+    DATA_TYPE_DATE = 'DATA_TYPE_DATE'
+    DATA_TYPE_DECIMAL = 'DATA_TYPE_DECIMAL'
+    DATA_TYPE_DOUBLE = 'DATA_TYPE_DOUBLE'
+    DATA_TYPE_FLOAT = 'DATA_TYPE_FLOAT'
+    DATA_TYPE_INT = 'DATA_TYPE_INT'
+    DATA_TYPE_INTERVAL = 'DATA_TYPE_INTERVAL'
+    DATA_TYPE_MAP = 'DATA_TYPE_MAP'
+    DATA_TYPE_SMALL_INT = 'DATA_TYPE_SMALL_INT'
+    DATA_TYPE_STRING = 'DATA_TYPE_STRING'
+    DATA_TYPE_STRUCT = 'DATA_TYPE_STRUCT'
+    DATA_TYPE_TIMESTAMP = 'DATA_TYPE_TIMESTAMP'
+    DATA_TYPE_TINY_INT = 'DATA_TYPE_TINY_INT'
+    DATA_TYPE_VOID = 'DATA_TYPE_VOID'
+
+
 @dataclass
 class DeleteScheduleResponse:
 
@@ -133,6 +177,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScheduleResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScheduleResponse:
         """Deserializes the DeleteScheduleResponse from a dictionary."""
@@ -147,6 +196,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSubscriptionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse:
         """Deserializes the DeleteSubscriptionResponse from a dictionary."""
@@ -168,6 +222,13 @@ def as_dict(self) -> dict:
         if self.text: body['text'] = self.text.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query: body['query'] = self.query
+        if self.text: body['text'] = self.text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieAttachment:
         """Deserializes the GenieAttachment from a dictionary."""
@@ -206,6 +267,18 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieConversation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.title is not None: body['title'] = self.title
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieConversation:
         """Deserializes the GenieConversation from a dictionary."""
@@ -236,6 +309,14 @@ def as_dict(self) -> dict:
         if self.space_id is not None: body['space_id'] = self.space_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest:
         """Deserializes the GenieCreateConversationMessageRequest from a dictionary."""
@@ -256,6 +337,12 @@ def as_dict(self) -> dict:
         if self.statement_response: body['statement_response'] = self.statement_response.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.statement_response: body['statement_response'] = self.statement_response
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse:
         """Deserializes the GenieGetMessageQueryResultResponse from a dictionary."""
@@ -325,6 +412,23 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attachments: body['attachments'] = self.attachments
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.error: body['error'] = self.error
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query_result: body['query_result'] = self.query_result
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieMessage:
         """Deserializes the GenieMessage from a dictionary."""
@@ -356,6 +460,13 @@ def as_dict(self) -> dict:
         if self.space_id is not None: body['space_id'] = self.space_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest:
         """Deserializes the GenieStartConversationMessageRequest from a dictionary."""
@@ -383,6 +494,15 @@ def as_dict(self) -> dict:
         if self.message_id is not None: body['message_id'] = self.message_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.conversation: body['conversation'] = self.conversation
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.message: body['message'] = self.message
+        if self.message_id is not None: body['message_id'] = self.message_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
         """Deserializes the GenieStartConversationResponse from a dictionary."""
@@ -413,6 +533,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboards: body['dashboards'] = self.dashboards
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDashboardsResponse:
         """Deserializes the ListDashboardsResponse from a dictionary."""
@@ -435,6 +562,13 @@ def as_dict(self) -> dict:
         if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schedules: body['schedules'] = self.schedules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchedulesResponse:
         """Deserializes the ListSchedulesResponse from a dictionary."""
@@ -457,6 +591,13 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse:
         """Deserializes the ListSubscriptionsResponse from a dictionary."""
@@ -477,6 +618,13 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MessageError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error is not None: body['error'] = self.error
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MessageError:
         """Deserializes the MessageError from a dictionary."""
@@ -571,6 +719,14 @@ def as_dict(self) -> dict:
         if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
         """Deserializes the MigrateDashboardRequest from a dictionary."""
@@ -599,6 +755,14 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishRequest:
         """Deserializes the PublishRequest from a dictionary."""
@@ -630,6 +794,15 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
         """Deserializes the PublishedDashboard from a dictionary."""
@@ -641,6 +814,8 @@ def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
 
 @dataclass
 class QueryAttachment:
+    cached_query_schema: Optional[QuerySchema] = None
+
     description: Optional[str] = None
     """Description of the query"""
 
@@ -665,6 +840,21 @@ class QueryAttachment:
     def as_dict(self) -> dict:
         """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema.as_dict()
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
+        if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query is not None: body['query'] = self.query
+        if self.title is not None: body['title'] = self.title
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema
         if self.description is not None: body['description'] = self.description
         if self.id is not None: body['id'] = self.id
         if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
@@ -678,7 +868,8 @@ def as_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
         """Deserializes the QueryAttachment from a dictionary."""
-        return cls(description=d.get('description', None),
+        return cls(cached_query_schema=_from_dict(d, 'cached_query_schema', QuerySchema),
+                   description=d.get('description', None),
                    id=d.get('id', None),
                    instruction_id=d.get('instruction_id', None),
                    instruction_title=d.get('instruction_title', None),
@@ -687,6 +878,69 @@ def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
                    title=d.get('title', None))
 
 
+@dataclass
+class QuerySchema:
+    columns: Optional[List[QuerySchemaColumn]] = None
+
+    statement_id: Optional[str] = None
+    """Used to determine if the stored query schema is compatible with the latest run. The service
+    should always clear the schema when the query is re-executed."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuerySchema into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuerySchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QuerySchema:
+        """Deserializes the QuerySchema from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', QuerySchemaColumn),
+                   statement_id=d.get('statement_id', None))
+
+
+@dataclass
+class QuerySchemaColumn:
+    name: str
+
+    type_text: str
+    """Corresponds to type desc"""
+
+    data_type: DataType
+    """Populated from https://docs.databricks.com/sql/language-manual/sql-ref-datatypes.html"""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuerySchemaColumn into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_type is not None: body['data_type'] = self.data_type.value
+        if self.name is not None: body['name'] = self.name
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuerySchemaColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.name is not None: body['name'] = self.name
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QuerySchemaColumn:
+        """Deserializes the QuerySchemaColumn from a dictionary."""
+        return cls(data_type=_enum(d, 'data_type', DataType),
+                   name=d.get('name', None),
+                   type_text=d.get('type_text', None))
+
+
 @dataclass
 class Result:
     is_truncated: Optional[bool] = None
@@ -707,6 +961,14 @@ def as_dict(self) -> dict:
         if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Result into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Result:
         """Deserializes the Result from a dictionary."""
@@ -760,6 +1022,20 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Schedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Schedule:
         """Deserializes the Schedule from a dictionary."""
@@ -797,6 +1073,13 @@ def as_dict(self) -> dict:
         if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Subscriber into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber
+        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscriber:
         """Deserializes the Subscriber from a dictionary."""
@@ -846,6 +1129,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Subscription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.etag is not None: body['etag'] = self.etag
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.subscriber: body['subscriber'] = self.subscriber
+        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscription:
         """Deserializes the Subscription from a dictionary."""
@@ -870,6 +1166,12 @@ def as_dict(self) -> dict:
         if self.destination_id is not None: body['destination_id'] = self.destination_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberDestination:
         """Deserializes the SubscriptionSubscriberDestination from a dictionary."""
@@ -887,6 +1189,12 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser:
         """Deserializes the SubscriptionSubscriberUser from a dictionary."""
@@ -907,6 +1215,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TextAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextAttachment:
         """Deserializes the TextAttachment from a dictionary."""
@@ -921,6 +1236,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TrashDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TrashDashboardResponse:
         """Deserializes the TrashDashboardResponse from a dictionary."""
@@ -935,6 +1255,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpublishDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpublishDashboardResponse:
         """Deserializes the UnpublishDashboardResponse from a dictionary."""
diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py
index 255e1c1a0..07cdaea54 100755
--- a/databricks/sdk/service/files.py
+++ b/databricks/sdk/service/files.py
@@ -28,6 +28,13 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddBlock into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data is not None: body['data'] = self.data
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddBlock:
         """Deserializes the AddBlock from a dictionary."""
@@ -42,6 +49,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddBlockResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddBlockResponse:
         """Deserializes the AddBlockResponse from a dictionary."""
@@ -59,6 +71,12 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Close into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Close:
         """Deserializes the Close from a dictionary."""
@@ -73,6 +91,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloseResponse:
         """Deserializes the CloseResponse from a dictionary."""
@@ -94,6 +117,13 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Create into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Create:
         """Deserializes the Create from a dictionary."""
@@ -108,6 +138,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateDirectoryResponse:
         """Deserializes the CreateDirectoryResponse from a dictionary."""
@@ -126,6 +161,12 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -148,6 +189,13 @@ def as_dict(self) -> dict:
         if self.recursive is not None: body['recursive'] = self.recursive
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Delete into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.recursive is not None: body['recursive'] = self.recursive
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
@@ -162,6 +210,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDirectoryResponse:
         """Deserializes the DeleteDirectoryResponse from a dictionary."""
@@ -176,6 +229,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -209,6 +267,16 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_directory is not None: body['is_directory'] = self.is_directory
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.name is not None: body['name'] = self.name
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectoryEntry:
         """Deserializes the DirectoryEntry from a dictionary."""
@@ -238,6 +306,15 @@ def as_dict(self) -> dict:
         if self.last_modified is not None: body['last-modified'] = self.last_modified
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content_length is not None: body['content-length'] = self.content_length
+        if self.content_type is not None: body['content-type'] = self.content_type
+        if self.contents: body['contents'] = self.contents
+        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
@@ -270,6 +347,15 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_dir is not None: body['is_dir'] = self.is_dir
+        if self.modification_time is not None: body['modification_time'] = self.modification_time
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -287,6 +373,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetDirectoryMetadataResponse:
         """Deserializes the GetDirectoryMetadataResponse from a dictionary."""
@@ -309,6 +400,14 @@ def as_dict(self) -> dict:
         if self.last_modified is not None: body['last-modified'] = self.last_modified
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content_length is not None: body['content-length'] = self.content_length
+        if self.content_type is not None: body['content-type'] = self.content_type
+        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetadataResponse:
         """Deserializes the GetMetadataResponse from a dictionary."""
@@ -332,6 +431,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDirectoryResponse:
         """Deserializes the ListDirectoryResponse from a dictionary."""
@@ -350,6 +456,12 @@ def as_dict(self) -> dict:
         if self.files: body['files'] = [v.as_dict() for v in self.files]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.files: body['files'] = self.files
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStatusResponse:
         """Deserializes the ListStatusResponse from a dictionary."""
@@ -367,6 +479,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkDirs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkDirs:
         """Deserializes the MkDirs from a dictionary."""
@@ -381,6 +499,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkDirsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkDirsResponse:
         """Deserializes the MkDirsResponse from a dictionary."""
@@ -402,6 +525,13 @@ def as_dict(self) -> dict:
         if self.source_path is not None: body['source_path'] = self.source_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Move into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_path is not None: body['destination_path'] = self.destination_path
+        if self.source_path is not None: body['source_path'] = self.source_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Move:
         """Deserializes the Move from a dictionary."""
@@ -416,6 +546,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MoveResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MoveResponse:
         """Deserializes the MoveResponse from a dictionary."""
@@ -441,6 +576,14 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Put into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents is not None: body['contents'] = self.contents
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Put:
         """Deserializes the Put from a dictionary."""
@@ -457,6 +600,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutResponse:
         """Deserializes the PutResponse from a dictionary."""
@@ -479,6 +627,13 @@ def as_dict(self) -> dict:
         if self.data is not None: body['data'] = self.data
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bytes_read is not None: body['bytes_read'] = self.bytes_read
+        if self.data is not None: body['data'] = self.data
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReadResponse:
         """Deserializes the ReadResponse from a dictionary."""
@@ -493,6 +648,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UploadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UploadResponse:
         """Deserializes the UploadResponse from a dictionary."""
@@ -833,8 +993,8 @@ def delete_directory(self, directory_path: str):
     def download(self, file_path: str) -> DownloadResponse:
         """Download a file.
         
-        Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard HTTP file
-        download, not a JSON RPC.
+        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
         
         :param file_path: str
           The absolute path of the file.
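
Since the `download` docstring now advertises plain-HTTP semantics (a streaming body with `Range` and `If-Unmodified-Since` support) rather than a 5 GiB cap, a short usage sketch may help; the volume path below is hypothetical:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# DownloadResponse.contents is a streaming binary file-like object;
# read it in chunks rather than loading a large file into memory.
resp = w.files.download('/Volumes/main/default/my_volume/report.csv')
data = resp.contents.read()
```
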
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index fc0122b2b..28e5247a6 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -38,6 +38,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlRequest:
         """Deserializes the AccessControlRequest from a dictionary."""
@@ -75,6 +85,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse:
         """Deserializes the AccessControlResponse from a dictionary."""
@@ -107,6 +128,16 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplexValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display is not None: body['display'] = self.display
+        if self.primary is not None: body['primary'] = self.primary
+        if self.ref is not None: body['$ref'] = self.ref
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplexValue:
         """Deserializes the ComplexValue from a dictionary."""
@@ -125,6 +156,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -139,6 +175,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWorkspacePermissionAssignmentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse:
         """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary."""
@@ -155,6 +196,12 @@ def as_dict(self) -> dict:
         if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.roles: body['roles'] = self.roles
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAssignableRolesForResourceResponse:
         """Deserializes the GetAssignableRolesForResourceResponse from a dictionary."""
@@ -172,6 +219,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPasswordPermissionLevelsResponse:
         """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary."""
@@ -189,6 +242,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPermissionLevelsResponse:
         """Deserializes the GetPermissionLevelsResponse from a dictionary."""
@@ -216,6 +275,13 @@ def as_dict(self) -> dict:
         if self.role is not None: body['role'] = self.role
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GrantRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principals: body['principals'] = self.principals
+        if self.role is not None: body['role'] = self.role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GrantRule:
         """Deserializes the GrantRule from a dictionary."""
@@ -265,6 +331,20 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Group into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.members: body['members'] = self.members
+        if self.meta: body['meta'] = self.meta
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Group:
         """Deserializes the Group from a dictionary."""
@@ -311,6 +391,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGroupsResponse:
         """Deserializes the ListGroupsResponse from a dictionary."""
@@ -353,6 +443,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalResponse:
         """Deserializes the ListServicePrincipalResponse from a dictionary."""
@@ -396,6 +496,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse:
         """Deserializes the ListUsersResponse from a dictionary."""
@@ -430,6 +540,16 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.from_workspace_group_name is not None:
+            body['from_workspace_group_name'] = self.from_workspace_group_name
+        if self.size is not None: body['size'] = self.size
+        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest:
         """Deserializes the MigratePermissionsRequest from a dictionary."""
@@ -450,6 +570,12 @@ def as_dict(self) -> dict:
         if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse:
         """Deserializes the MigratePermissionsResponse from a dictionary."""
@@ -471,6 +597,13 @@ def as_dict(self) -> dict:
         if self.given_name is not None: body['givenName'] = self.given_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Name into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.family_name is not None: body['familyName'] = self.family_name
+        if self.given_name is not None: body['givenName'] = self.given_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Name:
         """Deserializes the Name from a dictionary."""
@@ -494,6 +627,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ObjectPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectPermissions:
         """Deserializes the ObjectPermissions from a dictionary."""
@@ -520,6 +661,14 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.operations: body['Operations'] = self.operations
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartialUpdate:
         """Deserializes the PartialUpdate from a dictionary."""
@@ -552,6 +701,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlRequest:
         """Deserializes the PasswordAccessControlRequest from a dictionary."""
@@ -589,6 +748,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlResponse:
         """Deserializes the PasswordAccessControlResponse from a dictionary."""
@@ -616,6 +786,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermission:
         """Deserializes the PasswordPermission from a dictionary."""
@@ -647,6 +825,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissions:
         """Deserializes the PasswordPermissions from a dictionary."""
@@ -670,6 +856,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsDescription:
         """Deserializes the PasswordPermissionsDescription from a dictionary."""
@@ -688,6 +881,12 @@ def as_dict(self) -> dict:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsRequest:
         """Deserializes the PasswordPermissionsRequest from a dictionary."""
@@ -713,6 +912,14 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Patch into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.op is not None: body['op'] = self.op
+        if self.path is not None: body['path'] = self.path
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Patch:
         """Deserializes the Patch from a dictionary."""
@@ -735,6 +942,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchResponse:
         """Deserializes the PatchResponse from a dictionary."""
@@ -763,6 +975,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Permission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Permission:
         """Deserializes the Permission from a dictionary."""
@@ -793,6 +1013,14 @@ def as_dict(self) -> dict:
         if self.principal: body['principal'] = self.principal.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error is not None: body['error'] = self.error
+        if self.permissions: body['permissions'] = self.permissions
+        if self.principal: body['principal'] = self.principal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignment:
         """Deserializes the PermissionAssignment from a dictionary."""
@@ -813,6 +1041,12 @@ def as_dict(self) -> dict:
             body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_assignments: body['permission_assignments'] = self.permission_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignments:
         """Deserializes the PermissionAssignments from a dictionary."""
@@ -855,6 +1089,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionOutput:
         """Deserializes the PermissionOutput from a dictionary."""
@@ -876,6 +1117,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsDescription:
         """Deserializes the PermissionsDescription from a dictionary."""
@@ -904,6 +1152,14 @@ def as_dict(self) -> dict:
         if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.request_object_id is not None: body['request_object_id'] = self.request_object_id
+        if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest:
         """Deserializes the PermissionsRequest from a dictionary."""
@@ -942,6 +1198,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput:
         """Deserializes the PrincipalOutput from a dictionary."""
@@ -964,6 +1231,12 @@ def as_dict(self) -> dict:
         if self.resource_type is not None: body['resourceType'] = self.resource_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resource_type is not None: body['resourceType'] = self.resource_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResourceMeta:
         """Deserializes the ResourceMeta from a dictionary."""
@@ -981,6 +1254,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Role into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Role:
         """Deserializes the Role from a dictionary."""
@@ -1005,6 +1284,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.grant_rules: body['grant_rules'] = self.grant_rules
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetResponse:
         """Deserializes the RuleSetResponse from a dictionary."""
@@ -1032,6 +1319,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.grant_rules: body['grant_rules'] = self.grant_rules
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetUpdateRequest:
         """Deserializes the RuleSetUpdateRequest from a dictionary."""
@@ -1084,6 +1379,20 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active is not None: body['active'] = self.active
+        if self.application_id is not None: body['applicationId'] = self.application_id
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.roles: body['roles'] = self.roles
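+        # schemas stays a list of enum members here; as_dict maps each to .value.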
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServicePrincipal:
         """Deserializes the ServicePrincipal from a dictionary."""
@@ -1111,6 +1420,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -1131,6 +1445,13 @@ def as_dict(self) -> dict:
         if self.rule_set: body['rule_set'] = self.rule_set.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rule_set: body['rule_set'] = self.rule_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest:
         """Deserializes the UpdateRuleSetRequest from a dictionary."""
@@ -1160,6 +1481,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions: body['permissions'] = self.permissions
+        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceAssignments:
         """Deserializes the UpdateWorkspaceAssignments from a dictionary."""
@@ -1225,6 +1554,22 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['userName'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the User into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active is not None: body['active'] = self.active
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.emails: body['emails'] = self.emails
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.name: body['name'] = self.name
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
+        if self.user_name is not None: body['userName'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
@@ -1265,6 +1610,12 @@ def as_dict(self) -> dict:
         if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions: body['permissions'] = self.permissions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspacePermissions:
         """Deserializes the WorkspacePermissions from a dictionary."""
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index ab485b33c..a991c7c50 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -53,6 +53,17 @@ def as_dict(self) -> dict:
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_time is not None: body['created_time'] = self.created_time
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.job_id is not None: body['job_id'] = self.job_id
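+        # settings is kept as a JobSettings instance; as_dict serializes it
+        # recursively via self.settings.as_dict().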
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         """Deserializes the BaseJob from a dictionary."""
@@ -240,6 +251,43 @@ def as_dict(self) -> dict:
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.git_source: body['git_source'] = self.git_source
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.original_attempt_run_id is not None:
+            body['original_attempt_run_id'] = self.original_attempt_run_id
+        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.repair_history: body['repair_history'] = self.repair_history
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.run_type is not None: body['run_type'] = self.run_type
+        if self.schedule: body['schedule'] = self.schedule
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.tasks: body['tasks'] = self.tasks
+        if self.trigger is not None: body['trigger'] = self.trigger
+        if self.trigger_info: body['trigger_info'] = self.trigger_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseRun:
         """Deserializes the BaseRun from a dictionary."""
@@ -292,6 +340,13 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelAllRuns:
         """Deserializes the CancelAllRuns from a dictionary."""
@@ -306,6 +361,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelAllRunsResponse:
         """Deserializes the CancelAllRunsResponse from a dictionary."""
@@ -323,6 +383,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRun:
         """Deserializes the CancelRun from a dictionary."""
@@ -337,12 +403,82 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRunResponse:
         """Deserializes the CancelRunResponse from a dictionary."""
         return cls()
 
 
+class CleanRoomTaskRunLifeCycleState(Enum):
+    """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to
+    remove coupling with jobs API definition"""
+
+    BLOCKED = 'BLOCKED'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    RUNNING = 'RUNNING'
+    SKIPPED = 'SKIPPED'
+    TERMINATED = 'TERMINATED'
+    TERMINATING = 'TERMINATING'
+    WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
+
+
+class CleanRoomTaskRunResultState(Enum):
+    """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid
+    cyclic dependency."""
+
+    CANCELED = 'CANCELED'
+    DISABLED = 'DISABLED'
+    EVICTED = 'EVICTED'
+    EXCLUDED = 'EXCLUDED'
+    FAILED = 'FAILED'
+    MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
+    SUCCESS = 'SUCCESS'
+    SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
+    TIMEDOUT = 'TIMEDOUT'
+    UPSTREAM_CANCELED = 'UPSTREAM_CANCELED'
+    UPSTREAM_EVICTED = 'UPSTREAM_EVICTED'
+    UPSTREAM_FAILED = 'UPSTREAM_FAILED'
+
+
+@dataclass
+class CleanRoomTaskRunState:
+    """Stores the run state of the clean room notebook V1 task."""
+
+    life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None
+    """A value indicating the run's current lifecycle state. This field is always available in the
+    response."""
+
+    result_state: Optional[CleanRoomTaskRunResultState] = None
+    """A value indicating the run's result. This field is only available for terminal lifecycle states."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value
+        if self.result_state is not None: body['result_state'] = self.result_state.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
+        if self.result_state is not None: body['result_state'] = self.result_state
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomTaskRunState:
+        """Deserializes the CleanRoomTaskRunState from a dictionary."""
+        return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState),
+                   result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState))
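+
+# Illustrative round-trip, a sketch rather than generated API code:
+#
+#     state = CleanRoomTaskRunState.from_dict({'life_cycle_state': 'TERMINATED',
+#                                              'result_state': 'SUCCESS'})
+#     state.as_dict()          # {'life_cycle_state': 'TERMINATED', 'result_state': 'SUCCESS'}
+#     state.as_shallow_dict()  # keeps the enum members themselves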
+
+
 @dataclass
 class ClusterInstance:
     cluster_id: Optional[str] = None
@@ -369,6 +505,13 @@ def as_dict(self) -> dict:
         if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterInstance:
         """Deserializes the ClusterInstance from a dictionary."""
@@ -402,6 +545,15 @@ def as_dict(self) -> dict:
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
@@ -446,6 +598,14 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConditionTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.op is not None: body['op'] = self.op
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConditionTask:
         """Deserializes the ConditionTask from a dictionary."""
@@ -482,6 +642,12 @@ def as_dict(self) -> dict:
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Continuous into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Continuous:
         """Deserializes the Continuous from a dictionary."""
@@ -571,8 +737,8 @@ class CreateJob:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
     Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
@@ -629,6 +795,35 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.continuous: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.description is not None: body['description'] = self.description
+        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.format is not None: body['format'] = self.format
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
+        if self.name is not None: body['name'] = self.name
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.parameters: body['parameters'] = self.parameters
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schedule: body['schedule'] = self.schedule
+        if self.tags: body['tags'] = self.tags
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.trigger: body['trigger'] = self.trigger
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
@@ -671,6 +866,12 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -703,6 +904,15 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
@@ -727,6 +937,13 @@ def as_dict(self) -> dict:
         if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbtOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers
+        if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtOutput:
         """Deserializes the DbtOutput from a dictionary."""
@@ -783,6 +1000,18 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbtTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.commands: body['commands'] = self.commands
+        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
+        if self.project_directory is not None: body['project_directory'] = self.project_directory
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtTask:
         """Deserializes the DbtTask from a dictionary."""
@@ -806,6 +1035,12 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteJob:
         """Deserializes the DeleteJob from a dictionary."""
@@ -820,6 +1055,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -837,6 +1077,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
@@ -851,6 +1097,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
         """Deserializes the DeleteRunResponse from a dictionary."""
@@ -883,6 +1134,14 @@ def as_dict(self) -> dict:
         if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
         """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
@@ -906,6 +1165,13 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest:
         """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
@@ -937,6 +1203,14 @@ def as_dict(self) -> dict:
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse:
         """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
@@ -963,6 +1237,12 @@ def as_dict(self) -> dict:
         if self.views: body['views'] = [v.as_dict() for v in self.views]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.views: body['views'] = self.views
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportRunOutput:
         """Deserializes the ExportRunOutput from a dictionary."""
@@ -994,6 +1274,16 @@ def as_dict(self) -> dict:
             body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.min_time_between_triggers_seconds is not None:
+            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
+        if self.url is not None: body['url'] = self.url
+        if self.wait_after_last_change_seconds is not None:
+            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileArrivalTriggerConfiguration:
         """Deserializes the FileArrivalTriggerConfiguration from a dictionary."""
@@ -1018,6 +1308,13 @@ def as_dict(self) -> dict:
         if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats
+        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachStats:
         """Deserializes the ForEachStats from a dictionary."""
@@ -1045,6 +1342,14 @@ def as_dict(self) -> dict:
         if self.task: body['task'] = self.task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.task: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTask:
         """Deserializes the ForEachTask from a dictionary."""
@@ -1072,6 +1377,14 @@ def as_dict(self) -> dict:
         if self.termination_category is not None: body['termination_category'] = self.termination_category
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTaskErrorMessageStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.termination_category is not None: body['termination_category'] = self.termination_category
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
         """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
@@ -1111,6 +1424,17 @@ def as_dict(self) -> dict:
         if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active_iterations is not None: body['active_iterations'] = self.active_iterations
+        if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations
+        if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations
+        if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations
+        if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations
+        if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskTaskRunStats:
         """Deserializes the ForEachTaskTaskRunStats from a dictionary."""
@@ -1139,6 +1463,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse:
         """Deserializes the GetJobPermissionLevelsResponse from a dictionary."""
@@ -1165,6 +1495,13 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse:
         """Deserializes the GetPolicyComplianceResponse from a dictionary."""
@@ -1199,6 +1536,12 @@ def as_dict(self) -> dict:
         if self.used_commit is not None: body['used_commit'] = self.used_commit
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.used_commit is not None: body['used_commit'] = self.used_commit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSnapshot:
         """Deserializes the GitSnapshot from a dictionary."""
@@ -1253,6 +1596,18 @@ def as_dict(self) -> dict:
         if self.job_source: body['job_source'] = self.job_source.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GitSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_branch is not None: body['git_branch'] = self.git_branch
+        if self.git_commit is not None: body['git_commit'] = self.git_commit
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_snapshot: body['git_snapshot'] = self.git_snapshot
+        if self.git_tag is not None: body['git_tag'] = self.git_tag
+        if self.git_url is not None: body['git_url'] = self.git_url
+        if self.job_source: body['job_source'] = self.job_source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSource:
         """Deserializes the GitSource from a dictionary."""
@@ -1310,6 +1665,18 @@ def as_dict(self) -> dict:
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Job into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_time is not None: body['created_time'] = self.created_time
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Job:
         """Deserializes the Job from a dictionary."""
@@ -1345,6 +1712,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlRequest:
         """Deserializes the JobAccessControlRequest from a dictionary."""
@@ -1382,6 +1759,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlResponse:
         """Deserializes the JobAccessControlResponse from a dictionary."""
@@ -1409,6 +1797,13 @@ def as_dict(self) -> dict:
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCluster:
         """Deserializes the JobCluster from a dictionary."""
@@ -1438,6 +1833,14 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobCompliance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCompliance:
         """Deserializes the JobCompliance from a dictionary."""
@@ -1463,6 +1866,13 @@ def as_dict(self) -> dict:
         if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kind is not None: body['kind'] = self.kind
+        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobDeployment:
         """Deserializes the JobDeployment from a dictionary."""
@@ -1537,6 +1947,20 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEmailNotifications:
         """Deserializes the JobEmailNotifications from a dictionary."""
@@ -1565,6 +1989,13 @@ def as_dict(self) -> dict:
         if self.spec: body['spec'] = self.spec.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.spec: body['spec'] = self.spec
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEnvironment:
         """Deserializes the JobEnvironment from a dictionary."""
@@ -1591,6 +2022,15 @@ def as_dict(self) -> dict:
             body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_canceled_runs is not None:
+            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobNotificationSettings:
         """Deserializes the JobNotificationSettings from a dictionary."""
@@ -1617,6 +2057,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default is not None: body['default'] = self.default
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobParameter:
         """Deserializes the JobParameter from a dictionary."""
@@ -1638,6 +2086,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default is not None: body['default'] = self.default
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobParameterDefinition:
         """Deserializes the JobParameterDefinition from a dictionary."""
@@ -1661,6 +2116,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermission:
         """Deserializes the JobPermission from a dictionary."""
@@ -1695,6 +2158,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissions:
         """Deserializes the JobPermissions from a dictionary."""
@@ -1717,6 +2188,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsDescription:
         """Deserializes the JobPermissionsDescription from a dictionary."""
@@ -1739,6 +2217,13 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
         """Deserializes the JobPermissionsRequest from a dictionary."""
@@ -1748,8 +2233,8 @@ def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
 
 @dataclass
 class JobRunAs:
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
     Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
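+    # Illustrative sketch (the principal names below are hypothetical):
+    #
+    #     JobRunAs(user_name='someone@example.com')
+    #     JobRunAs(service_principal_name='11111111-2222-3333-4444-555555555555')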
 
@@ -1769,6 +2254,14 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobRunAs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobRunAs:
         """Deserializes the JobRunAs from a dictionary."""
@@ -1856,8 +2349,8 @@ class JobSettings:
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
     Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
@@ -1912,6 +2405,34 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.continuous: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.description is not None: body['description'] = self.description
+        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.format is not None: body['format'] = self.format
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
+        if self.name is not None: body['name'] = self.name
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.parameters: body['parameters'] = self.parameters
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schedule: body['schedule'] = self.schedule
+        if self.tags: body['tags'] = self.tags
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.trigger: body['trigger'] = self.trigger
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
@@ -1969,6 +2490,15 @@ def as_dict(self) -> dict:
         if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dirty_state is not None: body['dirty_state'] = self.dirty_state
+        if self.import_from_git_branch is not None:
+            body['import_from_git_branch'] = self.import_from_git_branch
+        if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSource:
         """Deserializes the JobSource from a dictionary."""
@@ -2042,6 +2572,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metric is not None: body['metric'] = self.metric
+        if self.op is not None: body['op'] = self.op
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRule:
         """Deserializes the JobsHealthRule from a dictionary."""
@@ -2062,6 +2600,12 @@ def as_dict(self) -> dict:
         if self.rules: body['rules'] = [v.as_dict() for v in self.rules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.rules: body['rules'] = self.rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules:
         """Deserializes the JobsHealthRules from a dictionary."""
@@ -2089,6 +2633,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jobs: body['jobs'] = self.jobs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse:
         """Deserializes the ListJobComplianceForPolicyResponse from a dictionary."""
@@ -2122,6 +2674,15 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.jobs: body['jobs'] = self.jobs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobsResponse:
         """Deserializes the ListJobsResponse from a dictionary."""
@@ -2157,6 +2718,15 @@ def as_dict(self) -> dict:
         if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRunsResponse:
         """Deserializes the ListRunsResponse from a dictionary."""
@@ -2185,6 +2755,13 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result is not None: body['result'] = self.result
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookOutput:
         """Deserializes the NotebookOutput from a dictionary."""
@@ -2237,6 +2814,15 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        if self.notebook_path is not None: body['notebook_path'] = self.notebook_path
+        if self.source is not None: body['source'] = self.source
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookTask:
         """Deserializes the NotebookTask from a dictionary."""
@@ -2267,6 +2853,13 @@ def as_dict(self) -> dict:
         if self.unit is not None: body['unit'] = self.unit.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration:
         """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
@@ -2292,6 +2885,12 @@ def as_dict(self) -> dict:
         if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineParams:
         """Deserializes the PipelineParams from a dictionary."""
@@ -2313,6 +2912,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTask:
         """Deserializes the PipelineTask from a dictionary."""
@@ -2345,6 +2951,15 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entry_point is not None: body['entry_point'] = self.entry_point
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.package_name is not None: body['package_name'] = self.package_name
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask:
         """Deserializes the PythonWheelTask from a dictionary."""
@@ -2374,6 +2989,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueueDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueDetails:
         """Deserializes the QueueDetails from a dictionary."""
@@ -2403,6 +3025,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueueSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueSettings:
         """Deserializes the QueueSettings from a dictionary."""
@@ -2444,6 +3072,18 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.id is not None: body['id'] = self.id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.task_run_ids: body['task_run_ids'] = self.task_run_ids
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem:
         """Deserializes the RepairHistoryItem from a dictionary."""
@@ -2580,6 +3220,26 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.rerun_all_failed_tasks is not None:
+            body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
+        if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRun:
         """Deserializes the RepairRun from a dictionary."""
@@ -2613,6 +3273,12 @@ def as_dict(self) -> dict:
         if self.repair_id is not None: body['repair_id'] = self.repair_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.repair_id is not None: body['repair_id'] = self.repair_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRunResponse:
         """Deserializes the RepairRunResponse from a dictionary."""
@@ -2637,6 +3303,13 @@ def as_dict(self) -> dict:
         if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResetJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResetJob:
         """Deserializes the ResetJob from a dictionary."""
@@ -2651,6 +3324,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResetResponse:
         """Deserializes the ResetResponse from a dictionary."""
@@ -2670,6 +3348,13 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedConditionTaskValues:
         """Deserializes the ResolvedConditionTaskValues from a dictionary."""
@@ -2686,6 +3371,12 @@ def as_dict(self) -> dict:
         if self.commands: body['commands'] = [v for v in self.commands]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.commands: body['commands'] = self.commands
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedDbtTaskValues:
         """Deserializes the ResolvedDbtTaskValues from a dictionary."""
@@ -2702,6 +3393,12 @@ def as_dict(self) -> dict:
         if self.base_parameters: body['base_parameters'] = self.base_parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedNotebookTaskValues:
         """Deserializes the ResolvedNotebookTaskValues from a dictionary."""
@@ -2718,6 +3415,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = self.parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedParamPairValues:
         """Deserializes the ResolvedParamPairValues from a dictionary."""
@@ -2737,6 +3440,13 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedPythonWheelTaskValues:
         """Deserializes the ResolvedPythonWheelTaskValues from a dictionary."""
@@ -2756,6 +3466,13 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = self.parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedRunJobTaskValues:
         """Deserializes the ResolvedRunJobTaskValues from a dictionary."""
@@ -2772,6 +3489,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedStringParamsValues:
         """Deserializes the ResolvedStringParamsValues from a dictionary."""
@@ -2815,6 +3538,21 @@ def as_dict(self) -> dict:
         if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.simulation_task: body['simulation_task'] = self.simulation_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedValues:
         """Deserializes the ResolvedValues from a dictionary."""
@@ -3017,6 +3755,45 @@ def as_dict(self) -> dict:
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Run into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.git_source: body['git_source'] = self.git_source
+        if self.iterations: body['iterations'] = self.iterations
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.original_attempt_run_id is not None:
+            body['original_attempt_run_id'] = self.original_attempt_run_id
+        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.repair_history: body['repair_history'] = self.repair_history
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.run_type is not None: body['run_type'] = self.run_type
+        if self.schedule: body['schedule'] = self.schedule
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.tasks: body['tasks'] = self.tasks
+        if self.trigger is not None: body['trigger'] = self.trigger
+        if self.trigger_info: body['trigger_info'] = self.trigger_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
@@ -3089,6 +3866,15 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.op is not None: body['op'] = self.op
+        if self.outcome is not None: body['outcome'] = self.outcome
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunConditionTask:
         """Deserializes the RunConditionTask from a dictionary."""
@@ -3123,6 +3909,15 @@ def as_dict(self) -> dict:
         if self.task: body['task'] = self.task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.stats: body['stats'] = self.stats
+        if self.task: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunForEachTask:
         """Deserializes the RunForEachTask from a dictionary."""
@@ -3161,6 +3956,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobOutput:
         """Deserializes the RunJobOutput from a dictionary."""
@@ -3264,6 +4065,21 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunJobTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobTask:
         """Deserializes the RunJobTask from a dictionary."""
@@ -3435,6 +4251,24 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunNow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.only: body['only'] = self.only
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.queue: body['queue'] = self.queue
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNow:
         """Deserializes the RunNow from a dictionary."""
@@ -3470,6 +4304,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNowResponse:
         """Deserializes the RunNowResponse from a dictionary."""
@@ -3536,6 +4377,21 @@ def as_dict(self) -> dict:
         if self.sql_output: body['sql_output'] = self.sql_output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_output: body['dbt_output'] = self.dbt_output
+        if self.error is not None: body['error'] = self.error
+        if self.error_trace is not None: body['error_trace'] = self.error_trace
+        if self.info is not None: body['info'] = self.info
+        if self.logs is not None: body['logs'] = self.logs
+        if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated
+        if self.metadata: body['metadata'] = self.metadata
+        if self.notebook_output: body['notebook_output'] = self.notebook_output
+        if self.run_job_output: body['run_job_output'] = self.run_job_output
+        if self.sql_output: body['sql_output'] = self.sql_output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunOutput:
         """Deserializes the RunOutput from a dictionary."""
@@ -3640,6 +4496,19 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunParameters:
         """Deserializes the RunParameters from a dictionary."""
@@ -3709,6 +4578,17 @@ def as_dict(self) -> dict:
             body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
+        if self.queue_reason is not None: body['queue_reason'] = self.queue_reason
+        if self.result_state is not None: body['result_state'] = self.result_state
+        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.user_cancelled_or_timedout is not None:
+            body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunState:
         """Deserializes the RunState from a dictionary."""
@@ -3741,6 +4621,14 @@ def as_dict(self) -> dict:
         if self.termination_details: body['termination_details'] = self.termination_details.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.queue_details: body['queue_details'] = self.queue_details
+        if self.state is not None: body['state'] = self.state
+        if self.termination_details: body['termination_details'] = self.termination_details
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunStatus:
         """Deserializes the RunStatus from a dictionary."""
@@ -3972,6 +4860,50 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.git_source: body['git_source'] = self.git_source
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.resolved_values: body['resolved_values'] = self.resolved_values
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTask:
         """Deserializes the RunTask from a dictionary."""
@@ -4069,6 +5001,14 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
+        if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkJarTask:
         """Deserializes the SparkJarTask from a dictionary."""
@@ -4109,6 +5049,14 @@ def as_dict(self) -> dict:
         if self.source is not None: body['source'] = self.source.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        if self.python_file is not None: body['python_file'] = self.python_file
+        if self.source is not None: body['source'] = self.source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkPythonTask:
         """Deserializes the SparkPythonTask from a dictionary."""
@@ -4132,6 +5080,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkSubmitTask:
         """Deserializes the SparkSubmitTask from a dictionary."""
@@ -4169,6 +5123,16 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_state is not None: body['alert_state'] = self.alert_state
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.sql_statements: body['sql_statements'] = self.sql_statements
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlAlertOutput:
         """Deserializes the SqlAlertOutput from a dictionary."""
@@ -4205,6 +5169,13 @@ def as_dict(self) -> dict:
         if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.widgets: body['widgets'] = self.widgets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardOutput:
         """Deserializes the SqlDashboardOutput from a dictionary."""
@@ -4247,6 +5218,18 @@ def as_dict(self) -> dict:
         if self.widget_title is not None: body['widget_title'] = self.widget_title
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.error: body['error'] = self.error
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.status is not None: body['status'] = self.status
+        if self.widget_id is not None: body['widget_id'] = self.widget_id
+        if self.widget_title is not None: body['widget_title'] = self.widget_title
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardWidgetOutput:
         """Deserializes the SqlDashboardWidgetOutput from a dictionary."""
@@ -4287,6 +5270,14 @@ def as_dict(self) -> dict:
         if self.query_output: body['query_output'] = self.query_output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_output: body['alert_output'] = self.alert_output
+        if self.dashboard_output: body['dashboard_output'] = self.dashboard_output
+        if self.query_output: body['query_output'] = self.query_output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutput:
         """Deserializes the SqlOutput from a dictionary."""
@@ -4306,6 +5297,12 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutputError:
         """Deserializes the SqlOutputError from a dictionary."""
@@ -4338,6 +5335,16 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.sql_statements: body['sql_statements'] = self.sql_statements
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlQueryOutput:
         """Deserializes the SqlQueryOutput from a dictionary."""
@@ -4359,6 +5366,12 @@ def as_dict(self) -> dict:
         if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlStatementOutput:
         """Deserializes the SqlStatementOutput from a dictionary."""
@@ -4399,6 +5412,17 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        if self.dashboard: body['dashboard'] = self.dashboard
+        if self.file: body['file'] = self.file
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query: body['query'] = self.query
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTask:
         """Deserializes the SqlTask from a dictionary."""
@@ -4429,6 +5453,14 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_id is not None: body['alert_id'] = self.alert_id
+        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskAlert:
         """Deserializes the SqlTaskAlert from a dictionary."""
@@ -4460,6 +5492,15 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskDashboard:
         """Deserializes the SqlTaskDashboard from a dictionary."""
@@ -4491,6 +5532,13 @@ def as_dict(self) -> dict:
         if self.source is not None: body['source'] = self.source.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.source is not None: body['source'] = self.source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskFile:
         """Deserializes the SqlTaskFile from a dictionary."""
@@ -4508,6 +5556,12 @@ def as_dict(self) -> dict:
         if self.query_id is not None: body['query_id'] = self.query_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_id is not None: body['query_id'] = self.query_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskQuery:
         """Deserializes the SqlTaskQuery from a dictionary."""
@@ -4532,6 +5586,13 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription:
         """Deserializes the SqlTaskSubscription from a dictionary."""
@@ -4622,6 +5683,25 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
@@ -4654,6 +5734,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRunResponse:
         """Deserializes the SubmitRunResponse from a dictionary."""
@@ -4797,6 +5883,35 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.health: body['health'] = self.health
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
@@ -4855,6 +5970,17 @@ def as_dict(self) -> dict:
             body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition is not None: body['condition'] = self.condition
+        if self.min_time_between_triggers_seconds is not None:
+            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
+        if self.table_names: body['table_names'] = self.table_names
+        if self.wait_after_last_change_seconds is not None:
+            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableUpdateTriggerConfiguration:
         """Deserializes the TableUpdateTriggerConfiguration from a dictionary."""
@@ -5033,6 +6159,42 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Task into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.disable_auto_optimization is not None:
+            body['disable_auto_optimization'] = self.disable_auto_optimization
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.health: body['health'] = self.health
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_retries is not None: body['max_retries'] = self.max_retries
+        if self.min_retry_interval_millis is not None:
+            body['min_retry_interval_millis'] = self.min_retry_interval_millis
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Task:
         """Deserializes the Task from a dictionary."""
@@ -5083,6 +6245,13 @@ def as_dict(self) -> dict:
         if self.task_key is not None: body['task_key'] = self.task_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.outcome is not None: body['outcome'] = self.outcome
+        if self.task_key is not None: body['task_key'] = self.task_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskDependency:
         """Deserializes the TaskDependency from a dictionary."""
@@ -5140,6 +6309,20 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskEmailNotifications:
         """Deserializes the TaskEmailNotifications from a dictionary."""
@@ -5177,6 +6360,16 @@ def as_dict(self) -> dict:
             body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
+        if self.no_alert_for_canceled_runs is not None:
+            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
         """Deserializes the TaskNotificationSettings from a dictionary."""
@@ -5306,6 +6499,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.message is not None: body['message'] = self.message
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationDetails:
         """Deserializes the TerminationDetails from a dictionary."""
@@ -5342,6 +6543,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerInfo:
         """Deserializes the TriggerInfo from a dictionary."""
@@ -5374,6 +6581,16 @@ def as_dict(self) -> dict:
         if self.table_update: body['table_update'] = self.table_update.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_arrival: body['file_arrival'] = self.file_arrival
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.periodic: body['periodic'] = self.periodic
+        if self.table: body['table'] = self.table
+        if self.table_update: body['table_update'] = self.table_update
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
@@ -5432,6 +6649,14 @@ def as_dict(self) -> dict:
         if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateJob:
         """Deserializes the UpdateJob from a dictionary."""
@@ -5448,6 +6673,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -5474,6 +6704,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ViewItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ViewItem:
         """Deserializes the ViewItem from a dictionary."""
@@ -5506,6 +6744,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Webhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Webhook:
         """Deserializes the Webhook from a dictionary."""
@@ -5553,6 +6797,18 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WebhookNotifications:
         """Deserializes the WebhookNotifications from a dictionary."""
@@ -5754,8 +7010,8 @@ def create(self,
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
-          not specified, the job/pipeline runs as the user who created the job/pipeline.
+          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+          the job runs as the user who created the job.
           
           Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
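
For readers skimming this mostly mechanical diff: the new `as_shallow_dict` methods differ from the existing `as_dict` in exactly one way — nested message objects and enum members are passed through unconverted, whereas `as_dict` recurses via `.as_dict()` and `.value`. A minimal, self-contained sketch of the two patterns (the `Inner`/`Outer`/`Color` names are hypothetical stand-ins, not SDK types):

```python
from dataclasses import dataclass
from enum import Enum
from typing import Optional


class Color(Enum):
    RED = 'RED'


@dataclass
class Inner:
    name: Optional[str] = None

    def as_dict(self) -> dict:
        body = {}
        if self.name is not None: body['name'] = self.name
        return body


@dataclass
class Outer:
    inner: Optional[Inner] = None
    color: Optional[Color] = None

    def as_dict(self) -> dict:
        # Deep: nested messages become dicts, enums become their .value.
        body = {}
        if self.inner: body['inner'] = self.inner.as_dict()
        if self.color is not None: body['color'] = self.color.value
        return body

    def as_shallow_dict(self) -> dict:
        # Shallow: immediate attributes are kept as-is.
        body = {}
        if self.inner: body['inner'] = self.inner
        if self.color is not None: body['color'] = self.color
        return body


o = Outer(inner=Inner(name='x'), color=Color.RED)
assert o.as_dict() == {'inner': {'name': 'x'}, 'color': 'RED'}
shallow = o.as_shallow_dict()
assert shallow['inner'] is o.inner     # the original object, not a dict
assert shallow['color'] is Color.RED   # the enum member, not 'RED'
```

One practical consequence: `as_dict()` output is plain JSON-style data of the kind `from_dict` expects, while `as_shallow_dict()` output can still contain live dataclass and enum instances, so it is not generally safe to JSON-encode or feed back into `from_dict`.
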
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 242e3bf0c..239cd2eaf 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -27,6 +27,13 @@ def as_dict(self) -> dict:
         if self.listing_id is not None: body['listing_id'] = self.listing_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingRequest:
         """Deserializes the AddExchangeForListingRequest from a dictionary."""
@@ -43,6 +50,12 @@ def as_dict(self) -> dict:
         if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingResponse:
         """Deserializes the AddExchangeForListingResponse from a dictionary."""
@@ -69,6 +82,12 @@ def as_dict(self) -> dict:
         if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetListingsResponse:
         """Deserializes the BatchGetListingsResponse from a dictionary."""
@@ -85,6 +104,12 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetProvidersResponse:
         """Deserializes the BatchGetProvidersResponse from a dictionary."""
@@ -127,6 +152,12 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConsumerTerms:
         """Deserializes the ConsumerTerms from a dictionary."""
@@ -154,6 +185,15 @@ def as_dict(self) -> dict:
         if self.last_name is not None: body['last_name'] = self.last_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContactInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.company is not None: body['company'] = self.company
+        if self.email is not None: body['email'] = self.email
+        if self.first_name is not None: body['first_name'] = self.first_name
+        if self.last_name is not None: body['last_name'] = self.last_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContactInfo:
         """Deserializes the ContactInfo from a dictionary."""
@@ -179,6 +219,12 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterRequest:
         """Deserializes the CreateExchangeFilterRequest from a dictionary."""
@@ -195,6 +241,12 @@ def as_dict(self) -> dict:
         if self.filter_id is not None: body['filter_id'] = self.filter_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter_id is not None: body['filter_id'] = self.filter_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterResponse:
         """Deserializes the CreateExchangeFilterResponse from a dictionary."""
@@ -211,6 +263,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeRequest:
         """Deserializes the CreateExchangeRequest from a dictionary."""
@@ -227,6 +285,12 @@ def as_dict(self) -> dict:
         if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeResponse:
         """Deserializes the CreateExchangeResponse from a dictionary."""
@@ -253,6 +317,15 @@ def as_dict(self) -> dict:
         if self.mime_type is not None: body['mime_type'] = self.mime_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.file_parent: body['file_parent'] = self.file_parent
+        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
+        if self.mime_type is not None: body['mime_type'] = self.mime_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileRequest:
         """Deserializes the CreateFileRequest from a dictionary."""
@@ -276,6 +349,13 @@ def as_dict(self) -> dict:
         if self.upload_url is not None: body['upload_url'] = self.upload_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_info: body['file_info'] = self.file_info
+        if self.upload_url is not None: body['upload_url'] = self.upload_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileResponse:
         """Deserializes the CreateFileResponse from a dictionary."""
@@ -309,6 +389,17 @@ def as_dict(self) -> dict:
         if self.share_name is not None: body['share_name'] = self.share_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_detail: body['repo_detail'] = self.repo_detail
+        if self.share_name is not None: body['share_name'] = self.share_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstallationRequest:
         """Deserializes the CreateInstallationRequest from a dictionary."""
@@ -330,6 +421,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingRequest:
         """Deserializes the CreateListingRequest from a dictionary."""
@@ -346,6 +443,12 @@ def as_dict(self) -> dict:
         if self.listing_id is not None: body['listing_id'] = self.listing_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingResponse:
         """Deserializes the CreateListingResponse from a dictionary."""
@@ -389,6 +492,20 @@ def as_dict(self) -> dict:
         if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
+        if self.comment is not None: body['comment'] = self.comment
+        if self.company is not None: body['company'] = self.company
+        if self.first_name is not None: body['first_name'] = self.first_name
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.last_name is not None: body['last_name'] = self.last_name
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequest:
         """Deserializes the CreatePersonalizationRequest from a dictionary."""
@@ -413,6 +530,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequestResponse:
         """Deserializes the CreatePersonalizationRequestResponse from a dictionary."""
@@ -429,6 +552,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderRequest:
         """Deserializes the CreateProviderRequest from a dictionary."""
@@ -445,6 +574,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderResponse:
         """Deserializes the CreateProviderResponse from a dictionary."""
@@ -477,6 +612,13 @@ def as_dict(self) -> dict:
         if self.unit is not None: body['unit'] = self.unit.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataRefreshInfo:
         """Deserializes the DataRefreshInfo from a dictionary."""
@@ -491,6 +633,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeFilterResponse:
         """Deserializes the DeleteExchangeFilterResponse from a dictionary."""
@@ -505,6 +652,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeResponse:
         """Deserializes the DeleteExchangeResponse from a dictionary."""
@@ -519,6 +671,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteFileResponse:
         """Deserializes the DeleteFileResponse from a dictionary."""
@@ -533,6 +690,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstallationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstallationResponse:
         """Deserializes the DeleteInstallationResponse from a dictionary."""
@@ -547,6 +709,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteListingResponse:
         """Deserializes the DeleteListingResponse from a dictionary."""
@@ -561,6 +728,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteProviderResponse:
         """Deserializes the DeleteProviderResponse from a dictionary."""
@@ -607,6 +779,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Exchange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.filters: body['filters'] = self.filters
+        if self.id is not None: body['id'] = self.id
+        if self.linked_listings: body['linked_listings'] = self.linked_listings
+        if self.name is not None: body['name'] = self.name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Exchange:
         """Deserializes the Exchange from a dictionary."""
@@ -655,6 +841,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.filter_type is not None: body['filter_type'] = self.filter_type
+        if self.filter_value is not None: body['filter_value'] = self.filter_value
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeFilter:
         """Deserializes the ExchangeFilter from a dictionary."""
@@ -702,6 +902,18 @@ def as_dict(self) -> dict:
         if self.listing_name is not None: body['listing_name'] = self.listing_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
+        if self.id is not None: body['id'] = self.id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeListing:
         """Deserializes the ExchangeListing from a dictionary."""
@@ -754,6 +966,21 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.download_link is not None: body['download_link'] = self.download_link
+        if self.file_parent: body['file_parent'] = self.file_parent
+        if self.id is not None: body['id'] = self.id
+        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
+        if self.mime_type is not None: body['mime_type'] = self.mime_type
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -783,6 +1010,13 @@ def as_dict(self) -> dict:
         if self.parent_id is not None: body['parent_id'] = self.parent_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileParent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type
+        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileParent:
         """Deserializes the FileParent from a dictionary."""
@@ -820,6 +1054,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExchangeResponse:
         """Deserializes the GetExchangeResponse from a dictionary."""
@@ -836,6 +1076,12 @@ def as_dict(self) -> dict:
         if self.file_info: body['file_info'] = self.file_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_info: body['file_info'] = self.file_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetFileResponse:
         """Deserializes the GetFileResponse from a dictionary."""
@@ -853,6 +1099,12 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionProviderAnalyticsDashboardResponse:
         """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary."""
@@ -873,6 +1125,13 @@ def as_dict(self) -> dict:
             body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingContentMetadataResponse:
         """Deserializes the GetListingContentMetadataResponse from a dictionary."""
@@ -890,6 +1149,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingResponse:
         """Deserializes the GetListingResponse from a dictionary."""
@@ -909,6 +1174,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingsResponse:
         """Deserializes the GetListingsResponse from a dictionary."""
@@ -927,6 +1199,12 @@ def as_dict(self) -> dict:
             body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPersonalizationRequestResponse:
         """Deserializes the GetPersonalizationRequestResponse from a dictionary."""
@@ -944,6 +1222,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetProviderResponse:
         """Deserializes the GetProviderResponse from a dictionary."""
@@ -960,6 +1244,12 @@ def as_dict(self) -> dict:
         if self.installation: body['installation'] = self.installation.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Installation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Installation:
         """Deserializes the Installation from a dictionary."""
@@ -1012,6 +1302,24 @@ def as_dict(self) -> dict:
         if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.id is not None: body['id'] = self.id
+        if self.installed_on is not None: body['installed_on'] = self.installed_on
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.status is not None: body['status'] = self.status
+        if self.token_detail: body['token_detail'] = self.token_detail
+        if self.tokens: body['tokens'] = self.tokens
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallationDetail:
         """Deserializes the InstallationDetail from a dictionary."""
@@ -1049,6 +1357,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installations: body['installations'] = self.installations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllInstallationsResponse:
         """Deserializes the ListAllInstallationsResponse from a dictionary."""
@@ -1070,6 +1385,13 @@ def as_dict(self) -> dict:
             body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllPersonalizationRequestsResponse:
         """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary."""
@@ -1091,6 +1413,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filters: body['filters'] = self.filters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangeFiltersResponse:
         """Deserializes the ListExchangeFiltersResponse from a dictionary."""
@@ -1111,6 +1440,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_listing: body['exchange_listing'] = self.exchange_listing
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesForListingResponse:
         """Deserializes the ListExchangesForListingResponse from a dictionary."""
@@ -1131,6 +1467,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchanges: body['exchanges'] = self.exchanges
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesResponse:
         """Deserializes the ListExchangesResponse from a dictionary."""
@@ -1151,6 +1494,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_infos: body['file_infos'] = self.file_infos
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFilesResponse:
         """Deserializes the ListFilesResponse from a dictionary."""
@@ -1171,6 +1521,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fulfillments: body['fulfillments'] = self.fulfillments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFulfillmentsResponse:
         """Deserializes the ListFulfillmentsResponse from a dictionary."""
@@ -1191,6 +1548,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installations: body['installations'] = self.installations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstallationsResponse:
         """Deserializes the ListInstallationsResponse from a dictionary."""
@@ -1211,6 +1575,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_listings: body['exchange_listings'] = self.exchange_listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsForExchangeResponse:
         """Deserializes the ListListingsForExchangeResponse from a dictionary."""
@@ -1231,6 +1602,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsResponse:
         """Deserializes the ListListingsResponse from a dictionary."""
@@ -1255,6 +1633,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderAnalyticsDashboardResponse:
         """Deserializes the ListProviderAnalyticsDashboardResponse from a dictionary."""
@@ -1276,6 +1662,13 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
@@ -1300,6 +1693,14 @@ def as_dict(self) -> dict:
         if self.summary: body['summary'] = self.summary.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Listing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.detail: body['detail'] = self.detail
+        if self.id is not None: body['id'] = self.id
+        if self.summary: body['summary'] = self.summary
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
@@ -1392,6 +1793,31 @@ def as_dict(self) -> dict:
         if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets: body['assets'] = self.assets
+        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
+        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
+        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity
+        if self.cost is not None: body['cost'] = self.cost
+        if self.data_source is not None: body['data_source'] = self.data_source
+        if self.description is not None: body['description'] = self.description
+        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.embedded_notebook_file_infos:
+            body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos
+        if self.file_ids: body['file_ids'] = self.file_ids
+        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
+        if self.license is not None: body['license'] = self.license
+        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.size is not None: body['size'] = self.size
+        if self.support_link is not None: body['support_link'] = self.support_link
+        if self.tags: body['tags'] = self.tags
+        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
+        if self.update_frequency: body['update_frequency'] = self.update_frequency
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingDetail:
         """Deserializes the ListingDetail from a dictionary."""
@@ -1438,6 +1864,16 @@ def as_dict(self) -> dict:
         if self.share_info: body['share_info'] = self.share_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_info: body['repo_info'] = self.repo_info
+        if self.share_info: body['share_info'] = self.share_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment:
         """Deserializes the ListingFulfillment from a dictionary."""
@@ -1458,6 +1894,12 @@ def as_dict(self) -> dict:
         if self.visibility is not None: body['visibility'] = self.visibility.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.visibility is not None: body['visibility'] = self.visibility
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSetting:
         """Deserializes the ListingSetting from a dictionary."""
@@ -1548,6 +1990,30 @@ def as_dict(self) -> dict:
         if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.categories: body['categories'] = self.categories
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.exchange_ids: body['exchange_ids'] = self.exchange_ids
+        if self.git_repo: body['git_repo'] = self.git_repo
+        if self.listing_type is not None: body['listingType'] = self.listing_type
+        if self.name is not None: body['name'] = self.name
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.provider_region: body['provider_region'] = self.provider_region
+        if self.published_at is not None: body['published_at'] = self.published_at
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.setting: body['setting'] = self.setting
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.subtitle is not None: body['subtitle'] = self.subtitle
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSummary:
         """Deserializes the ListingSummary from a dictionary."""
@@ -1587,6 +2053,13 @@ def as_dict(self) -> dict:
         if self.tag_values: body['tag_values'] = [v for v in self.tag_values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tag_name is not None: body['tag_name'] = self.tag_name
+        if self.tag_values: body['tag_values'] = self.tag_values
+        return body
+
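
A subtle difference worth noting in the `ListingTag` hunk above: for list-of-primitive fields, `as_dict` builds a fresh list (`[v for v in self.tag_values]`) while `as_shallow_dict` returns the attribute itself, so mutating the shallow result mutates the message. A simplified sketch (the `Tag` name is a hypothetical stand-in; the `if` guards from the generated code are kept):

```python
from dataclasses import dataclass, field
from typing import List


@dataclass
class Tag:
    tag_values: List[str] = field(default_factory=list)

    def as_dict(self) -> dict:
        body = {}
        if self.tag_values: body['tag_values'] = [v for v in self.tag_values]  # copy
        return body

    def as_shallow_dict(self) -> dict:
        body = {}
        if self.tag_values: body['tag_values'] = self.tag_values  # alias
        return body


t = Tag(tag_values=['a'])
t.as_shallow_dict()['tag_values'].append('b')
assert t.tag_values == ['a', 'b']   # the shallow dict aliased the original list
t.as_dict()['tag_values'].append('c')
assert t.tag_values == ['a', 'b']   # the deep dict had copied it
```
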
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingTag:
         """Deserializes the ListingTag from a dictionary."""
@@ -1667,6 +2140,27 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.consumer_region: body['consumer_region'] = self.consumer_region
+        if self.contact_info: body['contact_info'] = self.contact_info
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalizationRequest:
         """Deserializes the PersonalizationRequest from a dictionary."""
@@ -1706,6 +2200,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard:
         """Deserializes the ProviderAnalyticsDashboard from a dictionary."""
@@ -1766,6 +2266,28 @@ def as_dict(self) -> dict:
         if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.business_contact_email is not None:
+            body['business_contact_email'] = self.business_contact_email
+        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
+        if self.dark_mode_icon_file_id is not None:
+            body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
+        if self.dark_mode_icon_file_path is not None:
+            body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
+        if self.description is not None: body['description'] = self.description
+        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
+        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
+        if self.id is not None: body['id'] = self.id
+        if self.is_featured is not None: body['is_featured'] = self.is_featured
+        if self.name is not None: body['name'] = self.name
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
+        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
@@ -1798,6 +2320,13 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegionInfo:
         """Deserializes the RegionInfo from a dictionary."""
@@ -1812,6 +2341,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveExchangeForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveExchangeForListingResponse:
         """Deserializes the RemoveExchangeForListingResponse from a dictionary."""
@@ -1829,6 +2363,12 @@ def as_dict(self) -> dict:
         if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
@@ -1851,6 +2391,13 @@ def as_dict(self) -> dict:
         if self.repo_path is not None: body['repo_path'] = self.repo_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInstallation:
         """Deserializes the RepoInstallation from a dictionary."""
@@ -1870,6 +2417,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchListingsResponse:
         """Deserializes the SearchListingsResponse from a dictionary."""
@@ -1890,6 +2444,13 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
@@ -1911,6 +2472,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
@@ -1939,6 +2507,16 @@ def as_dict(self) -> dict:
             body['shareCredentialsVersion'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body['shareCredentialsVersion'] = self.share_credentials_version
+        return body
+
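
As the `TokenDetail` hunk above shows, both serializers emit the REST wire key (`bearerToken`, `shareCredentialsVersion`) rather than the Python attribute name; only the depth of conversion differs between the two methods. A tiny sketch of the naming pattern (the `Token` name is a hypothetical stand-in):

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Token:
    bearer_token: Optional[str] = None

    def as_shallow_dict(self) -> dict:
        body = {}
        # snake_case attribute, camelCase wire key — mirroring the generated code
        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
        return body


assert Token(bearer_token='abc').as_shallow_dict() == {'bearerToken': 'abc'}
```
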
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenDetail:
         """Deserializes the TokenDetail from a dictionary."""
@@ -1984,6 +2562,18 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
@@ -2009,6 +2599,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterRequest:
         """Deserializes the UpdateExchangeFilterRequest from a dictionary."""
@@ -2025,6 +2622,12 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterResponse:
         """Deserializes the UpdateExchangeFilterResponse from a dictionary."""
@@ -2044,6 +2647,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeRequest:
         """Deserializes the UpdateExchangeRequest from a dictionary."""
@@ -2060,6 +2670,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeResponse:
         """Deserializes the UpdateExchangeResponse from a dictionary."""
@@ -2085,6 +2701,15 @@ def as_dict(self) -> dict:
         if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        if self.installation_id is not None: body['installation_id'] = self.installation_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationRequest:
         """Deserializes the UpdateInstallationRequest from a dictionary."""
@@ -2104,6 +2729,12 @@ def as_dict(self) -> dict:
         if self.installation: body['installation'] = self.installation.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationResponse:
         """Deserializes the UpdateInstallationResponse from a dictionary."""
@@ -2123,6 +2754,13 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingRequest:
         """Deserializes the UpdateListingRequest from a dictionary."""
@@ -2139,6 +2777,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingResponse:
         """Deserializes the UpdateListingResponse from a dictionary."""
@@ -2167,6 +2811,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.reason is not None: body['reason'] = self.reason
+        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestRequest:
         """Deserializes the UpdatePersonalizationRequestRequest from a dictionary."""
@@ -2187,6 +2841,12 @@ def as_dict(self) -> dict:
         if self.request: body['request'] = self.request.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.request: body['request'] = self.request
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestResponse:
         """Deserializes the UpdatePersonalizationRequestResponse from a dictionary."""
@@ -2209,6 +2869,13 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardRequest:
         """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary."""
@@ -2233,6 +2900,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardResponse:
         """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary."""
@@ -2254,6 +2929,13 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderRequest:
         """Deserializes the UpdateProviderRequest from a dictionary."""
@@ -2270,6 +2952,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderResponse:
         """Deserializes the UpdateProviderResponse from a dictionary."""
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index c44edbe48..e551c72ca 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -90,6 +90,21 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Activity into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity_type is not None: body['activity_type'] = self.activity_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.from_stage is not None: body['from_stage'] = self.from_stage
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.system_comment is not None: body['system_comment'] = self.system_comment
+        if self.to_stage is not None: body['to_stage'] = self.to_stage
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Activity:
         """Deserializes the Activity from a dictionary."""
@@ -177,6 +192,17 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.archive_existing_versions is not None:
+            body['archive_existing_versions'] = self.archive_existing_versions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequest:
         """Deserializes the ApproveTransitionRequest from a dictionary."""
@@ -198,6 +224,12 @@ def as_dict(self) -> dict:
         if self.activity: body['activity'] = self.activity.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity: body['activity'] = self.activity
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequestResponse:
         """Deserializes the ApproveTransitionRequestResponse from a dictionary."""
@@ -248,6 +280,18 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CommentObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.available_actions: body['available_actions'] = self.available_actions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommentObject:
         """Deserializes the CommentObject from a dictionary."""
@@ -278,6 +322,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateComment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateComment:
         """Deserializes the CreateComment from a dictionary."""
@@ -295,6 +347,12 @@ def as_dict(self) -> dict:
         if self.comment: body['comment'] = self.comment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment: body['comment'] = self.comment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCommentResponse:
         """Deserializes the CreateCommentResponse from a dictionary."""
@@ -324,6 +382,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperiment:
         """Deserializes the CreateExperiment from a dictionary."""
@@ -343,6 +409,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperimentResponse:
         """Deserializes the CreateExperimentResponse from a dictionary."""
@@ -368,6 +440,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelRequest:
         """Deserializes the CreateModelRequest from a dictionary."""
@@ -386,6 +466,12 @@ def as_dict(self) -> dict:
         if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model: body['registered_model'] = self.registered_model
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelResponse:
         """Deserializes the CreateModelResponse from a dictionary."""
@@ -425,6 +511,17 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionRequest:
         """Deserializes the CreateModelVersionRequest from a dictionary."""
@@ -447,6 +544,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionResponse:
         """Deserializes the CreateModelVersionResponse from a dictionary."""
@@ -515,6 +618,17 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegistryWebhook:
         """Deserializes the CreateRegistryWebhook from a dictionary."""
@@ -550,6 +664,15 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRun:
         """Deserializes the CreateRun from a dictionary."""
@@ -570,6 +693,12 @@ def as_dict(self) -> dict:
         if self.run: body['run'] = self.run.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run: body['run'] = self.run
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRunResponse:
         """Deserializes the CreateRunResponse from a dictionary."""
@@ -607,6 +736,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequest:
         """Deserializes the CreateTransitionRequest from a dictionary."""
@@ -627,6 +765,12 @@ def as_dict(self) -> dict:
         if self.request: body['request'] = self.request.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.request: body['request'] = self.request
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequestResponse:
         """Deserializes the CreateTransitionRequestResponse from a dictionary."""
@@ -643,6 +787,12 @@ def as_dict(self) -> dict:
         if self.webhook: body['webhook'] = self.webhook.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.webhook: body['webhook'] = self.webhook
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWebhookResponse:
         """Deserializes the CreateWebhookResponse from a dictionary."""
@@ -684,6 +834,17 @@ def as_dict(self) -> dict:
         if self.source_type is not None: body['source_type'] = self.source_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dataset into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.digest is not None: body['digest'] = self.digest
+        if self.name is not None: body['name'] = self.name
+        if self.profile is not None: body['profile'] = self.profile
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.source_type is not None: body['source_type'] = self.source_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dataset:
         """Deserializes the Dataset from a dictionary."""
@@ -710,6 +871,13 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatasetInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataset: body['dataset'] = self.dataset
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatasetInput:
         """Deserializes the DatasetInput from a dictionary."""
@@ -724,6 +892,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCommentResponse:
         """Deserializes the DeleteCommentResponse from a dictionary."""
@@ -741,6 +914,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExperiment:
         """Deserializes the DeleteExperiment from a dictionary."""
@@ -755,6 +934,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExperimentResponse:
         """Deserializes the DeleteExperimentResponse from a dictionary."""
@@ -769,6 +953,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelResponse:
         """Deserializes the DeleteModelResponse from a dictionary."""
@@ -783,6 +972,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelTagResponse:
         """Deserializes the DeleteModelTagResponse from a dictionary."""
@@ -797,6 +991,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionResponse:
         """Deserializes the DeleteModelVersionResponse from a dictionary."""
@@ -811,6 +1010,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelVersionTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionTagResponse:
         """Deserializes the DeleteModelVersionTagResponse from a dictionary."""
@@ -828,6 +1032,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
@@ -842,6 +1052,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
         """Deserializes the DeleteRunResponse from a dictionary."""
@@ -869,6 +1084,14 @@ def as_dict(self) -> dict:
         if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.max_runs is not None: body['max_runs'] = self.max_runs
+        if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRuns:
         """Deserializes the DeleteRuns from a dictionary."""
@@ -888,6 +1111,12 @@ def as_dict(self) -> dict:
         if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunsResponse:
         """Deserializes the DeleteRunsResponse from a dictionary."""
@@ -909,6 +1138,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTag:
         """Deserializes the DeleteTag from a dictionary."""
@@ -923,6 +1159,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTagResponse:
         """Deserializes the DeleteTagResponse from a dictionary."""
@@ -937,6 +1178,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTransitionRequestResponse:
         """Deserializes the DeleteTransitionRequestResponse from a dictionary."""
@@ -959,6 +1205,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWebhookResponse:
         """Deserializes the DeleteWebhookResponse from a dictionary."""
@@ -1001,6 +1252,18 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Experiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.last_update_time is not None: body['last_update_time'] = self.last_update_time
+        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Experiment:
         """Deserializes the Experiment from a dictionary."""
@@ -1037,6 +1300,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlRequest:
         """Deserializes the ExperimentAccessControlRequest from a dictionary."""
@@ -1074,6 +1347,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlResponse:
         """Deserializes the ExperimentAccessControlResponse from a dictionary."""
@@ -1101,6 +1385,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermission:
         """Deserializes the ExperimentPermission from a dictionary."""
@@ -1134,6 +1426,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissions:
         """Deserializes the ExperimentPermissions from a dictionary."""
@@ -1157,6 +1457,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsDescription:
         """Deserializes the ExperimentPermissionsDescription from a dictionary."""
@@ -1179,6 +1486,13 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsRequest:
         """Deserializes the ExperimentPermissionsRequest from a dictionary."""
@@ -1202,6 +1516,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentTag:
         """Deserializes the ExperimentTag from a dictionary."""
@@ -1227,6 +1548,14 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_dir is not None: body['is_dir'] = self.is_dir
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -1244,6 +1573,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentPermissionLevelsResponse:
         """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary."""
@@ -1261,6 +1596,12 @@ def as_dict(self) -> dict:
         if self.experiment: body['experiment'] = self.experiment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment: body['experiment'] = self.experiment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentResponse:
         """Deserializes the GetExperimentResponse from a dictionary."""
@@ -1282,6 +1623,13 @@ def as_dict(self) -> dict:
         if self.stages: body['stages'] = [v for v in self.stages]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.stages: body['stages'] = self.stages
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsRequest:
         """Deserializes the GetLatestVersionsRequest from a dictionary."""
@@ -1300,6 +1648,12 @@ def as_dict(self) -> dict:
         if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsResponse:
         """Deserializes the GetLatestVersionsResponse from a dictionary."""
@@ -1321,6 +1675,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetricHistoryResponse:
         """Deserializes the GetMetricHistoryResponse from a dictionary."""
@@ -1339,6 +1700,13 @@ def as_dict(self) -> dict:
             body['registered_model_databricks'] = self.registered_model_databricks.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model_databricks:
+            body['registered_model_databricks'] = self.registered_model_databricks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelResponse:
         """Deserializes the GetModelResponse from a dictionary."""
@@ -1356,6 +1724,12 @@ def as_dict(self) -> dict:
         if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionDownloadUriResponse:
         """Deserializes the GetModelVersionDownloadUriResponse from a dictionary."""
@@ -1372,6 +1746,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionResponse:
         """Deserializes the GetModelVersionResponse from a dictionary."""
@@ -1389,6 +1769,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRegisteredModelPermissionLevelsResponse:
         """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary."""
@@ -1407,6 +1793,12 @@ def as_dict(self) -> dict:
         if self.run: body['run'] = self.run.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run: body['run'] = self.run
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRunResponse:
         """Deserializes the GetRunResponse from a dictionary."""
@@ -1444,6 +1836,16 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authorization is not None: body['authorization'] = self.authorization
+        if self.enable_ssl_verification is not None:
+            body['enable_ssl_verification'] = self.enable_ssl_verification
+        if self.secret is not None: body['secret'] = self.secret
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpec:
         """Deserializes the HttpUrlSpec from a dictionary."""
@@ -1473,6 +1875,14 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enable_ssl_verification is not None:
+            body['enable_ssl_verification'] = self.enable_ssl_verification
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpecWithoutSecret:
         """Deserializes the HttpUrlSpecWithoutSecret from a dictionary."""
@@ -1494,6 +1904,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InputTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InputTag:
         """Deserializes the InputTag from a dictionary."""
@@ -1520,6 +1937,14 @@ def as_dict(self) -> dict:
         if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_token is not None: body['access_token'] = self.access_token
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpec:
         """Deserializes the JobSpec from a dictionary."""
@@ -1545,6 +1970,13 @@ def as_dict(self) -> dict:
         if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpecWithoutSecret:
         """Deserializes the JobSpecWithoutSecret from a dictionary."""
@@ -1570,6 +2002,14 @@ def as_dict(self) -> dict:
         if self.root_uri is not None: body['root_uri'] = self.root_uri
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.files: body['files'] = self.files
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.root_uri is not None: body['root_uri'] = self.root_uri
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListArtifactsResponse:
         """Deserializes the ListArtifactsResponse from a dictionary."""
@@ -1594,6 +2034,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiments: body['experiments'] = self.experiments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExperimentsResponse:
         """Deserializes the ListExperimentsResponse from a dictionary."""
@@ -1615,6 +2062,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelsResponse:
         """Deserializes the ListModelsResponse from a dictionary."""
@@ -1637,6 +2091,13 @@ def as_dict(self) -> dict:
         if self.webhooks: body['webhooks'] = [v.as_dict() for v in self.webhooks]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.webhooks: body['webhooks'] = self.webhooks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegistryWebhooks:
         """Deserializes the ListRegistryWebhooks from a dictionary."""
@@ -1655,6 +2116,12 @@ def as_dict(self) -> dict:
         if self.requests: body['requests'] = [v.as_dict() for v in self.requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.requests: body['requests'] = self.requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTransitionRequestsResponse:
         """Deserializes the ListTransitionRequestsResponse from a dictionary."""
@@ -1687,6 +2154,15 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogBatch into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.params: body['params'] = self.params
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogBatch:
         """Deserializes the LogBatch from a dictionary."""
@@ -1704,6 +2180,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogBatchResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogBatchResponse:
         """Deserializes the LogBatchResponse from a dictionary."""
@@ -1725,6 +2206,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogInputs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.datasets: body['datasets'] = self.datasets
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogInputs:
         """Deserializes the LogInputs from a dictionary."""
@@ -1739,6 +2227,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogInputsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogInputsResponse:
         """Deserializes the LogInputsResponse from a dictionary."""
@@ -1777,6 +2270,17 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogMetric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogMetric:
         """Deserializes the LogMetric from a dictionary."""
@@ -1796,6 +2300,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogMetricResponse:
         """Deserializes the LogMetricResponse from a dictionary."""
@@ -1817,6 +2326,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_json is not None: body['model_json'] = self.model_json
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogModel:
         """Deserializes the LogModel from a dictionary."""
@@ -1831,6 +2347,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogModelResponse:
         """Deserializes the LogModelResponse from a dictionary."""
@@ -1861,6 +2382,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogParam into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogParam:
         """Deserializes the LogParam from a dictionary."""
@@ -1878,6 +2408,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogParamResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogParamResponse:
         """Deserializes the LogParamResponse from a dictionary."""
@@ -1907,6 +2442,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Metric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Metric:
         """Deserializes the Metric from a dictionary."""
@@ -1953,6 +2497,19 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Model into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.latest_versions: body['latest_versions'] = self.latest_versions
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Model:
         """Deserializes the Model from a dictionary."""
@@ -2010,6 +2567,21 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.latest_versions: body['latest_versions'] = self.latest_versions
+        if self.name is not None: body['name'] = self.name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDatabricks:
         """Deserializes the ModelDatabricks from a dictionary."""
@@ -2039,6 +2611,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelTag:
         """Deserializes the ModelTag from a dictionary."""
@@ -2106,6 +2685,25 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersion into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.current_stage is not None: body['current_stage'] = self.current_stage
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersion:
         """Deserializes the ModelVersion from a dictionary."""
@@ -2205,6 +2803,26 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.current_stage is not None: body['current_stage'] = self.current_stage
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionDatabricks:
         """Deserializes the ModelVersionDatabricks from a dictionary."""
@@ -2247,6 +2865,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionTag:
         """Deserializes the ModelVersionTag from a dictionary."""
@@ -2261,8 +2886,15 @@ class Param:
     value: Optional[str] = None
     """Value associated with this param."""
 
-    def as_dict(self) -> dict:
-        """Serializes the Param into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the Param into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Param into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.key is not None: body['key'] = self.key
         if self.value is not None: body['value'] = self.value
@@ -2309,6 +2941,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlRequest:
         """Deserializes the RegisteredModelAccessControlRequest from a dictionary."""
@@ -2346,6 +2988,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlResponse:
         """Deserializes the RegisteredModelAccessControlResponse from a dictionary."""
@@ -2373,6 +3026,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermission:
         """Deserializes the RegisteredModelPermission from a dictionary."""
@@ -2408,6 +3069,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissions:
         """Deserializes the RegisteredModelPermissions from a dictionary."""
@@ -2431,6 +3100,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsDescription:
         """Deserializes the RegisteredModelPermissionsDescription from a dictionary."""
@@ -2453,6 +3129,13 @@ def as_dict(self) -> dict:
         if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsRequest:
         """Deserializes the RegisteredModelPermissionsRequest from a dictionary."""
@@ -2536,6 +3219,21 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.id is not None: body['id'] = self.id
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegistryWebhook:
         """Deserializes the RegistryWebhook from a dictionary."""
@@ -2611,6 +3309,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequest:
         """Deserializes the RejectTransitionRequest from a dictionary."""
@@ -2631,6 +3338,12 @@ def as_dict(self) -> dict:
         if self.activity: body['activity'] = self.activity.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity: body['activity'] = self.activity
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequestResponse:
         """Deserializes the RejectTransitionRequestResponse from a dictionary."""
@@ -2652,6 +3365,13 @@ def as_dict(self) -> dict:
         if self.new_name is not None: body['new_name'] = self.new_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelRequest:
         """Deserializes the RenameModelRequest from a dictionary."""
@@ -2668,6 +3388,12 @@ def as_dict(self) -> dict:
         if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model: body['registered_model'] = self.registered_model
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelResponse:
         """Deserializes the RenameModelResponse from a dictionary."""
@@ -2685,6 +3411,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreExperiment:
         """Deserializes the RestoreExperiment from a dictionary."""
@@ -2699,6 +3431,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreExperimentResponse:
         """Deserializes the RestoreExperimentResponse from a dictionary."""
@@ -2716,6 +3453,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRun:
         """Deserializes the RestoreRun from a dictionary."""
@@ -2730,6 +3473,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRunResponse:
         """Deserializes the RestoreRunResponse from a dictionary."""
@@ -2757,6 +3505,14 @@ def as_dict(self) -> dict:
         if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.max_runs is not None: body['max_runs'] = self.max_runs
+        if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRuns:
         """Deserializes the RestoreRuns from a dictionary."""
@@ -2776,6 +3532,12 @@ def as_dict(self) -> dict:
         if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRunsResponse:
         """Deserializes the RestoreRunsResponse from a dictionary."""
@@ -2801,6 +3563,14 @@ def as_dict(self) -> dict:
         if self.inputs: body['inputs'] = self.inputs.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Run into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        if self.info: body['info'] = self.info
+        if self.inputs: body['inputs'] = self.inputs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
@@ -2828,6 +3598,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.params: body['params'] = self.params
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunData:
         """Deserializes the RunData from a dictionary."""
@@ -2883,6 +3661,20 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInfo:
         """Deserializes the RunInfo from a dictionary."""
@@ -2918,6 +3710,12 @@ def as_dict(self) -> dict:
         if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunInputs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInputs:
         """Deserializes the RunInputs from a dictionary."""
@@ -2939,6 +3737,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTag:
         """Deserializes the RunTag from a dictionary."""
@@ -2975,6 +3780,16 @@ def as_dict(self) -> dict:
         if self.view_type is not None: body['view_type'] = self.view_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter is not None: body['filter'] = self.filter
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.order_by: body['order_by'] = self.order_by
+        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.view_type is not None: body['view_type'] = self.view_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperiments:
         """Deserializes the SearchExperiments from a dictionary."""
@@ -3001,6 +3816,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiments: body['experiments'] = self.experiments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperimentsResponse:
         """Deserializes the SearchExperimentsResponse from a dictionary."""
@@ -3032,6 +3854,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelVersionsResponse:
         """Deserializes the SearchModelVersionsResponse from a dictionary."""
@@ -3054,6 +3883,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelsResponse:
         """Deserializes the SearchModelsResponse from a dictionary."""
@@ -3105,6 +3941,17 @@ def as_dict(self) -> dict:
         if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_ids: body['experiment_ids'] = self.experiment_ids
+        if self.filter is not None: body['filter'] = self.filter
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.order_by: body['order_by'] = self.order_by
+        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.run_view_type is not None: body['run_view_type'] = self.run_view_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRuns:
         """Deserializes the SearchRuns from a dictionary."""
@@ -3131,6 +3978,13 @@ def as_dict(self) -> dict:
         if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRunsResponse:
         """Deserializes the SearchRunsResponse from a dictionary."""
@@ -3166,6 +4020,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetExperimentTag:
         """Deserializes the SetExperimentTag from a dictionary."""
@@ -3182,6 +4044,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetExperimentTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetExperimentTagResponse:
         """Deserializes the SetExperimentTagResponse from a dictionary."""
@@ -3210,6 +4077,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelTagRequest:
         """Deserializes the SetModelTagRequest from a dictionary."""
@@ -3224,6 +4099,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelTagResponse:
         """Deserializes the SetModelTagResponse from a dictionary."""
@@ -3256,6 +4136,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagRequest:
         """Deserializes the SetModelVersionTagRequest from a dictionary."""
@@ -3273,6 +4162,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelVersionTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagResponse:
         """Deserializes the SetModelVersionTagResponse from a dictionary."""
@@ -3305,6 +4199,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetTag:
         """Deserializes the SetTag from a dictionary."""
@@ -3322,6 +4225,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetTagResponse:
         """Deserializes the SetTagResponse from a dictionary."""
@@ -3375,6 +4283,13 @@ def as_dict(self) -> dict:
         if self.status_code is not None: body['status_code'] = self.status_code
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.body is not None: body['body'] = self.body
+        if self.status_code is not None: body['status_code'] = self.status_code
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhook:
         """Deserializes the TestRegistryWebhook from a dictionary."""
@@ -3397,6 +4312,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.event is not None: body['event'] = self.event
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookRequest:
         """Deserializes the TestRegistryWebhookRequest from a dictionary."""
@@ -3414,6 +4336,12 @@ def as_dict(self) -> dict:
         if self.webhook: body['webhook'] = self.webhook.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.webhook: body['webhook'] = self.webhook
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookResponse:
         """Deserializes the TestRegistryWebhookResponse from a dictionary."""
@@ -3456,6 +4384,17 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.archive_existing_versions is not None:
+            body['archive_existing_versions'] = self.archive_existing_versions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionModelVersionStageDatabricks:
         """Deserializes the TransitionModelVersionStageDatabricks from a dictionary."""
@@ -3503,6 +4442,16 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.available_actions: body['available_actions'] = self.available_actions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.to_stage is not None: body['to_stage'] = self.to_stage
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionRequest:
         """Deserializes the TransitionRequest from a dictionary."""
@@ -3523,6 +4472,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionStageResponse:
         """Deserializes the TransitionStageResponse from a dictionary."""
@@ -3544,6 +4499,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateComment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComment:
         """Deserializes the UpdateComment from a dictionary."""
@@ -3561,6 +4523,12 @@ def as_dict(self) -> dict:
         if self.comment: body['comment'] = self.comment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment: body['comment'] = self.comment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCommentResponse:
         """Deserializes the UpdateCommentResponse from a dictionary."""
@@ -3582,6 +4550,13 @@ def as_dict(self) -> dict:
         if self.new_name is not None: body['new_name'] = self.new_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.new_name is not None: body['new_name'] = self.new_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExperiment:
         """Deserializes the UpdateExperiment from a dictionary."""
@@ -3596,6 +4571,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExperimentResponse:
         """Deserializes the UpdateExperimentResponse from a dictionary."""
@@ -3617,6 +4597,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelRequest:
         """Deserializes the UpdateModelRequest from a dictionary."""
@@ -3631,6 +4618,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelResponse:
         """Deserializes the UpdateModelResponse from a dictionary."""
@@ -3656,6 +4648,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
@@ -3672,6 +4672,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionResponse:
         """Deserializes the UpdateModelVersionResponse from a dictionary."""
@@ -3740,6 +4745,17 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.id is not None: body['id'] = self.id
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegistryWebhook:
         """Deserializes the UpdateRegistryWebhook from a dictionary."""
@@ -3775,6 +4791,15 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRun:
         """Deserializes the UpdateRun from a dictionary."""
@@ -3795,6 +4820,12 @@ def as_dict(self) -> dict:
         if self.run_info: body['run_info'] = self.run_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_info: body['run_info'] = self.run_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRunResponse:
         """Deserializes the UpdateRunResponse from a dictionary."""
@@ -3819,6 +4850,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWebhookResponse:
         """Deserializes the UpdateWebhookResponse from a dictionary."""
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 7bfc8fe1a..11a83b3ab 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -41,6 +41,16 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.confidential is not None: body['confidential'] = self.confidential
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration:
         """Deserializes the CreateCustomAppIntegration from a dictionary."""
@@ -71,6 +81,14 @@ def as_dict(self) -> dict:
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.client_secret is not None: body['client_secret'] = self.client_secret
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput:
         """Deserializes the CreateCustomAppIntegrationOutput from a dictionary."""
@@ -94,6 +112,13 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration:
         """Deserializes the CreatePublishedAppIntegration from a dictionary."""
@@ -112,6 +137,12 @@ def as_dict(self) -> dict:
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegrationOutput:
         """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary."""
@@ -149,6 +180,17 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.id is not None: body['id'] = self.id
+        if self.secret is not None: body['secret'] = self.secret
+        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
+        if self.status is not None: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse:
         """Deserializes the CreateServicePrincipalSecretResponse from a dictionary."""
@@ -175,6 +217,13 @@ def as_dict(self) -> dict:
         if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
         """Deserializes the DataPlaneInfo from a dictionary."""
@@ -190,6 +239,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCustomAppIntegrationOutput:
         """Deserializes the DeleteCustomAppIntegrationOutput from a dictionary."""
@@ -204,6 +258,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePublishedAppIntegrationOutput:
         """Deserializes the DeletePublishedAppIntegrationOutput from a dictionary."""
@@ -218,6 +277,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -267,6 +331,21 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.confidential is not None: body['confidential'] = self.confidential
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.creator_username is not None: body['creator_username'] = self.creator_username
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput:
         """Deserializes the GetCustomAppIntegrationOutput from a dictionary."""
@@ -296,6 +375,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput:
         """Deserializes the GetCustomAppIntegrationsOutput from a dictionary."""
@@ -332,6 +418,17 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.name is not None: body['name'] = self.name
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput:
         """Deserializes the GetPublishedAppIntegrationOutput from a dictionary."""
@@ -357,6 +454,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput:
         """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary."""
@@ -380,6 +484,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
         """Deserializes the GetPublishedAppsOutput from a dictionary."""
@@ -402,6 +513,13 @@ def as_dict(self) -> dict:
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.secrets: body['secrets'] = self.secrets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
         """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
@@ -446,6 +564,19 @@ def as_dict(self) -> dict:
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.description is not None: body['description'] = self.description
+        if self.is_confidential_client is not None:
+            body['is_confidential_client'] = self.is_confidential_client
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishedAppOutput:
         """Deserializes the PublishedAppOutput from a dictionary."""
@@ -485,6 +616,16 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.id is not None: body['id'] = self.id
+        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
+        if self.status is not None: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretInfo:
         """Deserializes the SecretInfo from a dictionary."""
@@ -512,6 +653,15 @@ def as_dict(self) -> dict:
             body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_token_ttl_in_minutes is not None:
+            body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
+        if self.refresh_token_ttl_in_minutes is not None:
+            body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy:
         """Deserializes the TokenAccessPolicy from a dictionary."""
@@ -537,6 +687,14 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration:
         """Deserializes the UpdateCustomAppIntegration from a dictionary."""
@@ -553,6 +711,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegrationOutput:
         """Deserializes the UpdateCustomAppIntegrationOutput from a dictionary."""
@@ -573,6 +736,13 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegration:
         """Deserializes the UpdatePublishedAppIntegration from a dictionary."""
@@ -588,6 +758,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput:
         """Deserializes the UpdatePublishedAppIntegrationOutput from a dictionary."""
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 137ab3c21..0ded4a83b 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -133,6 +133,36 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.dry_run is not None: body['dry_run'] = self.dry_run
+        if self.edition is not None: body['edition'] = self.edition
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
@@ -178,6 +208,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.effective_settings: body['effective_settings'] = self.effective_settings
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipelineResponse:
         """Deserializes the CreatePipelineResponse from a dictionary."""
@@ -198,6 +235,13 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronTrigger:
         """Deserializes the CronTrigger from a dictionary."""
@@ -220,6 +264,13 @@ def as_dict(self) -> dict:
         if self.seq_no is not None: body['seq_no'] = self.seq_no
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance is not None: body['instance'] = self.instance
+        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneId:
         """Deserializes the DataPlaneId from a dictionary."""
@@ -234,6 +285,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePipelineResponse:
         """Deserializes the DeletePipelineResponse from a dictionary."""
@@ -367,6 +423,38 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.expected_last_modified is not None:
+            body['expected_last_modified'] = self.expected_last_modified
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
@@ -406,6 +494,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPipelineResponse:
         """Deserializes the EditPipelineResponse from a dictionary."""
@@ -427,6 +520,13 @@ def as_dict(self) -> dict:
         if self.fatal is not None: body['fatal'] = self.fatal
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exceptions: body['exceptions'] = self.exceptions
+        if self.fatal is not None: body['fatal'] = self.fatal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ErrorDetail:
         """Deserializes the ErrorDetail from a dictionary."""
@@ -454,6 +554,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileLibrary:
         """Deserializes the FileLibrary from a dictionary."""
@@ -475,6 +581,13 @@ def as_dict(self) -> dict:
         if self.include: body['include'] = [v for v in self.include]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Filters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exclude: body['exclude'] = self.exclude
+        if self.include: body['include'] = self.include
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Filters:
         """Deserializes the Filters from a dictionary."""
@@ -492,6 +605,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelinePermissionLevelsResponse:
         """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary."""
@@ -554,6 +673,24 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.health is not None: body['health'] = self.health
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.spec: body['spec'] = self.spec
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse:
         """Deserializes the GetPipelineResponse from a dictionary."""
@@ -589,6 +726,12 @@ def as_dict(self) -> dict:
         if self.update: body['update'] = self.update.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.update: body['update'] = self.update
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
         """Deserializes the GetUpdateResponse from a dictionary."""
@@ -614,6 +757,14 @@ def as_dict(self) -> dict:
         if self.table: body['table'] = self.table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.report: body['report'] = self.report
+        if self.schema: body['schema'] = self.schema
+        if self.table: body['table'] = self.table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
@@ -655,6 +806,18 @@ def as_dict(self) -> dict:
             body['gateway_storage_schema'] = self.gateway_storage_schema
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.gateway_storage_catalog is not None:
+            body['gateway_storage_catalog'] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+        if self.gateway_storage_schema is not None:
+            body['gateway_storage_schema'] = self.gateway_storage_schema
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
         """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
@@ -691,6 +854,15 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
+        if self.objects: body['objects'] = self.objects
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition:
         """Deserializes the IngestionPipelineDefinition from a dictionary."""
@@ -719,6 +891,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPipelineEventsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.events: body['events'] = self.events
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelineEventsResponse:
         """Deserializes the ListPipelineEventsResponse from a dictionary."""
@@ -742,6 +922,13 @@ def as_dict(self) -> dict:
         if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.statuses: body['statuses'] = self.statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelinesResponse:
         """Deserializes the ListPipelinesResponse from a dictionary."""
@@ -768,6 +955,14 @@ def as_dict(self) -> dict:
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.updates: body['updates'] = self.updates
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse:
         """Deserializes the ListUpdatesResponse from a dictionary."""
@@ -784,6 +979,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ManualTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ManualTrigger:
         """Deserializes the ManualTrigger from a dictionary."""
@@ -809,6 +1009,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookLibrary:
         """Deserializes the NotebookLibrary from a dictionary."""
@@ -835,6 +1041,13 @@ def as_dict(self) -> dict:
         if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Notifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alerts: body['alerts'] = self.alerts
+        if self.email_recipients: body['email_recipients'] = self.email_recipients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Notifications:
         """Deserializes the Notifications from a dictionary."""
@@ -917,6 +1130,28 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Origin into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.batch_id is not None: body['batch_id'] = self.batch_id
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
+        if self.flow_id is not None: body['flow_id'] = self.flow_id
+        if self.flow_name is not None: body['flow_name'] = self.flow_name
+        if self.host is not None: body['host'] = self.host
+        if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id
+        if self.materialization_name is not None: body['materialization_name'] = self.materialization_name
+        if self.org_id is not None: body['org_id'] = self.org_id
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name
+        if self.region is not None: body['region'] = self.region
+        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Origin:
         """Deserializes the Origin from a dictionary."""
@@ -963,6 +1198,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlRequest:
         """Deserializes the PipelineAccessControlRequest from a dictionary."""
@@ -1000,6 +1245,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlResponse:
         """Deserializes the PipelineAccessControlResponse from a dictionary."""
@@ -1139,6 +1395,33 @@ def as_dict(self) -> dict:
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.label is not None: body['label'] = self.label
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineCluster:
         """Deserializes the PipelineCluster from a dictionary."""
@@ -1187,6 +1470,14 @@ def as_dict(self) -> dict:
         if self.mode is not None: body['mode'] = self.mode.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.max_workers is not None: body['max_workers'] = self.max_workers
+        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        if self.mode is not None: body['mode'] = self.mode
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineClusterAutoscale:
         """Deserializes the PipelineClusterAutoscale from a dictionary."""
@@ -1220,6 +1511,13 @@ def as_dict(self) -> dict:
         if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kind is not None: body['kind'] = self.kind
+        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineDeployment:
         """Deserializes the PipelineDeployment from a dictionary."""
@@ -1270,6 +1568,20 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error: body['error'] = self.error
+        if self.event_type is not None: body['event_type'] = self.event_type
+        if self.id is not None: body['id'] = self.id
+        if self.level is not None: body['level'] = self.level
+        if self.maturity_level is not None: body['maturity_level'] = self.maturity_level
+        if self.message is not None: body['message'] = self.message
+        if self.origin: body['origin'] = self.origin
+        if self.sequence: body['sequence'] = self.sequence
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineEvent:
         """Deserializes the PipelineEvent from a dictionary."""
@@ -1311,6 +1623,16 @@ def as_dict(self) -> dict:
         if self.whl is not None: body['whl'] = self.whl
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file: body['file'] = self.file
+        if self.jar is not None: body['jar'] = self.jar
+        if self.maven: body['maven'] = self.maven
+        if self.notebook: body['notebook'] = self.notebook
+        if self.whl is not None: body['whl'] = self.whl
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary:
         """Deserializes the PipelineLibrary from a dictionary."""
@@ -1338,6 +1660,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermission:
         """Deserializes the PipelinePermission from a dictionary."""
@@ -1372,6 +1702,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissions:
         """Deserializes the PipelinePermissions from a dictionary."""
@@ -1395,6 +1733,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsDescription:
         """Deserializes the PipelinePermissionsDescription from a dictionary."""
@@ -1417,6 +1762,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest:
         """Deserializes the PipelinePermissionsRequest from a dictionary."""
@@ -1530,6 +1882,34 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
@@ -1612,6 +1992,19 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.health is not None: body['health'] = self.health
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo:
         """Deserializes the PipelineStateInfo from a dictionary."""
@@ -1645,6 +2038,13 @@ def as_dict(self) -> dict:
         if self.manual: body['manual'] = self.manual.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cron: body['cron'] = self.cron
+        if self.manual: body['manual'] = self.manual
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger:
         """Deserializes the PipelineTrigger from a dictionary."""
@@ -1679,6 +2079,16 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReportSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
         """Deserializes the ReportSpec from a dictionary."""
@@ -1712,6 +2122,14 @@ def as_dict(self) -> dict:
         if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.days_of_week is not None: body['days_of_week'] = self.days_of_week
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
         """Deserializes the RestartWindow from a dictionary."""
@@ -1764,6 +2182,16 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaSpec:
         """Deserializes the SchemaSpec from a dictionary."""
@@ -1789,6 +2217,13 @@ def as_dict(self) -> dict:
         if self.data_plane_id: body['data_plane_id'] = self.data_plane_id.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Sequencing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
+        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Sequencing:
         """Deserializes the Sequencing from a dictionary."""
@@ -1815,6 +2250,14 @@ def as_dict(self) -> dict:
         if self.stack: body['stack'] = [v.as_dict() for v in self.stack]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SerializedException into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.class_name is not None: body['class_name'] = self.class_name
+        if self.message is not None: body['message'] = self.message
+        if self.stack: body['stack'] = self.stack
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SerializedException:
         """Deserializes the SerializedException from a dictionary."""
@@ -1846,6 +2289,15 @@ def as_dict(self) -> dict:
         if self.method_name is not None: body['method_name'] = self.method_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StackFrame into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
+        if self.file_name is not None: body['file_name'] = self.file_name
+        if self.line_number is not None: body['line_number'] = self.line_number
+        if self.method_name is not None: body['method_name'] = self.method_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StackFrame:
         """Deserializes the StackFrame from a dictionary."""
@@ -1890,6 +2342,17 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdate:
         """Deserializes the StartUpdate from a dictionary."""
@@ -1921,6 +2384,12 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdateResponse:
         """Deserializes the StartUpdateResponse from a dictionary."""
@@ -1935,6 +2404,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StopPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StopPipelineResponse:
         """Deserializes the StopPipelineResponse from a dictionary."""
@@ -1978,6 +2452,18 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.source_table is not None: body['source_table'] = self.source_table
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpec:
         """Deserializes the TableSpec from a dictionary."""
@@ -2016,6 +2502,16 @@ def as_dict(self) -> dict:
         if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        if self.salesforce_include_formula_fields is not None:
+            body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
+        if self.scd_type is not None: body['scd_type'] = self.scd_type
+        if self.sequence_by: body['sequence_by'] = self.sequence_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
@@ -2090,6 +2586,22 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.config: body['config'] = self.config
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInfo:
         """Deserializes the UpdateInfo from a dictionary."""
@@ -2149,6 +2661,14 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStateInfo:
         """Deserializes the UpdateStateInfo from a dictionary."""
diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py
index d108f7984..c54120ad8 100755
--- a/databricks/sdk/service/provisioning.py
+++ b/databricks/sdk/service/provisioning.py
@@ -28,6 +28,12 @@ def as_dict(self) -> dict:
         if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sts_role: body['sts_role'] = self.sts_role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
@@ -60,6 +66,16 @@ def as_dict(self) -> dict:
             body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key_alias is not None: body['key_alias'] = self.key_alias
+        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.key_region is not None: body['key_region'] = self.key_region
+        if self.reuse_key_for_cluster_volumes is not None:
+            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsKeyInfo:
         """Deserializes the AwsKeyInfo from a dictionary."""
@@ -84,6 +100,13 @@ def as_dict(self) -> dict:
         if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resource_group is not None: body['resource_group'] = self.resource_group
+        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureWorkspaceInfo:
         """Deserializes the AzureWorkspaceInfo from a dictionary."""
@@ -104,6 +127,12 @@ def as_dict(self) -> dict:
         if self.gcp: body['gcp'] = self.gcp.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp: body['gcp'] = self.gcp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudResourceContainer:
         """Deserializes the CloudResourceContainer from a dictionary."""
@@ -133,6 +162,15 @@ def as_dict(self) -> dict:
             body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key_alias is not None: body['key_alias'] = self.key_alias
+        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.reuse_key_for_cluster_volumes is not None:
+            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAwsKeyInfo:
         """Deserializes the CreateAwsKeyInfo from a dictionary."""
@@ -151,6 +189,12 @@ def as_dict(self) -> dict:
         if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sts_role: body['sts_role'] = self.sts_role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialAwsCredentials:
         """Deserializes the CreateCredentialAwsCredentials from a dictionary."""
@@ -171,6 +215,13 @@ def as_dict(self) -> dict:
         if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
+        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
@@ -189,6 +240,12 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialStsRole:
         """Deserializes the CreateCredentialStsRole from a dictionary."""
@@ -212,6 +269,14 @@ def as_dict(self) -> dict:
         if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
+        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
+        if self.use_cases: body['use_cases'] = self.use_cases
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomerManagedKeyRequest:
         """Deserializes the CreateCustomerManagedKeyRequest from a dictionary."""
@@ -231,6 +296,12 @@ def as_dict(self) -> dict:
         if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateGcpKeyInfo:
         """Deserializes the CreateGcpKeyInfo from a dictionary."""
@@ -275,6 +346,17 @@ def as_dict(self) -> dict:
         if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
+        if self.network_name is not None: body['network_name'] = self.network_name
+        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
+        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
+        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkRequest:
         """Deserializes the CreateNetworkRequest from a dictionary."""
@@ -302,6 +384,14 @@ def as_dict(self) -> dict:
             body['storage_configuration_name'] = self.storage_configuration_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.storage_configuration_name is not None:
+            body['storage_configuration_name'] = self.storage_configuration_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageConfigurationRequest:
         """Deserializes the CreateStorageConfigurationRequest from a dictionary."""
@@ -332,6 +422,15 @@ def as_dict(self) -> dict:
         if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
+        if self.region is not None: body['region'] = self.region
+        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVpcEndpointRequest:
         """Deserializes the CreateVpcEndpointRequest from a dictionary."""
@@ -479,6 +578,34 @@ def as_dict(self) -> dict:
         if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.gcp_managed_network_config:
+            body['gcp_managed_network_config'] = self.gcp_managed_network_config
+        if self.gke_config: body['gke_config'] = self.gke_config
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
+        if self.location is not None: body['location'] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest:
         """Deserializes the CreateWorkspaceRequest from a dictionary."""
@@ -529,6 +656,16 @@ def as_dict(self) -> dict:
         if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Credential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Credential:
         """Deserializes the Credential from a dictionary."""
@@ -556,6 +693,12 @@ def as_dict(self) -> dict:
         if self.project_id is not None: body['project_id'] = self.project_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.project_id is not None: body['project_id'] = self.project_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerFacingGcpCloudResourceContainer:
         """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary."""
@@ -592,6 +735,18 @@ def as_dict(self) -> dict:
         if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.customer_managed_key_id is not None:
+            body['customer_managed_key_id'] = self.customer_managed_key_id
+        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
+        if self.use_cases: body['use_cases'] = self.use_cases
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerManagedKey:
         """Deserializes the CustomerManagedKey from a dictionary."""
@@ -611,6 +766,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -659,6 +819,16 @@ def as_dict(self) -> dict:
         if self.customer_name is not None: body['customer_name'] = self.customer_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authoritative_user_email is not None:
+            body['authoritative_user_email'] = self.authoritative_user_email
+        if self.authoritative_user_full_name is not None:
+            body['authoritative_user_full_name'] = self.authoritative_user_full_name
+        if self.customer_name is not None: body['customer_name'] = self.customer_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo:
         """Deserializes the ExternalCustomerInfo from a dictionary."""
@@ -678,6 +848,12 @@ def as_dict(self) -> dict:
         if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpKeyInfo:
         """Deserializes the GcpKeyInfo from a dictionary."""
@@ -727,6 +903,16 @@ def as_dict(self) -> dict:
         if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gke_cluster_pod_ip_range is not None:
+            body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range
+        if self.gke_cluster_service_ip_range is not None:
+            body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range
+        if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpManagedNetworkConfig:
         """Deserializes the GcpManagedNetworkConfig from a dictionary."""
@@ -772,6 +958,17 @@ def as_dict(self) -> dict:
         if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.network_project_id is not None: body['network_project_id'] = self.network_project_id
+        if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name
+        if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name
+        if self.subnet_id is not None: body['subnet_id'] = self.subnet_id
+        if self.subnet_region is not None: body['subnet_region'] = self.subnet_region
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpNetworkInfo:
         """Deserializes the GcpNetworkInfo from a dictionary."""
@@ -812,6 +1009,16 @@ def as_dict(self) -> dict:
         if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region
+        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id
+        if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name
+        if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpVpcEndpointInfo:
         """Deserializes the GcpVpcEndpointInfo from a dictionary."""
@@ -848,6 +1055,13 @@ def as_dict(self) -> dict:
         if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GkeConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type
+        if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GkeConfig:
         """Deserializes the GkeConfig from a dictionary."""
@@ -940,6 +1154,24 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Network into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.error_messages: body['error_messages'] = self.error_messages
+        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.network_name is not None: body['network_name'] = self.network_name
+        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
+        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
+        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.vpc_status is not None: body['vpc_status'] = self.vpc_status
+        if self.warning_messages: body['warning_messages'] = self.warning_messages
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Network:
         """Deserializes the Network from a dictionary."""
@@ -974,6 +1206,13 @@ def as_dict(self) -> dict:
         if self.error_type is not None: body['error_type'] = self.error_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.error_type is not None: body['error_type'] = self.error_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkHealth:
         """Deserializes the NetworkHealth from a dictionary."""
@@ -1001,6 +1240,13 @@ def as_dict(self) -> dict:
         if self.rest_api: body['rest_api'] = [v for v in self.rest_api]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay
+        if self.rest_api: body['rest_api'] = self.rest_api
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkVpcEndpoints:
         """Deserializes the NetworkVpcEndpoints from a dictionary."""
@@ -1022,6 +1268,13 @@ def as_dict(self) -> dict:
         if self.warning_type is not None: body['warning_type'] = self.warning_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkWarning into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warning_message is not None: body['warning_message'] = self.warning_message
+        if self.warning_type is not None: body['warning_type'] = self.warning_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkWarning:
         """Deserializes the NetworkWarning from a dictionary."""
@@ -1099,6 +1352,20 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body['private_access_settings_name'] = self.private_access_settings_name
+        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivateAccessSettings:
         """Deserializes the PrivateAccessSettings from a dictionary."""
@@ -1119,6 +1386,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
@@ -1138,6 +1410,12 @@ def as_dict(self) -> dict:
         if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RootBucketInfo:
         """Deserializes the RootBucketInfo from a dictionary."""
@@ -1173,6 +1451,18 @@ def as_dict(self) -> dict:
             body['storage_configuration_name'] = self.storage_configuration_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_configuration_name is not None:
+            body['storage_configuration_name'] = self.storage_configuration_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageConfiguration:
         """Deserializes the StorageConfiguration from a dictionary."""
@@ -1199,6 +1489,13 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StsRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StsRole:
         """Deserializes the StsRole from a dictionary."""
@@ -1213,6 +1510,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -1280,6 +1582,26 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest:
         """Deserializes the UpdateWorkspaceRequest from a dictionary."""
@@ -1349,6 +1671,19 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body['private_access_settings_name'] = self.private_access_settings_name
+        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertPrivateAccessSettingsRequest:
         """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary."""
@@ -1420,6 +1755,22 @@ def as_dict(self) -> dict:
         if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
+        if self.aws_endpoint_service_id is not None:
+            body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
+        if self.region is not None: body['region'] = self.region
+        if self.state is not None: body['state'] = self.state
+        if self.use_case is not None: body['use_case'] = self.use_case
+        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VpcEndpoint:
         """Deserializes the VpcEndpoint from a dictionary."""
@@ -1597,6 +1948,42 @@ def as_dict(self) -> dict:
             body['workspace_status_message'] = self.workspace_status_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Workspace into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info
+        if self.gcp_managed_network_config:
+            body['gcp_managed_network_config'] = self.gcp_managed_network_config
+        if self.gke_config: body['gke_config'] = self.gke_config
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
+        if self.location is not None: body['location'] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status
+        if self.workspace_status_message is not None:
+            body['workspace_status_message'] = self.workspace_status_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Workspace:
         """Deserializes the Workspace from a dictionary."""
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index b00420a08..cb7861e88 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -43,6 +43,14 @@ def as_dict(self) -> dict:
             body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key
+        if self.ai21labs_api_key_plaintext is not None:
+            body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig:
         """Deserializes the Ai21LabsConfig from a dictionary."""
@@ -76,6 +84,15 @@ def as_dict(self) -> dict:
         if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig:
         """Deserializes the AiGatewayConfig from a dictionary."""
@@ -111,6 +128,15 @@ def as_dict(self) -> dict:
         if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords
+        if self.pii: body['pii'] = self.pii
+        if self.safety is not None: body['safety'] = self.safety
+        if self.valid_topics: body['valid_topics'] = self.valid_topics
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
         """Deserializes the AiGatewayGuardrailParameters from a dictionary."""
@@ -134,6 +160,12 @@ def as_dict(self) -> dict:
         if self.behavior is not None: body['behavior'] = self.behavior.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.behavior is not None: body['behavior'] = self.behavior
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
         """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary."""
@@ -165,6 +197,13 @@ def as_dict(self) -> dict:
         if self.output: body['output'] = self.output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.input: body['input'] = self.input
+        if self.output: body['output'] = self.output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails:
         """Deserializes the AiGatewayGuardrails from a dictionary."""
@@ -198,6 +237,15 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig:
         """Deserializes the AiGatewayInferenceTableConfig from a dictionary."""
@@ -227,6 +275,14 @@ def as_dict(self) -> dict:
         if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayRateLimit into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
         """Deserializes the AiGatewayRateLimit from a dictionary."""
@@ -260,6 +316,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig:
         """Deserializes the AiGatewayUsageTrackingConfig from a dictionary."""
@@ -312,6 +374,19 @@ def as_dict(self) -> dict:
         if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
+        if self.aws_access_key_id_plaintext is not None:
+            body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
+        if self.aws_secret_access_key_plaintext is not None:
+            body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext
+        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
         """Deserializes the AmazonBedrockConfig from a dictionary."""
@@ -353,6 +428,14 @@ def as_dict(self) -> dict:
             body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.anthropic_api_key_plaintext is not None:
+            body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig:
         """Deserializes the AnthropicConfig from a dictionary."""
@@ -386,6 +469,15 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput:
         """Deserializes the AutoCaptureConfigInput from a dictionary."""
@@ -421,6 +513,16 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.state: body['state'] = self.state
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigOutput:
         """Deserializes the AutoCaptureConfigOutput from a dictionary."""
@@ -441,6 +543,12 @@ def as_dict(self) -> dict:
         if self.payload_table: body['payload_table'] = self.payload_table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.payload_table: body['payload_table'] = self.payload_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureState:
         """Deserializes the AutoCaptureState from a dictionary."""
@@ -458,6 +566,12 @@ def as_dict(self) -> dict:
         if self.logs is not None: body['logs'] = self.logs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.logs is not None: body['logs'] = self.logs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BuildLogsResponse:
         """Deserializes the BuildLogsResponse from a dictionary."""
@@ -479,6 +593,13 @@ def as_dict(self) -> dict:
         if self.role is not None: body['role'] = self.role.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChatMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.role is not None: body['role'] = self.role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChatMessage:
         """Deserializes the ChatMessage from a dictionary."""
@@ -518,6 +639,15 @@ def as_dict(self) -> dict:
             body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CohereConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base
+        if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key
+        if self.cohere_api_key_plaintext is not None:
+            body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CohereConfig:
         """Deserializes the CohereConfig from a dictionary."""
@@ -560,6 +690,17 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
         """Deserializes the CreateServingEndpoint from a dictionary."""
@@ -601,6 +742,16 @@ def as_dict(self) -> dict:
             body['databricks_workspace_url'] = self.databricks_workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token
+        if self.databricks_api_token_plaintext is not None:
+            body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext
+        if self.databricks_workspace_url is not None:
+            body['databricks_workspace_url'] = self.databricks_workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig:
         """Deserializes the DatabricksModelServingConfig from a dictionary."""
@@ -625,6 +776,14 @@ def as_dict(self) -> dict:
         if self.index: body['index'] = [v for v in self.index]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.data: body['data'] = self.data
+        if self.index: body['index'] = self.index
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataframeSplitInput:
         """Deserializes the DataframeSplitInput from a dictionary."""
@@ -639,6 +798,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -663,6 +827,14 @@ def as_dict(self) -> dict:
         if self.object is not None: body['object'] = self.object.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding: body['embedding'] = self.embedding
+        if self.index is not None: body['index'] = self.index
+        if self.object is not None: body['object'] = self.object
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingsV1ResponseEmbeddingElement:
         """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary."""
@@ -707,6 +879,16 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.name is not None: body['name'] = self.name
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput:
         """Deserializes the EndpointCoreConfigInput from a dictionary."""
@@ -746,6 +928,16 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.config_version is not None: body['config_version'] = self.config_version
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigOutput:
         """Deserializes the EndpointCoreConfigOutput from a dictionary."""
@@ -772,6 +964,13 @@ def as_dict(self) -> dict:
         if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary:
         """Deserializes the EndpointCoreConfigSummary from a dictionary."""
@@ -812,6 +1011,17 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.config_version is not None: body['config_version'] = self.config_version
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig:
         """Deserializes the EndpointPendingConfig from a dictionary."""
@@ -843,6 +1053,13 @@ def as_dict(self) -> dict:
         if self.ready is not None: body['ready'] = self.ready.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_update is not None: body['config_update'] = self.config_update
+        if self.ready is not None: body['ready'] = self.ready
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointState:
         """Deserializes the EndpointState from a dictionary."""
@@ -886,6 +1103,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         """Deserializes the EndpointTag from a dictionary."""
@@ -902,6 +1126,12 @@ def as_dict(self) -> dict:
         if self.contents: body['contents'] = self.contents
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
         """Deserializes the ExportMetricsResponse from a dictionary."""
@@ -963,6 +1193,24 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config
+        if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config
+        if self.anthropic_config: body['anthropic_config'] = self.anthropic_config
+        if self.cohere_config: body['cohere_config'] = self.cohere_config
+        if self.databricks_model_serving_config:
+            body['databricks_model_serving_config'] = self.databricks_model_serving_config
+        if self.google_cloud_vertex_ai_config:
+            body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config
+        if self.name is not None: body['name'] = self.name
+        if self.openai_config: body['openai_config'] = self.openai_config
+        if self.palm_config: body['palm_config'] = self.palm_config
+        if self.provider is not None: body['provider'] = self.provider
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
         """Deserializes the ExternalModel from a dictionary."""
@@ -1015,6 +1263,14 @@ def as_dict(self) -> dict:
         if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens
+        if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens
+        if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
         """Deserializes the ExternalModelUsageElement from a dictionary."""
@@ -1046,6 +1302,15 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FoundationModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.docs is not None: body['docs'] = self.docs
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
         """Deserializes the FoundationModel from a dictionary."""
@@ -1065,6 +1330,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse:
         """Deserializes the GetOpenApiResponse from a dictionary."""
@@ -1082,6 +1352,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsResponse:
         """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary."""
@@ -1125,6 +1401,15 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.private_key is not None: body['private_key'] = self.private_key
+        if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext
+        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
         """Deserializes the GoogleCloudVertexAiConfig from a dictionary."""
@@ -1145,6 +1430,12 @@ def as_dict(self) -> dict:
         if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoints: body['endpoints'] = self.endpoints
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
         """Deserializes the ListEndpointsResponse from a dictionary."""
@@ -1162,6 +1453,12 @@ def as_dict(self) -> dict:
         if self.query_info: body['query_info'] = self.query_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_info: body['query_info'] = self.query_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo:
         """Deserializes the ModelDataPlaneInfo from a dictionary."""
@@ -1243,6 +1540,28 @@ def as_dict(self) -> dict:
         if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.microsoft_entra_client_id is not None:
+            body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
+        if self.microsoft_entra_client_secret is not None:
+            body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+        if self.microsoft_entra_client_secret_plaintext is not None:
+            body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext
+        if self.microsoft_entra_tenant_id is not None:
+            body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
+        if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
+        if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+        if self.openai_api_key_plaintext is not None:
+            body['openai_api_key_plaintext'] = self.openai_api_key_plaintext
+        if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
+        if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
+        if self.openai_deployment_name is not None:
+            body['openai_deployment_name'] = self.openai_deployment_name
+        if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
         """Deserializes the OpenAiConfig from a dictionary."""
@@ -1280,6 +1599,14 @@ def as_dict(self) -> dict:
             body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        if self.palm_api_key_plaintext is not None:
+            body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PaLmConfig:
         """Deserializes the PaLmConfig from a dictionary."""
@@ -1306,6 +1633,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add_tags: body['add_tags'] = self.add_tags
+        if self.delete_tags: body['delete_tags'] = self.delete_tags
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
         """Deserializes the PatchServingEndpointTags from a dictionary."""
@@ -1333,6 +1668,14 @@ def as_dict(self) -> dict:
         if self.status_message is not None: body['status_message'] = self.status_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PayloadTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
         """Deserializes the PayloadTable from a dictionary."""
@@ -1367,6 +1710,15 @@ def as_dict(self) -> dict:
         if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
         """Deserializes the PutAiGatewayResponse from a dictionary."""
@@ -1388,6 +1740,12 @@ def as_dict(self) -> dict:
         if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutResponse:
         """Deserializes the PutResponse from a dictionary."""
@@ -1473,6 +1831,25 @@ def as_dict(self) -> dict:
         if self.temperature is not None: body['temperature'] = self.temperature
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataframe_records: body['dataframe_records'] = self.dataframe_records
+        if self.dataframe_split: body['dataframe_split'] = self.dataframe_split
+        if self.extra_params: body['extra_params'] = self.extra_params
+        if self.input: body['input'] = self.input
+        if self.inputs: body['inputs'] = self.inputs
+        if self.instances: body['instances'] = self.instances
+        if self.max_tokens is not None: body['max_tokens'] = self.max_tokens
+        if self.messages: body['messages'] = self.messages
+        if self.n is not None: body['n'] = self.n
+        if self.name is not None: body['name'] = self.name
+        if self.prompt: body['prompt'] = self.prompt
+        if self.stop: body['stop'] = self.stop
+        if self.stream is not None: body['stream'] = self.stream
+        if self.temperature is not None: body['temperature'] = self.temperature
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEndpointInput:
         """Deserializes the QueryEndpointInput from a dictionary."""
@@ -1543,6 +1920,20 @@ def as_dict(self) -> dict:
         if self.usage: body['usage'] = self.usage.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.choices: body['choices'] = self.choices
+        if self.created is not None: body['created'] = self.created
+        if self.data: body['data'] = self.data
+        if self.id is not None: body['id'] = self.id
+        if self.model is not None: body['model'] = self.model
+        if self.object is not None: body['object'] = self.object
+        if self.predictions: body['predictions'] = self.predictions
+        if self.served_model_name is not None: body['served-model-name'] = self.served_model_name
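+        # Note: the hyphenated key is intentional; it mirrors as_dict and the field name the service serializes.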
+        if self.usage: body['usage'] = self.usage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEndpointResponse:
         """Deserializes the QueryEndpointResponse from a dictionary."""
@@ -1586,6 +1977,14 @@ def as_dict(self) -> dict:
         if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RateLimit into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RateLimit:
         """Deserializes the RateLimit from a dictionary."""
@@ -1624,6 +2023,13 @@ def as_dict(self) -> dict:
         if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Route into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.served_model_name is not None: body['served_model_name'] = self.served_model_name
+        if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Route:
         """Deserializes the Route from a dictionary."""
@@ -1709,6 +2115,24 @@ def as_dict(self) -> dict:
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.external_model: body['external_model'] = self.external_model
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput:
         """Deserializes the ServedEntityInput from a dictionary."""
@@ -1815,6 +2239,28 @@ def as_dict(self) -> dict:
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.external_model: body['external_model'] = self.external_model
+        if self.foundation_model: body['foundation_model'] = self.foundation_model
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.state: body['state'] = self.state
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput:
         """Deserializes the ServedEntityOutput from a dictionary."""
@@ -1868,6 +2314,16 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.external_model: body['external_model'] = self.external_model
+        if self.foundation_model: body['foundation_model'] = self.foundation_model
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
         """Deserializes the ServedEntitySpec from a dictionary."""
@@ -1943,6 +2399,23 @@ def as_dict(self) -> dict:
         if self.workload_type is not None: body['workload_type'] = self.workload_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
         """Deserializes the ServedModelInput from a dictionary."""
@@ -2051,6 +2524,22 @@ def as_dict(self) -> dict:
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.state: body['state'] = self.state
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelOutput:
         """Deserializes the ServedModelOutput from a dictionary."""
@@ -2087,6 +2576,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec:
         """Deserializes the ServedModelSpec from a dictionary."""
@@ -2119,6 +2616,14 @@ def as_dict(self) -> dict:
             body['deployment_state_message'] = self.deployment_state_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.deployment is not None: body['deployment'] = self.deployment
+        if self.deployment_state_message is not None:
+            body['deployment_state_message'] = self.deployment_state_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelState:
         """Deserializes the ServedModelState from a dictionary."""
@@ -2155,6 +2660,12 @@ def as_dict(self) -> dict:
         if self.logs is not None: body['logs'] = self.logs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServerLogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.logs is not None: body['logs'] = self.logs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
         """Deserializes the ServerLogsResponse from a dictionary."""
@@ -2211,6 +2722,22 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.state: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint:
         """Deserializes the ServingEndpoint from a dictionary."""
@@ -2250,6 +2777,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlRequest:
         """Deserializes the ServingEndpointAccessControlRequest from a dictionary."""
@@ -2287,6 +2824,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
         """Deserializes the ServingEndpointAccessControlResponse from a dictionary."""
@@ -2367,6 +2915,27 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointDetailed into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.data_plane_info: body['data_plane_info'] = self.data_plane_info
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.pending_config: body['pending_config'] = self.pending_config
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
+        if self.state: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
         """Deserializes the ServingEndpointDetailed from a dictionary."""
@@ -2412,6 +2981,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermission:
         """Deserializes the ServingEndpointPermission from a dictionary."""
@@ -2445,6 +3022,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissions:
         """Deserializes the ServingEndpointPermissions from a dictionary."""
@@ -2468,6 +3053,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsDescription:
         """Deserializes the ServingEndpointPermissionsDescription from a dictionary."""
@@ -2490,6 +3082,13 @@ def as_dict(self) -> dict:
         if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
         """Deserializes the ServingEndpointPermissionsRequest from a dictionary."""
@@ -2509,6 +3108,12 @@ def as_dict(self) -> dict:
         if self.routes: body['routes'] = [v.as_dict() for v in self.routes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.routes: body['routes'] = self.routes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TrafficConfig:
         """Deserializes the TrafficConfig from a dictionary."""
@@ -2542,6 +3147,16 @@ def as_dict(self) -> dict:
         if self.text is not None: body['text'] = self.text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.finish_reason is not None: body['finishReason'] = self.finish_reason
+        if self.index is not None: body['index'] = self.index
+        if self.logprobs is not None: body['logprobs'] = self.logprobs
+        if self.message: body['message'] = self.message
+        if self.text is not None: body['text'] = self.text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement:
         """Deserializes the V1ResponseChoiceElement from a dictionary."""
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index d6294b261..c3fba0ab3 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -24,6 +24,12 @@ def as_dict(self) -> dict:
         if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicy:
         """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary."""
@@ -67,6 +73,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aibi_dashboard_embedding_access_policy:
+            body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicySetting:
         """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary."""
@@ -86,6 +101,12 @@ def as_dict(self) -> dict:
         if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.approved_domains: body['approved_domains'] = self.approved_domains
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomains:
         """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary."""
@@ -121,6 +142,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aibi_dashboard_embedding_approved_domains:
+            body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomainsSetting:
         """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary."""
@@ -157,6 +187,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.automatic_cluster_update_workspace:
+            body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting:
         """Deserializes the AutomaticClusterUpdateSetting from a dictionary."""
@@ -176,6 +215,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BooleanMessage:
         """Deserializes the BooleanMessage from a dictionary."""
@@ -210,6 +255,17 @@ def as_dict(self) -> dict:
             body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enablement_details: body['enablement_details'] = self.enablement_details
+        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window
+        if self.restart_even_if_no_updates_available is not None:
+            body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessage:
         """Deserializes the ClusterAutoRestartMessage from a dictionary."""
@@ -251,6 +307,17 @@ def as_dict(self) -> dict:
             body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.forced_for_compliance_mode is not None:
+            body['forced_for_compliance_mode'] = self.forced_for_compliance_mode
+        if self.unavailable_for_disabled_entitlement is not None:
+            body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement
+        if self.unavailable_for_non_enterprise_tier is not None:
+            body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageEnablementDetails:
         """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary."""
@@ -270,6 +337,12 @@ def as_dict(self) -> dict:
             body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindow:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary."""
@@ -304,6 +377,14 @@ def as_dict(self) -> dict:
         if self.window_start_time: body['window_start_time'] = self.window_start_time.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.day_of_week is not None: body['day_of_week'] = self.day_of_week
+        if self.frequency is not None: body['frequency'] = self.frequency
+        if self.window_start_time: body['window_start_time'] = self.window_start_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary."""
@@ -338,6 +419,13 @@ def as_dict(self) -> dict:
         if self.minutes is not None: body['minutes'] = self.minutes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.hours is not None: body['hours'] = self.hours
+        if self.minutes is not None: body['minutes'] = self.minutes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary."""
@@ -361,6 +449,13 @@ def as_dict(self) -> dict:
         if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
         """Deserializes the ComplianceSecurityProfile from a dictionary."""
@@ -398,6 +493,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_security_profile_workspace:
+            body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting:
         """Deserializes the ComplianceSecurityProfileSetting from a dictionary."""
@@ -445,6 +549,16 @@ def as_dict(self) -> dict:
         if self.slack: body['slack'] = self.slack.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Config into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email: body['email'] = self.email
+        if self.generic_webhook: body['generic_webhook'] = self.generic_webhook
+        if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams
+        if self.pagerduty: body['pagerduty'] = self.pagerduty
+        if self.slack: body['slack'] = self.slack
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Config:
         """Deserializes the Config from a dictionary."""
@@ -478,6 +592,14 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessList:
         """Deserializes the CreateIpAccessList from a dictionary."""
@@ -499,6 +621,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessListResponse:
         """Deserializes the CreateIpAccessListResponse from a dictionary."""
@@ -523,6 +651,13 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNetworkConnectivityConfigRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest:
         """Deserializes the CreateNetworkConnectivityConfigRequest from a dictionary."""
@@ -544,6 +679,13 @@ def as_dict(self) -> dict:
         if self.display_name is not None: body['display_name'] = self.display_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.display_name is not None: body['display_name'] = self.display_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest:
         """Deserializes the CreateNotificationDestinationRequest from a dictionary."""
@@ -571,6 +713,14 @@ def as_dict(self) -> dict:
         if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.application_id is not None: body['application_id'] = self.application_id
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenRequest:
         """Deserializes the CreateOboTokenRequest from a dictionary."""
@@ -595,6 +745,13 @@ def as_dict(self) -> dict:
         if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        if self.token_value is not None: body['token_value'] = self.token_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenResponse:
         """Deserializes the CreateOboTokenResponse from a dictionary."""
@@ -622,6 +779,15 @@ def as_dict(self) -> dict:
         if self.resource_id is not None: body['resource_id'] = self.resource_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePrivateEndpointRuleRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePrivateEndpointRuleRequest:
         """Deserializes the CreatePrivateEndpointRuleRequest from a dictionary."""
@@ -657,6 +823,13 @@ def as_dict(self) -> dict:
         if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenRequest:
         """Deserializes the CreateTokenRequest from a dictionary."""
@@ -678,6 +851,13 @@ def as_dict(self) -> dict:
         if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        if self.token_value is not None: body['token_value'] = self.token_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenResponse:
         """Deserializes the CreateTokenResponse from a dictionary."""
@@ -704,6 +884,13 @@ def as_dict(self) -> dict:
         if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccount:
         """Deserializes the CspEnablementAccount from a dictionary."""
@@ -738,6 +925,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccountSetting:
         """Deserializes the CspEnablementAccountSetting from a dictionary."""
@@ -780,6 +975,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.namespace: body['namespace'] = self.namespace
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
         """Deserializes the DefaultNamespaceSetting from a dictionary."""
@@ -788,6 +991,66 @@ def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+        """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
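
The docstring above (and its twin just below) recommends the read -> delete etag pattern. A hedged sketch of what that looks like; `settings_api`, `get()` and `delete(etag=...)` are illustrative stand-ins for whichever concrete settings API applies, not a specific SDK surface:

    setting = settings_api.get()                   # GET returns the current version's etag
    resp = settings_api.delete(etag=setting.etag)  # pass it back so a concurrent write fails fast
    new_etag = resp.etag                           # the delete response carries a fresh etag
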
+
+@dataclass
+class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+        """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteDefaultNamespaceSettingResponse:
     """The etag is returned."""
@@ -806,6 +1069,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse:
         """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary."""
@@ -830,6 +1099,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
         """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary."""
@@ -854,6 +1129,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
         """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
@@ -878,6 +1159,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse:
         """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary."""
@@ -892,6 +1179,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteNetworkConnectivityConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteNetworkConnectivityConfigurationResponse:
         """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary."""
@@ -916,6 +1208,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePersonalComputeSettingResponse:
         """Deserializes the DeletePersonalComputeSettingResponse from a dictionary."""
@@ -930,6 +1228,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -954,6 +1257,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingResponse:
         """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary."""
@@ -995,6 +1304,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
         """Deserializes the DisableLegacyAccess from a dictionary."""
@@ -1029,6 +1346,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
         """Deserializes the DisableLegacyDbfs from a dictionary."""
@@ -1064,6 +1389,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
         """Deserializes the DisableLegacyFeatures from a dictionary."""
@@ -1072,6 +1405,270 @@ def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class EgressNetworkPolicy:
+    """The network policies applying for egress traffic. This message is used by the UI/REST API. We
+    translate this message to the format expected by the dataplane in Lakehouse Network Manager (for
+    the format expected by the dataplane, see networkconfig.textproto)."""
+
+    internet_access: Optional[EgressNetworkPolicyInternetAccessPolicy] = None
+    """The access policy enforced for egress traffic to the internet."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.internet_access: body['internet_access'] = self.internet_access.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.internet_access: body['internet_access'] = self.internet_access
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicy:
+        """Deserializes the EgressNetworkPolicy from a dictionary."""
+        return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy))
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicy:
+    allowed_internet_destinations: Optional[
+        List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None
+
+    allowed_storage_destinations: Optional[
+        List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None
+
+    log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None
+    """Optional. If not specified, assume the policy is enforced for all workloads."""
+
+    restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None
+    """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS:
+    Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can
+    only access explicitly allowed internet and storage destinations, as well as UC connections and
+    external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
+    private link."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allowed_internet_destinations:
+            body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations]
+        if self.allowed_storage_destinations:
+            body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations]
+        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict()
+        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_internet_destinations:
+            body['allowed_internet_destinations'] = self.allowed_internet_destinations
+        if self.allowed_storage_destinations:
+            body['allowed_storage_destinations'] = self.allowed_storage_destinations
+        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode
+        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicy:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary."""
+        return cls(allowed_internet_destinations=_repeated_dict(
+            d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination),
+                   allowed_storage_destinations=_repeated_dict(
+                       d, 'allowed_storage_destinations',
+                       EgressNetworkPolicyInternetAccessPolicyStorageDestination),
+                   log_only_mode=_from_dict(d, 'log_only_mode',
+                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyMode),
+                   restriction_mode=_enum(d, 'restriction_mode',
+                                          EgressNetworkPolicyInternetAccessPolicyRestrictionMode))
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyInternetDestination:
+    """Users can specify accessible internet destinations when outbound access is restricted. We only
+    support domain name (FQDN) destinations for the time being, though going forwards we want to
+    support host names and IP addresses."""
+
+    destination: Optional[str] = None
+
+    protocol: Optional[
+        EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None
+    """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP
+    filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be
+    set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
+    filtering (i.e. SNI based filtering, filtering by FQDN)."""
+
+    type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        if self.protocol is not None: body['protocol'] = self.protocol.value
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary."""
+        return cls(
+            destination=d.get('destination', None),
+            protocol=_enum(
+                d, 'protocol',
+                EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+            ),
+            type=_enum(d, 'type',
+                       EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum):
+    """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP
+    filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be
+    set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
+    filtering (i.e. SNI based filtering, filtering by FQDN)."""
+
+    TCP = 'TCP'
+
+
+class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum):
+
+    FQDN = 'FQDN'
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
+    log_only_mode_type: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType] = None
+
+    workloads: Optional[List[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value
+        if self.workloads: body['workloads'] = [v.value for v in self.workloads]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type
+        if self.workloads: body['workloads'] = self.workloads
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary."""
+        return cls(log_only_mode_type=_enum(
+            d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType),
+                   workloads=_repeated_enum(d, 'workloads',
+                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum):
+
+    ALL_SERVICES = 'ALL_SERVICES'
+    SELECTED_SERVICES = 'SELECTED_SERVICES'
+
+
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum):
+    """The values should match the list of workloads used in networkconfig.proto"""
+
+    DBSQL = 'DBSQL'
+    ML_SERVING = 'ML_SERVING'
+
+
+class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum):
+    """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS:
+    Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can
+    only access explicitly allowed internet and storage destinations, as well as UC connections and
+    external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
+    private link."""
+
+    FULL_ACCESS = 'FULL_ACCESS'
+    PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY'
+    RESTRICTED_ACCESS = 'RESTRICTED_ACCESS'
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyStorageDestination:
+    """Users can specify accessible storage destinations."""
+
+    allowed_paths: Optional[List[str]] = None
+
+    azure_container: Optional[str] = None
+
+    azure_dns_zone: Optional[str] = None
+
+    azure_storage_account: Optional[str] = None
+
+    azure_storage_service: Optional[str] = None
+
+    bucket_name: Optional[str] = None
+
+    region: Optional[str] = None
+
+    type: Optional[EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths]
+        if self.azure_container is not None: body['azure_container'] = self.azure_container
+        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
+        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
+        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.region is not None: body['region'] = self.region
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_paths: body['allowed_paths'] = self.allowed_paths
+        if self.azure_container is not None: body['azure_container'] = self.azure_container
+        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
+        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
+        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.region is not None: body['region'] = self.region
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary."""
+        return cls(allowed_paths=d.get('allowed_paths', None),
+                   azure_container=d.get('azure_container', None),
+                   azure_dns_zone=d.get('azure_dns_zone', None),
+                   azure_storage_account=d.get('azure_storage_account', None),
+                   azure_storage_service=d.get('azure_storage_service', None),
+                   bucket_name=d.get('bucket_name', None),
+                   region=d.get('region', None),
+                   type=_enum(
+                       d, 'type',
+                       EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum):
+
+    AWS_S3 = 'AWS_S3'
+    AZURE_STORAGE = 'AZURE_STORAGE'
+    CLOUDFLARE_R2 = 'CLOUDFLARE_R2'
+    GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE'
+
+
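As a sanity check on the deserializer, here is a minimal round-trip sketch (assumed usage, not part of the patch): `from_dict` copies scalar fields directly, resolves the `type` string through the enum above, and `as_dict` reproduces the original mapping, omitting unset optional fields.

```python
# Assumed round-trip sketch for the storage destination defined above.
d = {'bucket_name': 'my-bucket', 'region': 'us-west-2', 'type': 'AWS_S3'}
dest = EgressNetworkPolicyInternetAccessPolicyStorageDestination.from_dict(d)

assert dest.type is EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.AWS_S3
assert dest.as_dict() == d  # unset optional fields are simply omitted
```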
 @dataclass
 class EmailConfig:
     addresses: Optional[List[str]] = None
@@ -1083,6 +1680,12 @@ def as_dict(self) -> dict:
         if self.addresses: body['addresses'] = [v for v in self.addresses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmailConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.addresses: body['addresses'] = self.addresses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmailConfig:
         """Deserializes the EmailConfig from a dictionary."""
@@ -1097,6 +1700,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Empty:
         """Deserializes the Empty from a dictionary."""
@@ -1115,6 +1723,12 @@ def as_dict(self) -> dict:
         if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoring:
         """Deserializes the EnhancedSecurityMonitoring from a dictionary."""
@@ -1151,6 +1765,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enhanced_security_monitoring_workspace:
+            body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoringSetting:
         """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary."""
@@ -1172,6 +1795,12 @@ def as_dict(self) -> dict:
         if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount:
         """Deserializes the EsmEnablementAccount from a dictionary."""
@@ -1205,6 +1834,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting:
         """Deserializes the EsmEnablementAccountSetting from a dictionary."""
@@ -1242,6 +1879,16 @@ def as_dict(self) -> dict:
         if self.token_type is not None: body['tokenType'] = self.token_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential is not None: body['credential'] = self.credential
+        if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time
+        if self.owner_id is not None: body['ownerId'] = self.owner_id
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_type is not None: body['tokenType'] = self.token_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeToken:
         """Deserializes the ExchangeToken from a dictionary."""
@@ -1273,6 +1920,14 @@ def as_dict(self) -> dict:
         if self.token_type: body['tokenType'] = [v.value for v in self.token_type]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.partition_id: body['partitionId'] = self.partition_id
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_type: body['tokenType'] = self.token_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenRequest:
         """Deserializes the ExchangeTokenRequest from a dictionary."""
@@ -1293,6 +1948,12 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenResponse:
         """Deserializes the ExchangeTokenResponse from a dictionary."""
@@ -1312,6 +1973,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse:
         """Deserializes the FetchIpAccessListResponse from a dictionary."""
@@ -1349,6 +2016,17 @@ def as_dict(self) -> dict:
         if self.username_set is not None: body['username_set'] = self.username_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.password is not None: body['password'] = self.password
+        if self.password_set is not None: body['password_set'] = self.password_set
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.username is not None: body['username'] = self.username
+        if self.username_set is not None: body['username_set'] = self.username_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig:
         """Deserializes the GenericWebhookConfig from a dictionary."""
@@ -1371,6 +2049,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListResponse:
         """Deserializes the GetIpAccessListResponse from a dictionary."""
@@ -1389,6 +2073,12 @@ def as_dict(self) -> dict:
         if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListsResponse:
         """Deserializes the GetIpAccessListsResponse from a dictionary."""
@@ -1406,6 +2096,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenPermissionLevelsResponse:
         """Deserializes the GetTokenPermissionLevelsResponse from a dictionary."""
@@ -1424,6 +2120,12 @@ def as_dict(self) -> dict:
         if self.token_info: body['token_info'] = self.token_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenResponse:
         """Deserializes the GetTokenResponse from a dictionary."""
@@ -1481,6 +2183,21 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.address_count is not None: body['address_count'] = self.address_count
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_id is not None: body['list_id'] = self.list_id
+        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IpAccessListInfo:
         """Deserializes the IpAccessListInfo from a dictionary."""
@@ -1508,6 +2225,12 @@ def as_dict(self) -> dict:
         if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListIpAccessListResponse:
         """Deserializes the ListIpAccessListResponse from a dictionary."""
@@ -1529,6 +2252,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNccAzurePrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNccAzurePrivateEndpointRulesResponse:
         """Deserializes the ListNccAzurePrivateEndpointRulesResponse from a dictionary."""
@@ -1551,6 +2281,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsResponse:
         """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary."""
@@ -1572,6 +2309,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse:
         """Deserializes the ListNotificationDestinationsResponse from a dictionary."""
@@ -1598,6 +2342,14 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_type is not None: body['destination_type'] = self.destination_type
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult:
         """Deserializes the ListNotificationDestinationsResult from a dictionary."""
@@ -1617,6 +2369,12 @@ def as_dict(self) -> dict:
         if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_infos: body['token_infos'] = self.token_infos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPublicTokensResponse:
         """Deserializes the ListPublicTokensResponse from a dictionary."""
@@ -1636,6 +2394,12 @@ def as_dict(self) -> dict:
         if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_infos: body['token_infos'] = self.token_infos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTokensResponse:
         """Deserializes the ListTokensResponse from a dictionary."""
@@ -1667,6 +2431,13 @@ def as_dict(self) -> dict:
         if self.url_set is not None: body['url_set'] = self.url_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig:
         """Deserializes the MicrosoftTeamsConfig from a dictionary."""
@@ -1688,6 +2459,12 @@ def as_dict(self) -> dict:
         if self.cidr_blocks: body['cidr_blocks'] = [v for v in self.cidr_blocks]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAwsStableIpRule:
         """Deserializes the NccAwsStableIpRule from a dictionary."""
@@ -1745,7 +2522,23 @@ def as_dict(self) -> dict:
         if self.deactivated is not None: body['deactivated'] = self.deactivated
         if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
         if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.rule_id is not None: body['rule_id'] = self.rule_id
+        if self.updated_time is not None: body['updated_time'] = self.updated_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_state is not None: body['connection_state'] = self.connection_state
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.deactivated is not None: body['deactivated'] = self.deactivated
+        if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.group_id is not None: body['group_id'] = self.group_id
         if self.network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = self.network_connectivity_config_id
         if self.resource_id is not None: body['resource_id'] = self.resource_id
@@ -1820,6 +2613,14 @@ def as_dict(self) -> dict:
         if self.target_services: body['target_services'] = [v for v in self.target_services]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.subnets: body['subnets'] = self.subnets
+        if self.target_region is not None: body['target_region'] = self.target_region
+        if self.target_services: body['target_services'] = self.target_services
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAzureServiceEndpointRule:
         """Deserializes the NccAzureServiceEndpointRule from a dictionary."""
@@ -1849,6 +2650,13 @@ def as_dict(self) -> dict:
         if self.target_rules: body['target_rules'] = self.target_rules.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_rules: body['default_rules'] = self.default_rules
+        if self.target_rules: body['target_rules'] = self.target_rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressConfig:
         """Deserializes the NccEgressConfig from a dictionary."""
@@ -1878,6 +2686,14 @@ def as_dict(self) -> dict:
             body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule
+        if self.azure_service_endpoint_rule:
+            body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressDefaultRules:
         """Deserializes the NccEgressDefaultRules from a dictionary."""
@@ -1900,6 +2716,13 @@ def as_dict(self) -> dict:
             body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_private_endpoint_rules:
+            body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressTargetRules:
         """Deserializes the NccEgressTargetRules from a dictionary."""
@@ -1947,6 +2770,19 @@ def as_dict(self) -> dict:
         if self.updated_time is not None: body['updated_time'] = self.updated_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.egress_config: body['egress_config'] = self.egress_config
+        if self.name is not None: body['name'] = self.name
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.region is not None: body['region'] = self.region
+        if self.updated_time is not None: body['updated_time'] = self.updated_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration:
         """Deserializes the NetworkConnectivityConfiguration from a dictionary."""
@@ -1983,6 +2819,15 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.destination_type is not None: body['destination_type'] = self.destination_type
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotificationDestination:
         """Deserializes the NotificationDestination from a dictionary."""
@@ -2007,6 +2852,13 @@ def as_dict(self) -> dict:
         if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_key is not None: body['integration_key'] = self.integration_key
+        if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig:
         """Deserializes the PagerdutyConfig from a dictionary."""
@@ -2027,6 +2879,12 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionId:
         """Deserializes the PartitionId from a dictionary."""
@@ -2048,6 +2906,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeMessage:
         """Deserializes the PersonalComputeMessage from a dictionary."""
@@ -2091,6 +2955,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.personal_compute: body['personal_compute'] = self.personal_compute
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeSetting:
         """Deserializes the PersonalComputeSetting from a dictionary."""
@@ -2122,6 +2994,15 @@ def as_dict(self) -> dict:
         if self.token_id is not None: body['token_id'] = self.token_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.token_id is not None: body['token_id'] = self.token_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublicTokenInfo:
         """Deserializes the PublicTokenInfo from a dictionary."""
@@ -2162,6 +3043,16 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceIpAccessList:
         """Deserializes the ReplaceIpAccessList from a dictionary."""
@@ -2180,6 +3071,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
@@ -2196,6 +3092,12 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsMessage:
         """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary."""
@@ -2235,6 +3137,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsSetting:
         """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary."""
@@ -2255,6 +3165,12 @@ def as_dict(self) -> dict:
         if self.token_id is not None: body['token_id'] = self.token_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_id is not None: body['token_id'] = self.token_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RevokeTokenRequest:
         """Deserializes the RevokeTokenRequest from a dictionary."""
@@ -2269,6 +3185,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RevokeTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RevokeTokenResponse:
         """Deserializes the RevokeTokenResponse from a dictionary."""
@@ -2283,6 +3204,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse:
         """Deserializes the SetStatusResponse from a dictionary."""
@@ -2304,6 +3230,13 @@ def as_dict(self) -> dict:
         if self.url_set is not None: body['url_set'] = self.url_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SlackConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SlackConfig:
         """Deserializes the SlackConfig from a dictionary."""
@@ -2321,6 +3254,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StringMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StringMessage:
         """Deserializes the StringMessage from a dictionary."""
@@ -2351,6 +3290,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlRequest:
         """Deserializes the TokenAccessControlRequest from a dictionary."""
@@ -2388,6 +3337,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlResponse:
         """Deserializes the TokenAccessControlResponse from a dictionary."""
@@ -2441,6 +3401,20 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
+        if self.owner_id is not None: body['owner_id'] = self.owner_id
+        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
@@ -2472,6 +3446,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermission:
         """Deserializes the TokenPermission from a dictionary."""
@@ -2503,6 +3485,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissions:
         """Deserializes the TokenPermissions from a dictionary."""
@@ -2525,6 +3515,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsDescription:
         """Deserializes the TokenPermissionsDescription from a dictionary."""
@@ -2543,6 +3540,12 @@ def as_dict(self) -> dict:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest:
         """Deserializes the TokenPermissionsRequest from a dictionary."""
@@ -2578,6 +3581,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary."""
@@ -2608,6 +3619,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
         """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary."""
@@ -2638,6 +3657,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAutomaticClusterUpdateSettingRequest:
         """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary."""
@@ -2668,6 +3695,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComplianceSecurityProfileSettingRequest:
         """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary."""
@@ -2698,6 +3733,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest:
         """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary."""
@@ -2735,6 +3778,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest:
         """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary."""
@@ -2765,6 +3816,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
         """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
@@ -2795,6 +3854,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
         """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
@@ -2825,6 +3892,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest:
         """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
@@ -2855,6 +3930,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEnhancedSecurityMonitoringSettingRequest:
         """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary."""
@@ -2885,6 +3968,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest:
         """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary."""
@@ -2924,6 +4015,16 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList:
         """Deserializes the UpdateIpAccessList from a dictionary."""
@@ -2953,6 +4054,14 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest:
         """Deserializes the UpdateNotificationDestinationRequest from a dictionary."""
@@ -2983,6 +4092,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalComputeSettingRequest:
         """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary."""
@@ -2999,6 +4116,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -3027,6 +4149,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRestrictWorkspaceAdminsSettingRequest:
         """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary."""
@@ -3292,6 +4422,33 @@ class AibiDashboardEmbeddingAccessPolicyAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def delete(self,
+               *,
+               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+        """Delete the AI/BI dashboard embedding access policy.
+        
+        Delete the AI/BI dashboard embedding access policy, reverting to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the etag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res)
+
     def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting:
         """Retrieve the AI/BI dashboard embedding access policy.
         
@@ -3354,6 +4511,34 @@ class AibiDashboardEmbeddingApprovedDomainsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+    def delete(self,
+               *,
+               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+        """Delete AI/BI dashboard embedding approved domains.
+        
+        Delete the list of domains approved to host embedded AI/BI dashboards, reverting to the default
+        empty list.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the etag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res)
+
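
A usage sketch for the two new `delete` endpoints, following the read -> delete etag pattern the docstrings recommend. The `w.settings.aibi_dashboard_embedding_access_policy` accessor name and the `etag` field on the returned setting are assumptions based on the SDK's usual generated naming; check the generated settings API for the exact attributes:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Accessor name assumed from the SDK's generated naming convention.
api = w.settings.aibi_dashboard_embedding_access_policy

# Read first to obtain a fresh etag, then pass it to delete so that a
# concurrent write of the setting fails the delete instead of being
# silently discarded.
current = api.get()
api.delete(etag=current.etag)
```
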
     def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting:
         """Retrieve the list of domains approved to host embedded AI/BI dashboards.
         
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 13cba2ccf..000c85e2c 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -46,6 +46,15 @@ def as_dict(self) -> dict:
         if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProvider into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProvider:
         """Deserializes the CreateProvider from a dictionary."""
@@ -102,6 +111,21 @@ def as_dict(self) -> dict:
         if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_recipient_global_metastore_id is not None:
+            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRecipient:
         """Deserializes the CreateRecipient from a dictionary."""
@@ -135,6 +159,14 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateShare:
         """Deserializes the CreateShare from a dictionary."""
@@ -151,6 +183,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -165,6 +202,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetActivationUrlInfoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse:
         """Deserializes the GetActivationUrlInfoResponse from a dictionary."""
@@ -187,6 +229,13 @@ def as_dict(self) -> dict:
         if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.permissions_out: body['permissions_out'] = self.permissions_out
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
@@ -205,6 +254,12 @@ def as_dict(self) -> dict:
         if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IpAccessList:
         """Deserializes the IpAccessList from a dictionary."""
@@ -227,6 +282,13 @@ def as_dict(self) -> dict:
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
@@ -250,6 +312,13 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
@@ -273,6 +342,13 @@ def as_dict(self) -> dict:
         if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.recipients: body['recipients'] = self.recipients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
@@ -296,6 +372,13 @@ def as_dict(self) -> dict:
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
@@ -314,12 +397,41 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Partition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Partition:
         """Deserializes the Partition from a dictionary."""
         return cls(values=_repeated_dict(d, 'values', PartitionValue))
 
 
+@dataclass
+class PartitionSpecificationPartition:
+    values: Optional[List[PartitionValue]] = None
+    """An array of partition values."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PartitionSpecificationPartition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionSpecificationPartition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PartitionSpecificationPartition:
+        """Deserializes the PartitionSpecificationPartition from a dictionary."""
+        return cls(values=_repeated_dict(d, 'values', PartitionValue))
+
+
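
Both partition classes deserialize their `values` list through the internal `_repeated_dict` helper. A rough sketch of what that helper is assumed to do (not the SDK's actual implementation): rebuild a typed list from a list of raw dicts, returning `None` when the key is absent:

```python
from typing import Any, Dict, List, Optional, Type, TypeVar

T = TypeVar('T')


def _repeated_dict(d: Dict[str, Any], field: str, cls: Type[T]) -> Optional[List[T]]:
    """Deserialize d[field], a list of raw dicts, into instances of cls.

    cls is expected to expose a from_dict classmethod, as the generated
    dataclasses in this module do.
    """
    raw = d.get(field)
    if raw is None:
        return None
    return [cls.from_dict(v) for v in raw]
```
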
 @dataclass
 class PartitionValue:
     name: Optional[str] = None
@@ -329,7 +441,7 @@ class PartitionValue:
     """The operator to apply for the value."""
 
     recipient_property_key: Optional[str] = None
-    """The key of a Delta Sharing recipient's property. For example `databricks-account-id`. When this
+    """The key of a Delta Sharing recipient's property. For example "databricks-account-id". When this
     field is set, field `value` can not be set."""
 
     value: Optional[str] = None
@@ -346,6 +458,16 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.op is not None: body['op'] = self.op
+        if self.recipient_property_key is not None:
+            body['recipient_property_key'] = self.recipient_property_key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionValue:
         """Deserializes the PartitionValue from a dictionary."""
@@ -356,7 +478,6 @@ def from_dict(cls, d: Dict[str, any]) -> PartitionValue:
 
 
 class PartitionValueOp(Enum):
-    """The operator to apply for the value."""
 
     EQUAL = 'EQUAL'
     LIKE = 'LIKE'
@@ -374,6 +495,7 @@ class Privilege(Enum):
     CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
     CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
     CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
     CREATE_FUNCTION = 'CREATE_FUNCTION'
     CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
     CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
@@ -425,6 +547,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.value for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
@@ -500,6 +629,26 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_provider_global_metastore_id is not None:
+            body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.region is not None: body['region'] = self.region
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
@@ -530,6 +679,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderShare:
         """Deserializes the ProviderShare from a dictionary."""
@@ -623,6 +778,30 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activated is not None: body['activated'] = self.activated
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_recipient_global_metastore_id is not None:
+            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.region is not None: body['region'] = self.region
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        if self.tokens: body['tokens'] = self.tokens
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientInfo:
         """Deserializes the RecipientInfo from a dictionary."""
@@ -666,6 +845,15 @@ def as_dict(self) -> dict:
             body['share_credentials_version'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.share_credentials_version is not None:
+            body['share_credentials_version'] = self.share_credentials_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientProfile:
         """Deserializes the RecipientProfile from a dictionary."""
@@ -710,6 +898,18 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientTokenInfo:
         """Deserializes the RecipientTokenInfo from a dictionary."""
@@ -746,6 +946,16 @@ def as_dict(self) -> dict:
             body['shareCredentialsVersion'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body['shareCredentialsVersion'] = self.share_credentials_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RetrieveTokenResponse:
         """Deserializes the RetrieveTokenResponse from a dictionary."""
@@ -773,6 +983,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.existing_token_expire_in_seconds is not None:
+            body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RotateRecipientToken:
         """Deserializes the RotateRecipientToken from a dictionary."""
@@ -793,6 +1011,12 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.properties: body['properties'] = self.properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecurablePropertiesKvPairs:
         """Deserializes the SecurablePropertiesKvPairs from a dictionary."""
@@ -849,6 +1073,21 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.name is not None: body['name'] = self.name
+        if self.objects: body['objects'] = self.objects
+        if self.owner is not None: body['owner'] = self.owner
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
@@ -880,6 +1119,13 @@ def as_dict(self) -> dict:
         if self.share_name is not None: body['share_name'] = self.share_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.share_name is not None: body['share_name'] = self.share_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareToPrivilegeAssignment:
         """Deserializes the ShareToPrivilegeAssignment from a dictionary."""
@@ -962,6 +1208,25 @@ def as_dict(self) -> dict:
         if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.added_by is not None: body['added_by'] = self.added_by
+        if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
+        if self.comment is not None: body['comment'] = self.comment
+        if self.content is not None: body['content'] = self.content
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.history_data_sharing_status is not None:
+            body['history_data_sharing_status'] = self.history_data_sharing_status
+        if self.name is not None: body['name'] = self.name
+        if self.partitions: body['partitions'] = self.partitions
+        if self.shared_as is not None: body['shared_as'] = self.shared_as
+        if self.start_version is not None: body['start_version'] = self.start_version
+        if self.status is not None: body['status'] = self.status
+        if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
@@ -1025,6 +1290,13 @@ def as_dict(self) -> dict:
         if self.data_object: body['data_object'] = self.data_object.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action is not None: body['action'] = self.action
+        if self.data_object: body['data_object'] = self.data_object
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObjectUpdate:
         """Deserializes the SharedDataObjectUpdate from a dictionary."""
@@ -1048,6 +1320,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePermissionsResponse:
         """Deserializes the UpdatePermissionsResponse from a dictionary."""
@@ -1081,6 +1358,16 @@ def as_dict(self) -> dict:
         if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProvider:
         """Deserializes the UpdateProvider from a dictionary."""
@@ -1128,6 +1415,18 @@ def as_dict(self) -> dict:
         if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
         """Deserializes the UpdateRecipient from a dictionary."""
@@ -1148,6 +1447,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -1185,6 +1489,17 @@ def as_dict(self) -> dict:
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updates: body['updates'] = self.updates
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateShare:
         """Deserializes the UpdateShare from a dictionary."""
@@ -1225,6 +1540,15 @@ def as_dict(self) -> dict:
         if self.page_token is not None: body['page_token'] = self.page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.name is not None: body['name'] = self.name
+        if self.page_token is not None: body['page_token'] = self.page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
         """Deserializes the UpdateSharePermissions from a dictionary."""
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 390aee5ee..2c20a7aef 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -36,6 +36,14 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControl:
         """Deserializes the AccessControl from a dictionary."""
@@ -118,6 +126,26 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Alert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Alert:
         """Deserializes the Alert from a dictionary."""
@@ -161,6 +189,15 @@ def as_dict(self) -> dict:
         if self.threshold: body['threshold'] = self.threshold.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertCondition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
+        if self.op is not None: body['op'] = self.op
+        if self.operand: body['operand'] = self.operand
+        if self.threshold: body['threshold'] = self.threshold
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
         """Deserializes the AlertCondition from a dictionary."""
@@ -180,6 +217,12 @@ def as_dict(self) -> dict:
         if self.column: body['column'] = self.column.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column: body['column'] = self.column
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
         """Deserializes the AlertConditionOperand from a dictionary."""
@@ -196,6 +239,12 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
         """Deserializes the AlertConditionThreshold from a dictionary."""
@@ -212,6 +261,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
         """Deserializes the AlertOperandColumn from a dictionary."""
@@ -234,6 +289,14 @@ def as_dict(self) -> dict:
         if self.string_value is not None: body['string_value'] = self.string_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.double_value is not None: body['double_value'] = self.double_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
         """Deserializes the AlertOperandValue from a dictionary."""
@@ -297,6 +360,18 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column is not None: body['column'] = self.column
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
+        if self.muted is not None: body['muted'] = self.muted
+        if self.op is not None: body['op'] = self.op
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOptions:
         """Deserializes the AlertOptions from a dictionary."""
@@ -382,6 +457,24 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query is not None: body['query'] = self.query
+        if self.tags: body['tags'] = self.tags
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertQuery:
         """Deserializes the AlertQuery from a dictionary."""
@@ -434,6 +527,15 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseChunkInfo:
         """Deserializes the BaseChunkInfo from a dictionary."""
@@ -451,6 +553,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelExecutionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelExecutionResponse:
         """Deserializes the CancelExecutionResponse from a dictionary."""
@@ -473,6 +580,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Channel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Channel:
         """Deserializes the Channel from a dictionary."""
@@ -496,6 +610,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChannelInfo:
         """Deserializes the ChannelInfo from a dictionary."""
@@ -547,6 +668,18 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -612,6 +745,16 @@ def as_dict(self) -> dict:
         if self.rearm is not None: body['rearm'] = self.rearm
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.rearm is not None: body['rearm'] = self.rearm
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlert:
         """Deserializes the CreateAlert from a dictionary."""
@@ -632,6 +775,12 @@ def as_dict(self) -> dict:
         if self.alert: body['alert'] = self.alert.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
         """Deserializes the CreateAlertRequest from a dictionary."""
@@ -683,6 +832,19 @@ def as_dict(self) -> dict:
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
         """Deserializes the CreateAlertRequestAlert from a dictionary."""
@@ -706,6 +868,12 @@ def as_dict(self) -> dict:
         if self.query: body['query'] = self.query.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query: body['query'] = self.query
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
         """Deserializes the CreateQueryRequest from a dictionary."""
@@ -762,6 +930,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
         """Deserializes the CreateQueryRequestQuery from a dictionary."""
@@ -788,6 +972,12 @@ def as_dict(self) -> dict:
         if self.visualization: body['visualization'] = self.visualization.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.visualization: body['visualization'] = self.visualization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
         """Deserializes the CreateVisualizationRequest from a dictionary."""
@@ -823,6 +1013,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
         """Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
@@ -924,6 +1124,25 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest:
         """Deserializes the CreateWarehouseRequest from a dictionary."""
@@ -962,6 +1181,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseResponse:
         """Deserializes the CreateWarehouseResponse from a dictionary."""
@@ -999,6 +1224,17 @@ def as_dict(self) -> dict:
         if self.width is not None: body['width'] = self.width
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWidget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options
+        if self.text is not None: body['text'] = self.text
+        if self.visualization_id is not None: body['visualization_id'] = self.visualization_id
+        if self.width is not None: body['width'] = self.width
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWidget:
         """Deserializes the CreateWidget from a dictionary."""
@@ -1090,6 +1326,29 @@ def as_dict(self) -> dict:
         if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.dashboard_filters_enabled is not None:
+            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
+        if self.slug is not None: body['slug'] = self.slug
+        if self.tags: body['tags'] = self.tags
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.widgets: body['widgets'] = self.widgets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
@@ -1134,6 +1393,15 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.name is not None: body['name'] = self.name
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardEditContent:
         """Deserializes the DashboardEditContent from a dictionary."""
@@ -1155,6 +1423,12 @@ def as_dict(self) -> dict:
         if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardOptions:
         """Deserializes the DashboardOptions from a dictionary."""
@@ -1193,6 +1467,18 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_filters_enabled is not None:
+            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.name is not None: body['name'] = self.name
+        if self.parent is not None: body['parent'] = self.parent
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardPostContent:
         """Deserializes the DashboardPostContent from a dictionary."""
@@ -1253,6 +1539,20 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.pause_reason is not None: body['pause_reason'] = self.pause_reason
+        if self.paused is not None: body['paused'] = self.paused
+        if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit
+        if self.syntax is not None: body['syntax'] = self.syntax
+        if self.type is not None: body['type'] = self.type
+        if self.view_only is not None: body['view_only'] = self.view_only
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataSource:
         """Deserializes the DataSource from a dictionary."""
@@ -1287,6 +1587,13 @@ def as_dict(self) -> dict:
         if self.start is not None: body['start'] = self.start
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateRange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end is not None: body['end'] = self.end
+        if self.start is not None: body['start'] = self.start
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRange:
         """Deserializes the DateRange from a dictionary."""
@@ -1317,6 +1624,16 @@ def as_dict(self) -> dict:
         if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value
+        if self.dynamic_date_range_value is not None:
+            body['dynamic_date_range_value'] = self.dynamic_date_range_value
+        if self.precision is not None: body['precision'] = self.precision
+        if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
         """Deserializes the DateRangeValue from a dictionary."""
@@ -1368,6 +1685,14 @@ def as_dict(self) -> dict:
         if self.precision is not None: body['precision'] = self.precision.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_value is not None: body['date_value'] = self.date_value
+        if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value
+        if self.precision is not None: body['precision'] = self.precision
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateValue:
         """Deserializes the DateValue from a dictionary."""
@@ -1390,6 +1715,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -1404,6 +1734,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse:
         """Deserializes the DeleteWarehouseResponse from a dictionary."""
@@ -1443,6 +1778,16 @@ def as_dict(self) -> dict:
         if self.rearm is not None: body['rearm'] = self.rearm
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_id is not None: body['alert_id'] = self.alert_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.rearm is not None: body['rearm'] = self.rearm
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditAlert:
         """Deserializes the EditAlert from a dictionary."""
@@ -1547,6 +1892,26 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditWarehouseRequest:
         """Deserializes the EditWarehouseRequest from a dictionary."""
@@ -1583,6 +1948,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse:
         """Deserializes the EditWarehouseResponse from a dictionary."""
@@ -1599,6 +1969,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Empty:
         """Deserializes the Empty from a dictionary."""
@@ -1618,6 +1993,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointConfPair:
         """Deserializes the EndpointConfPair from a dictionary."""
@@ -1652,6 +2034,16 @@ def as_dict(self) -> dict:
         if self.summary is not None: body['summary'] = self.summary
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.details is not None: body['details'] = self.details
+        if self.failure_reason: body['failure_reason'] = self.failure_reason
+        if self.message is not None: body['message'] = self.message
+        if self.status is not None: body['status'] = self.status
+        if self.summary is not None: body['summary'] = self.summary
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointHealth:
         """Deserializes the EndpointHealth from a dictionary."""
@@ -1780,6 +2172,32 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.health: body['health'] = self.health
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
+        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
+        if self.odbc_params: body['odbc_params'] = self.odbc_params
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.state is not None: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
@@ -1827,6 +2245,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTagPair:
         """Deserializes the EndpointTagPair from a dictionary."""
@@ -1843,6 +2268,12 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
         """Deserializes the EndpointTags from a dictionary."""
@@ -1868,6 +2299,14 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnumValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enum_options is not None: body['enum_options'] = self.enum_options
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnumValue:
         """Deserializes the EnumValue from a dictionary."""
@@ -2009,6 +2448,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_limit is not None: body['byte_limit'] = self.byte_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.disposition is not None: body['disposition'] = self.disposition
+        if self.format is not None: body['format'] = self.format
+        if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout
+        if self.parameters: body['parameters'] = self.parameters
+        if self.row_limit is not None: body['row_limit'] = self.row_limit
+        if self.schema is not None: body['schema'] = self.schema
+        if self.statement is not None: body['statement'] = self.statement
+        if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementRequest:
         """Deserializes the ExecuteStatementRequest from a dictionary."""
@@ -2089,6 +2544,21 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalLink into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.expiration is not None: body['expiration'] = self.expiration
+        if self.external_link is not None: body['external_link'] = self.external_link
+        if self.http_headers: body['http_headers'] = self.http_headers
+        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body['next_chunk_internal_link'] = self.next_chunk_internal_link
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLink:
         """Deserializes the ExternalLink from a dictionary."""
@@ -2129,6 +2599,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetResponse:
         """Deserializes the GetResponse from a dictionary."""
@@ -2148,6 +2626,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehousePermissionLevelsResponse:
         """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary."""
@@ -2272,6 +2756,32 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.health: body['health'] = self.health
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
+        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
+        if self.odbc_params: body['odbc_params'] = self.odbc_params
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.state is not None: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehouseResponse:
         """Deserializes the GetWarehouseResponse from a dictionary."""
@@ -2358,6 +2868,22 @@ def as_dict(self) -> dict:
             body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel: body['channel'] = self.channel
+        if self.config_param: body['config_param'] = self.config_param
+        if self.data_access_config: body['data_access_config'] = self.data_access_config
+        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
+        if self.global_param: body['global_param'] = self.global_param
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.security_policy is not None: body['security_policy'] = self.security_policy
+        if self.sql_configuration_parameters:
+            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceWarehouseConfigResponse:
         """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary."""
@@ -2433,6 +2959,22 @@ def as_dict(self) -> dict:
         if self.user: body['user'] = self.user.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query: body['query'] = self.query
+        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.state is not None: body['state'] = self.state
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
         """Deserializes the LegacyAlert from a dictionary."""
@@ -2568,6 +3110,35 @@ def as_dict(self) -> dict:
         if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by
+        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
+        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
+        if self.query is not None: body['query'] = self.query
+        if self.query_hash is not None: body['query_hash'] = self.query_hash
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.visualizations: body['visualizations'] = self.visualizations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
         """Deserializes the LegacyQuery from a dictionary."""
@@ -2639,6 +3210,19 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query: body['query'] = self.query
+        if self.type is not None: body['type'] = self.type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
         """Deserializes the LegacyVisualization from a dictionary."""
@@ -2671,6 +3255,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
         """Deserializes the ListAlertsResponse from a dictionary."""
@@ -2748,6 +3339,25 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
         """Deserializes the ListAlertsResponseAlert from a dictionary."""
@@ -2791,6 +3401,14 @@ def as_dict(self) -> dict:
         if self.res: body['res'] = [v.as_dict() for v in self.res]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.res: body['res'] = self.res
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse:
         """Deserializes the ListQueriesResponse from a dictionary."""
@@ -2812,6 +3430,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
         """Deserializes the ListQueryObjectsResponse from a dictionary."""
@@ -2890,6 +3515,28 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
         """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
@@ -2934,6 +3581,15 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
@@ -2956,6 +3612,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
         """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
@@ -2974,6 +3637,12 @@ def as_dict(self) -> dict:
         if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warehouses: body['warehouses'] = self.warehouses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
@@ -2999,6 +3668,14 @@ def as_dict(self) -> dict:
         if self.suffix is not None: body['suffix'] = self.suffix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.prefix is not None: body['prefix'] = self.prefix
+        if self.separator is not None: body['separator'] = self.separator
+        if self.suffix is not None: body['suffix'] = self.suffix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions:
         """Deserializes the MultiValuesOptions from a dictionary."""
@@ -3017,6 +3694,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NumericValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NumericValue:
         """Deserializes the NumericValue from a dictionary."""
@@ -3060,6 +3743,15 @@ def as_dict(self) -> dict:
         if self.protocol is not None: body['protocol'] = self.protocol
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OdbcParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.hostname is not None: body['hostname'] = self.hostname
+        if self.path is not None: body['path'] = self.path
+        if self.port is not None: body['port'] = self.port
+        if self.protocol is not None: body['protocol'] = self.protocol
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OdbcParams:
         """Deserializes the OdbcParams from a dictionary."""
@@ -3114,6 +3806,18 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Parameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enum_options is not None: body['enumOptions'] = self.enum_options
+        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options
+        if self.name is not None: body['name'] = self.name
+        if self.query_id is not None: body['queryId'] = self.query_id
+        if self.title is not None: body['title'] = self.title
+        if self.type is not None: body['type'] = self.type
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Parameter:
         """Deserializes the Parameter from a dictionary."""
@@ -3232,6 +3936,29 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Query into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Query:
         """Deserializes the Query from a dictionary."""
@@ -3273,6 +4000,14 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
         """Deserializes the QueryBackedValue from a dictionary."""
@@ -3324,6 +4059,19 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query is not None: body['query'] = self.query
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEditContent:
         """Deserializes the QueryEditContent from a dictionary."""
@@ -3363,6 +4111,16 @@ def as_dict(self) -> dict:
         if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range
+        if self.statement_ids: body['statement_ids'] = self.statement_ids
+        if self.statuses: body['statuses'] = self.statuses
+        if self.user_ids: body['user_ids'] = self.user_ids
+        if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
@@ -3472,6 +4230,33 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel_used: body['channel_used'] = self.channel_used
+        if self.duration is not None: body['duration'] = self.duration
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
+        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
+        if self.is_final is not None: body['is_final'] = self.is_final
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.metrics: body['metrics'] = self.metrics
+        if self.plans_state is not None: body['plans_state'] = self.plans_state
+        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
+        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
+        if self.statement_type is not None: body['statement_type'] = self.statement_type
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
@@ -3522,6 +4307,15 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryList:
         """Deserializes the QueryList from a dictionary."""
@@ -3605,8 +4399,38 @@ class QueryMetrics:
     write_remote_bytes: Optional[int] = None
     """Size pf persistent data written to cloud object storage in your cloud tenant, in bytes."""
 
-    def as_dict(self) -> dict:
-        """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
+        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
+        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.overloading_queue_start_timestamp is not None:
+            body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+        if self.provisioning_queue_start_timestamp is not None:
+            body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
+        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+        if self.query_compilation_start_timestamp is not None:
+            body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
+        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
+        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
+        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
+        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
+        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
+        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
+        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
+        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
+        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
+        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
         if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
@@ -3685,6 +4509,15 @@ def as_dict(self) -> dict:
         if self.schema is not None: body['schema'] = self.schema
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        if self.parameters: body['parameters'] = self.parameters
+        if self.schema is not None: body['schema'] = self.schema
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryOptions:
         """Deserializes the QueryOptions from a dictionary."""
@@ -3734,6 +4567,19 @@ def as_dict(self) -> dict:
         if self.title is not None: body['title'] = self.title
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value
+        if self.date_value: body['date_value'] = self.date_value
+        if self.enum_value: body['enum_value'] = self.enum_value
+        if self.name is not None: body['name'] = self.name
+        if self.numeric_value: body['numeric_value'] = self.numeric_value
+        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value
+        if self.text_value: body['text_value'] = self.text_value
+        if self.title is not None: body['title'] = self.title
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
         """Deserializes the QueryParameter from a dictionary."""
@@ -3791,6 +4637,19 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query is not None: body['query'] = self.query
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
         """Deserializes the QueryPostContent from a dictionary."""
@@ -3858,6 +4717,13 @@ def as_dict(self) -> dict:
             body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_pair: body['config_pair'] = self.config_pair
+        if self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepeatedEndpointConfPairs:
         """Deserializes the RepeatedEndpointConfPairs from a dictionary."""
@@ -3873,6 +4739,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreResponse:
         """Deserializes the RestoreResponse from a dictionary."""
@@ -3924,6 +4795,20 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.data_array: body['data_array'] = self.data_array
+        if self.external_links: body['external_links'] = self.external_links
+        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body['next_chunk_internal_link'] = self.next_chunk_internal_link
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
@@ -3974,6 +4859,18 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.chunks: body['chunks'] = self.chunks
+        if self.format is not None: body['format'] = self.format
+        if self.schema: body['schema'] = self.schema
+        if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count
+        if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
@@ -4001,6 +4898,13 @@ def as_dict(self) -> dict:
         if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column_count is not None: body['column_count'] = self.column_count
+        if self.columns: body['columns'] = self.columns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultSchema:
         """Deserializes the ResultSchema from a dictionary."""
@@ -4035,6 +4939,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServiceError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_code is not None: body['error_code'] = self.error_code
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServiceError:
         """Deserializes the ServiceError from a dictionary."""
@@ -4078,6 +4989,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetResponse:
         """Deserializes the SetResponse from a dictionary."""
@@ -4138,6 +5057,22 @@ def as_dict(self) -> dict:
             body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel: body['channel'] = self.channel
+        if self.config_param: body['config_param'] = self.config_param
+        if self.data_access_config: body['data_access_config'] = self.data_access_config
+        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
+        if self.global_param: body['global_param'] = self.global_param
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.security_policy is not None: body['security_policy'] = self.security_policy
+        if self.sql_configuration_parameters:
+            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigRequest:
         """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary."""
@@ -4170,6 +5105,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigResponse:
         """Deserializes the SetWorkspaceWarehouseConfigResponse from a dictionary."""
@@ -4192,6 +5132,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartWarehouseResponse:
         """Deserializes the StartWarehouseResponse from a dictionary."""
@@ -4233,6 +5178,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem:
         """Deserializes the StatementParameterListItem from a dictionary."""
@@ -4262,6 +5215,15 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest
+        if self.result: body['result'] = self.result
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
         """Deserializes the StatementResponse from a dictionary."""
@@ -4306,6 +5268,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error: body['error'] = self.error
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementStatus:
         """Deserializes the StatementStatus from a dictionary."""
@@ -4329,6 +5298,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StopWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StopWarehouseResponse:
         """Deserializes the StopWarehouseResponse from a dictionary."""
@@ -4345,6 +5319,12 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Success into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Success:
         """Deserializes the Success from a dictionary."""
@@ -4375,6 +5355,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.parameters: body['parameters'] = self.parameters
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
@@ -4486,6 +5474,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TextValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextValue:
         """Deserializes the TextValue from a dictionary."""
@@ -4507,6 +5501,13 @@ def as_dict(self) -> dict:
         if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TimeRange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TimeRange:
         """Deserializes the TimeRange from a dictionary."""
@@ -4524,6 +5525,12 @@ def as_dict(self) -> dict:
         if self.new_owner is not None: body['new_owner'] = self.new_owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.new_owner is not None: body['new_owner'] = self.new_owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
         """Deserializes the TransferOwnershipObjectId from a dictionary."""
@@ -4549,6 +5556,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
         """Deserializes the UpdateAlertRequest from a dictionary."""
@@ -4602,6 +5617,19 @@ def as_dict(self) -> dict:
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
         """Deserializes the UpdateAlertRequestAlert from a dictionary."""
@@ -4634,6 +5662,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.query: body['query'] = self.query
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
         """Deserializes the UpdateQueryRequest from a dictionary."""
@@ -4692,6 +5728,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
         """Deserializes the UpdateQueryRequestQuery from a dictionary."""
@@ -4716,6 +5768,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -4741,6 +5798,14 @@ def as_dict(self) -> dict:
         if self.visualization: body['visualization'] = self.visualization.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.visualization: body['visualization'] = self.visualization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
         """Deserializes the UpdateVisualizationRequest from a dictionary."""
@@ -4774,6 +5839,15 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
         """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
@@ -4799,6 +5873,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the User into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email is not None: body['email'] = self.email
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
@@ -4846,6 +5928,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Visualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Visualization:
         """Deserializes the Visualization from a dictionary."""
@@ -4883,6 +5978,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlRequest:
         """Deserializes the WarehouseAccessControlRequest from a dictionary."""
@@ -4920,6 +6025,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlResponse:
         """Deserializes the WarehouseAccessControlResponse from a dictionary."""
@@ -4947,6 +6063,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermission:
         """Deserializes the WarehousePermission from a dictionary."""
@@ -4981,6 +6105,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissions:
         """Deserializes the WarehousePermissions from a dictionary."""
@@ -5004,6 +6136,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsDescription:
         """Deserializes the WarehousePermissionsDescription from a dictionary."""
@@ -5026,6 +6165,13 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsRequest:
         """Deserializes the WarehousePermissionsRequest from a dictionary."""
@@ -5050,6 +6196,13 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseTypePair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseTypePair:
         """Deserializes the WarehouseTypePair from a dictionary."""
@@ -5090,6 +6243,15 @@ def as_dict(self) -> dict:
         if self.width is not None: body['width'] = self.width
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Widget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options
+        if self.visualization: body['visualization'] = self.visualization
+        if self.width is not None: body['width'] = self.width
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Widget:
         """Deserializes the Widget from a dictionary."""
@@ -5136,6 +6298,18 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
+        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
+        if self.position: body['position'] = self.position
+        if self.title is not None: body['title'] = self.title
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetOptions:
         """Deserializes the WidgetOptions from a dictionary."""
@@ -5178,6 +6352,16 @@ def as_dict(self) -> dict:
         if self.size_y is not None: body['sizeY'] = self.size_y
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WidgetPosition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_height is not None: body['autoHeight'] = self.auto_height
+        if self.col is not None: body['col'] = self.col
+        if self.row is not None: body['row'] = self.row
+        if self.size_x is not None: body['sizeX'] = self.size_x
+        if self.size_y is not None: body['sizeY'] = self.size_y
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetPosition:
         """Deserializes the WidgetPosition from a dictionary."""
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py
index d6c28b840..f1e6aeaa3 100755
--- a/databricks/sdk/service/vectorsearch.py
+++ b/databricks/sdk/service/vectorsearch.py
@@ -29,6 +29,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -50,6 +56,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateEndpoint:
         """Deserializes the CreateEndpoint from a dictionary."""
@@ -93,6 +106,17 @@ def as_dict(self) -> dict:
         if self.primary_key is not None: body['primary_key'] = self.primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
+        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexRequest:
         """Deserializes the CreateVectorIndexRequest from a dictionary."""
@@ -116,6 +140,12 @@ def as_dict(self) -> dict:
         if self.vector_index: body['vector_index'] = self.vector_index.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.vector_index: body['vector_index'] = self.vector_index
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexResponse:
         """Deserializes the CreateVectorIndexResponse from a dictionary."""
@@ -139,6 +169,13 @@ def as_dict(self) -> dict:
         if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
+        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataResult:
         """Deserializes the DeleteDataResult from a dictionary."""
@@ -171,6 +208,13 @@ def as_dict(self) -> dict:
         if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexRequest:
         """Deserializes the DeleteDataVectorIndexRequest from a dictionary."""
@@ -194,6 +238,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result: body['result'] = self.result
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexResponse:
         """Deserializes the DeleteDataVectorIndexResponse from a dictionary."""
@@ -209,6 +260,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteEndpointResponse:
         """Deserializes the DeleteEndpointResponse from a dictionary."""
@@ -223,6 +279,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteIndexResponse:
         """Deserializes the DeleteIndexResponse from a dictionary."""
@@ -272,6 +333,18 @@ def as_dict(self) -> dict:
         if self.source_table is not None: body['source_table'] = self.source_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
+        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
+        if self.source_table is not None: body['source_table'] = self.source_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest:
         """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary."""
@@ -325,6 +398,18 @@ def as_dict(self) -> dict:
         if self.source_table is not None: body['source_table'] = self.source_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
+        if self.source_table is not None: body['source_table'] = self.source_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecResponse:
         """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary."""
@@ -363,6 +448,14 @@ def as_dict(self) -> dict:
         if self.schema_json is not None: body['schema_json'] = self.schema_json
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.schema_json is not None: body['schema_json'] = self.schema_json
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec:
         """Deserializes the DirectAccessVectorIndexSpec from a dictionary."""
@@ -389,6 +482,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_model_endpoint_name is not None:
+            body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingSourceColumn:
         """Deserializes the EmbeddingSourceColumn from a dictionary."""
@@ -411,6 +512,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingVectorColumn:
         """Deserializes the EmbeddingVectorColumn from a dictionary."""
@@ -461,6 +569,21 @@ def as_dict(self) -> dict:
         if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.endpoint_status: body['endpoint_status'] = self.endpoint_status
+        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user
+        if self.name is not None: body['name'] = self.name
+        if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
@@ -492,6 +615,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointStatus:
         """Deserializes the EndpointStatus from a dictionary."""
@@ -528,6 +658,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoints: body['endpoints'] = self.endpoints
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointResponse:
         """Deserializes the ListEndpointResponse from a dictionary."""
@@ -545,6 +682,12 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListValue:
         """Deserializes the ListValue from a dictionary."""
@@ -566,6 +709,13 @@ def as_dict(self) -> dict:
         if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.vector_indexes: body['vector_indexes'] = self.vector_indexes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVectorIndexesResponse:
         """Deserializes the ListVectorIndexesResponse from a dictionary."""
@@ -590,6 +740,13 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MapStringValueEntry:
         """Deserializes the MapStringValueEntry from a dictionary."""
@@ -628,6 +785,16 @@ def as_dict(self) -> dict:
         if self.primary_key is not None: body['primary_key'] = self.primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator is not None: body['creator'] = self.creator
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MiniVectorIndex:
         """Deserializes the MiniVectorIndex from a dictionary."""
@@ -672,6 +839,14 @@ def as_dict(self) -> dict:
         if self.page_token is not None: body['page_token'] = self.page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.page_token is not None: body['page_token'] = self.page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexNextPageRequest:
         """Deserializes the QueryVectorIndexNextPageRequest from a dictionary."""
@@ -724,6 +899,19 @@ def as_dict(self) -> dict:
         if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.filters_json is not None: body['filters_json'] = self.filters_json
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.num_results is not None: body['num_results'] = self.num_results
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.query_type is not None: body['query_type'] = self.query_type
+        if self.query_vector: body['query_vector'] = self.query_vector
+        if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexRequest:
         """Deserializes the QueryVectorIndexRequest from a dictionary."""
@@ -758,6 +946,14 @@ def as_dict(self) -> dict:
         if self.result: body['result'] = self.result.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.result: body['result'] = self.result
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexResponse:
         """Deserializes the QueryVectorIndexResponse from a dictionary."""
@@ -783,6 +979,13 @@ def as_dict(self) -> dict:
         if self.row_count is not None: body['row_count'] = self.row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_array: body['data_array'] = self.data_array
+        if self.row_count is not None: body['row_count'] = self.row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
@@ -806,6 +1009,13 @@ def as_dict(self) -> dict:
         if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column_count is not None: body['column_count'] = self.column_count
+        if self.columns: body['columns'] = self.columns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
@@ -833,6 +1043,14 @@ def as_dict(self) -> dict:
         if self.num_results is not None: body['num_results'] = self.num_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        if self.num_results is not None: body['num_results'] = self.num_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexRequest:
         """Deserializes the ScanVectorIndexRequest from a dictionary."""
@@ -858,6 +1076,13 @@ def as_dict(self) -> dict:
         if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexResponse:
         """Deserializes the ScanVectorIndexResponse from a dictionary."""
@@ -875,6 +1100,12 @@ def as_dict(self) -> dict:
         if self.fields: body['fields'] = [v.as_dict() for v in self.fields]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Struct into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fields: body['fields'] = self.fields
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Struct:
         """Deserializes the Struct from a dictionary."""
@@ -889,6 +1120,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SyncIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SyncIndexResponse:
         """Deserializes the SyncIndexResponse from a dictionary."""
@@ -912,6 +1148,13 @@ def as_dict(self) -> dict:
         if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
+        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataResult:
         """Deserializes the UpsertDataResult from a dictionary."""
@@ -944,6 +1187,13 @@ def as_dict(self) -> dict:
         if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexRequest:
         """Deserializes the UpsertDataVectorIndexRequest from a dictionary."""
@@ -967,6 +1217,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result: body['result'] = self.result
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexResponse:
         """Deserializes the UpsertDataVectorIndexResponse from a dictionary."""
@@ -999,6 +1256,17 @@ def as_dict(self) -> dict:
         if self.struct_value: body['struct_value'] = self.struct_value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Value into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.list_value: body['list_value'] = self.list_value
+        if self.null_value is not None: body['null_value'] = self.null_value
+        if self.number_value is not None: body['number_value'] = self.number_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.struct_value: body['struct_value'] = self.struct_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Value:
         """Deserializes the Value from a dictionary."""
@@ -1052,6 +1320,19 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VectorIndex into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator is not None: body['creator'] = self.creator
+        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
+        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndex:
         """Deserializes the VectorIndex from a dictionary."""
@@ -1090,6 +1371,15 @@ def as_dict(self) -> dict:
         if self.ready is not None: body['ready'] = self.ready
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_url is not None: body['index_url'] = self.index_url
+        if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count
+        if self.message is not None: body['message'] = self.message
+        if self.ready is not None: body['ready'] = self.ready
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndexStatus:
         """Deserializes the VectorIndexStatus from a dictionary."""
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 29380d4f3..eb5418987 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -29,6 +29,13 @@ def as_dict(self) -> dict:
         if self.principal is not None: body['principal'] = self.principal
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AclItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission is not None: body['permission'] = self.permission
+        if self.principal is not None: body['principal'] = self.principal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AclItem:
         """Deserializes the AclItem from a dictionary."""
@@ -57,6 +64,13 @@ def as_dict(self) -> dict:
         if self.resource_id is not None: body['resource_id'] = self.resource_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dns_name is not None: body['dns_name'] = self.dns_name
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata:
         """Deserializes the AzureKeyVaultSecretScopeMetadata from a dictionary."""
@@ -91,6 +105,14 @@ def as_dict(self) -> dict:
         if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest:
         """Deserializes the CreateCredentialsRequest from a dictionary."""
@@ -119,6 +141,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse:
         """Deserializes the CreateCredentialsResponse from a dictionary."""
@@ -154,6 +184,15 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest:
         """Deserializes the CreateRepoRequest from a dictionary."""
@@ -198,6 +237,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse:
         """Deserializes the CreateRepoResponse from a dictionary."""
@@ -234,6 +285,16 @@ def as_dict(self) -> dict:
         if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault
+        if self.initial_manage_principal is not None:
+            body['initial_manage_principal'] = self.initial_manage_principal
+        if self.scope is not None: body['scope'] = self.scope
+        if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateScope:
         """Deserializes the CreateScope from a dictionary."""
@@ -252,6 +313,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateScopeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateScopeResponse:
         """Deserializes the CreateScopeResponse from a dictionary."""
@@ -278,6 +344,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
@@ -303,6 +377,13 @@ def as_dict(self) -> dict:
         if self.recursive is not None: body['recursive'] = self.recursive
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Delete into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.recursive is not None: body['recursive'] = self.recursive
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
@@ -324,6 +405,13 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAcl:
         """Deserializes the DeleteAcl from a dictionary."""
@@ -338,6 +426,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAclResponse:
         """Deserializes the DeleteAclResponse from a dictionary."""
@@ -352,6 +445,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialsResponse:
         """Deserializes the DeleteCredentialsResponse from a dictionary."""
@@ -366,6 +464,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRepoResponse:
         """Deserializes the DeleteRepoResponse from a dictionary."""
@@ -380,6 +483,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -397,6 +505,12 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScope:
         """Deserializes the DeleteScope from a dictionary."""
@@ -411,6 +525,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScopeResponse:
         """Deserializes the DeleteScopeResponse from a dictionary."""
@@ -432,6 +551,13 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSecret:
         """Deserializes the DeleteSecret from a dictionary."""
@@ -446,6 +572,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSecretResponse:
         """Deserializes the DeleteSecretResponse from a dictionary."""
@@ -478,6 +609,13 @@ def as_dict(self) -> dict:
         if self.file_type is not None: body['file_type'] = self.file_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.file_type is not None: body['file_type'] = self.file_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportResponse:
         """Deserializes the ExportResponse from a dictionary."""
@@ -504,6 +642,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse:
         """Deserializes the GetCredentialsResponse from a dictionary."""
@@ -523,6 +669,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse:
         """Deserializes the GetRepoPermissionLevelsResponse from a dictionary."""
@@ -564,6 +716,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse:
         """Deserializes the GetRepoResponse from a dictionary."""
@@ -591,6 +755,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSecretResponse:
         """Deserializes the GetSecretResponse from a dictionary."""
@@ -608,6 +779,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceObjectPermissionLevelsResponse:
         """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary."""
@@ -657,6 +834,16 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Import into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.format is not None: body['format'] = self.format
+        if self.language is not None: body['language'] = self.language
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Import:
         """Deserializes the Import from a dictionary."""
@@ -697,6 +884,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ImportResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ImportResponse:
         """Deserializes the ImportResponse from a dictionary."""
@@ -723,6 +915,12 @@ def as_dict(self) -> dict:
         if self.items: body['items'] = [v.as_dict() for v in self.items]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse:
         """Deserializes the ListAclsResponse from a dictionary."""
@@ -740,6 +938,12 @@ def as_dict(self) -> dict:
         if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credentials: body['credentials'] = self.credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
@@ -762,6 +966,13 @@ def as_dict(self) -> dict:
         if self.repos: body['repos'] = [v.as_dict() for v in self.repos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListReposResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.repos: body['repos'] = self.repos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListReposResponse:
         """Deserializes the ListReposResponse from a dictionary."""
@@ -779,6 +990,12 @@ def as_dict(self) -> dict:
         if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.objects: body['objects'] = self.objects
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
@@ -796,6 +1013,12 @@ def as_dict(self) -> dict:
         if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListScopesResponse:
         """Deserializes the ListScopesResponse from a dictionary."""
@@ -813,6 +1036,12 @@ def as_dict(self) -> dict:
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.secrets: body['secrets'] = self.secrets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSecretsResponse:
         """Deserializes the ListSecretsResponse from a dictionary."""
@@ -831,6 +1060,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Mkdirs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Mkdirs:
         """Deserializes the Mkdirs from a dictionary."""
@@ -845,6 +1080,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkdirsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkdirsResponse:
         """Deserializes the MkdirsResponse from a dictionary."""
@@ -894,6 +1134,19 @@ def as_dict(self) -> dict:
         if self.size is not None: body['size'] = self.size
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.language is not None: body['language'] = self.language
+        if self.modified_at is not None: body['modified_at'] = self.modified_at
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.path is not None: body['path'] = self.path
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.size is not None: body['size'] = self.size
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectInfo:
         """Deserializes the ObjectInfo from a dictionary."""
@@ -941,6 +1194,14 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAcl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission is not None: body['permission'] = self.permission
+        if self.principal is not None: body['principal'] = self.principal
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAcl:
         """Deserializes the PutAcl from a dictionary."""
@@ -957,6 +1218,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAclResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAclResponse:
         """Deserializes the PutAclResponse from a dictionary."""
@@ -986,6 +1252,15 @@ def as_dict(self) -> dict:
         if self.string_value is not None: body['string_value'] = self.string_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bytes_value is not None: body['bytes_value'] = self.bytes_value
+        if self.key is not None: body['key'] = self.key
+        if self.scope is not None: body['scope'] = self.scope
+        if self.string_value is not None: body['string_value'] = self.string_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutSecret:
         """Deserializes the PutSecret from a dictionary."""
@@ -1003,6 +1278,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutSecretResponse:
         """Deserializes the PutSecretResponse from a dictionary."""
@@ -1033,6 +1313,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlRequest:
         """Deserializes the RepoAccessControlRequest from a dictionary."""
@@ -1070,6 +1360,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse:
         """Deserializes the RepoAccessControlResponse from a dictionary."""
@@ -1117,6 +1418,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
@@ -1146,6 +1459,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermission:
         """Deserializes the RepoPermission from a dictionary."""
@@ -1180,6 +1501,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissions:
         """Deserializes the RepoPermissions from a dictionary."""
@@ -1202,6 +1531,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsDescription:
         """Deserializes the RepoPermissionsDescription from a dictionary."""
@@ -1224,6 +1560,13 @@ def as_dict(self) -> dict:
         if self.repo_id is not None: body['repo_id'] = self.repo_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsRequest:
         """Deserializes the RepoPermissionsRequest from a dictionary."""
@@ -1253,6 +1596,14 @@ def as_dict(self) -> dict:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretMetadata:
         """Deserializes the SecretMetadata from a dictionary."""
@@ -1278,6 +1629,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.backend_type is not None: body['backend_type'] = self.backend_type
+        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretScope:
         """Deserializes the SecretScope from a dictionary."""
@@ -1301,6 +1660,12 @@ def as_dict(self) -> dict:
         if self.patterns: body['patterns'] = [v for v in self.patterns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.patterns: body['patterns'] = self.patterns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckout:
         """Deserializes the SparseCheckout from a dictionary."""
@@ -1322,6 +1687,12 @@ def as_dict(self) -> dict:
         if self.patterns: body['patterns'] = [v for v in self.patterns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.patterns: body['patterns'] = self.patterns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate:
         """Deserializes the SparseCheckoutUpdate from a dictionary."""
@@ -1360,6 +1731,15 @@ def as_dict(self) -> dict:
         if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest:
         """Deserializes the UpdateCredentialsRequest from a dictionary."""
@@ -1377,6 +1757,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsResponse:
         """Deserializes the UpdateCredentialsResponse from a dictionary."""
@@ -1409,6 +1794,15 @@ def as_dict(self) -> dict:
         if self.tag is not None: body['tag'] = self.tag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.tag is not None: body['tag'] = self.tag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest:
         """Deserializes the UpdateRepoRequest from a dictionary."""
@@ -1426,6 +1820,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRepoResponse:
         """Deserializes the UpdateRepoResponse from a dictionary."""
@@ -1456,6 +1855,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlRequest:
         """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary."""
@@ -1493,6 +1902,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlResponse:
         """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary."""
@@ -1520,6 +1940,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermission:
         """Deserializes the WorkspaceObjectPermission from a dictionary."""
@@ -1554,6 +1982,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissions:
         """Deserializes the WorkspaceObjectPermissions from a dictionary."""
@@ -1577,6 +2013,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsDescription:
         """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary."""
@@ -1603,6 +2046,14 @@ def as_dict(self) -> dict:
         if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id
+        if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsRequest:
         """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary."""
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 19b245b25..5b5fbb379 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -193,8 +193,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ColumnTypeName
 
-   Name of type (INT, STRUCT, MAP, etc.).
-
    .. py:attribute:: ARRAY
       :value: "ARRAY"
 
@@ -549,6 +547,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: WORKDAY_RAAS_FORMAT
       :value: "WORKDAY_RAAS_FORMAT"
 
+.. autoclass:: DatabricksGcpServiceAccount
+   :members:
+   :undoc-members:
+
 .. autoclass:: DatabricksGcpServiceAccountRequest
    :members:
    :undoc-members:
@@ -718,11 +720,11 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: GcpServiceAccountKey
+.. autoclass:: GenerateTemporaryServiceCredentialAzureOptions
    :members:
    :undoc-members:
 
-.. autoclass:: GenerateTemporaryServiceCredentialAzureOptions
+.. autoclass:: GenerateTemporaryServiceCredentialGcpOptions
    :members:
    :undoc-members:
 
@@ -1122,6 +1124,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CREATE_FOREIGN_CATALOG
       :value: "CREATE_FOREIGN_CATALOG"
 
+   .. py:attribute:: CREATE_FOREIGN_SECURABLE
+      :value: "CREATE_FOREIGN_SECURABLE"
+
    .. py:attribute:: CREATE_FUNCTION
       :value: "CREATE_FUNCTION"
 
diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst
new file mode 100644
index 000000000..762c454bf
--- /dev/null
+++ b/docs/dbdataclasses/cleanrooms.rst
@@ -0,0 +1,155 @@
+Clean Rooms
+===========
+
+These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.cleanrooms`` module.
+
+.. py:currentmodule:: databricks.sdk.service.cleanrooms
+.. autoclass:: CleanRoom
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAccessRestricted
+
+   .. py:attribute:: CSP_MISMATCH
+      :value: "CSP_MISMATCH"
+
+   .. py:attribute:: NO_RESTRICTION
+      :value: "NO_RESTRICTION"
+
+.. autoclass:: CleanRoomAsset
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAssetAssetType
+
+   .. py:attribute:: FOREIGN_TABLE
+      :value: "FOREIGN_TABLE"
+
+   .. py:attribute:: NOTEBOOK_FILE
+      :value: "NOTEBOOK_FILE"
+
+   .. py:attribute:: TABLE
+      :value: "TABLE"
+
+   .. py:attribute:: VIEW
+      :value: "VIEW"
+
+   .. py:attribute:: VOLUME
+      :value: "VOLUME"
+
+.. autoclass:: CleanRoomAssetForeignTable
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetForeignTableLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetNotebook
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAssetStatusEnum
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: PERMISSION_DENIED
+      :value: "PERMISSION_DENIED"
+
+.. autoclass:: CleanRoomAssetTable
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetTableLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetView
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetViewLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetVolumeLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomCollaborator
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomNotebookTaskRun
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomOutputCatalog
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomOutputCatalogOutputCatalogStatus
+
+   .. py:attribute:: CREATED
+      :value: "CREATED"
+
+   .. py:attribute:: NOT_CREATED
+      :value: "NOT_CREATED"
+
+   .. py:attribute:: NOT_ELIGIBLE
+      :value: "NOT_ELIGIBLE"
+
+.. autoclass:: CleanRoomRemoteDetail
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomStatusEnum
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: DELETED
+      :value: "DELETED"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: PROVISIONING
+      :value: "PROVISIONING"
+
+.. autoclass:: CollaboratorJobRunInfo
+   :members:
+   :undoc-members:
+
+.. autoclass:: ComplianceSecurityProfile
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateCleanRoomOutputCatalogResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteCleanRoomAssetResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomAssetsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomNotebookTaskRunsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateCleanRoomRequest
+   :members:
+   :undoc-members:
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 3d07ed346..22a3ea95d 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -17,6 +17,59 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: DASHBOARD_VIEW_BASIC
       :value: "DASHBOARD_VIEW_BASIC"
 
+.. py:class:: DataType
+
+   .. py:attribute:: DATA_TYPE_ARRAY
+      :value: "DATA_TYPE_ARRAY"
+
+   .. py:attribute:: DATA_TYPE_BIG_INT
+      :value: "DATA_TYPE_BIG_INT"
+
+   .. py:attribute:: DATA_TYPE_BINARY
+      :value: "DATA_TYPE_BINARY"
+
+   .. py:attribute:: DATA_TYPE_BOOLEAN
+      :value: "DATA_TYPE_BOOLEAN"
+
+   .. py:attribute:: DATA_TYPE_DATE
+      :value: "DATA_TYPE_DATE"
+
+   .. py:attribute:: DATA_TYPE_DECIMAL
+      :value: "DATA_TYPE_DECIMAL"
+
+   .. py:attribute:: DATA_TYPE_DOUBLE
+      :value: "DATA_TYPE_DOUBLE"
+
+   .. py:attribute:: DATA_TYPE_FLOAT
+      :value: "DATA_TYPE_FLOAT"
+
+   .. py:attribute:: DATA_TYPE_INT
+      :value: "DATA_TYPE_INT"
+
+   .. py:attribute:: DATA_TYPE_INTERVAL
+      :value: "DATA_TYPE_INTERVAL"
+
+   .. py:attribute:: DATA_TYPE_MAP
+      :value: "DATA_TYPE_MAP"
+
+   .. py:attribute:: DATA_TYPE_SMALL_INT
+      :value: "DATA_TYPE_SMALL_INT"
+
+   .. py:attribute:: DATA_TYPE_STRING
+      :value: "DATA_TYPE_STRING"
+
+   .. py:attribute:: DATA_TYPE_STRUCT
+      :value: "DATA_TYPE_STRUCT"
+
+   .. py:attribute:: DATA_TYPE_TIMESTAMP
+      :value: "DATA_TYPE_TIMESTAMP"
+
+   .. py:attribute:: DATA_TYPE_TINY_INT
+      :value: "DATA_TYPE_TINY_INT"
+
+   .. py:attribute:: DATA_TYPE_VOID
+      :value: "DATA_TYPE_VOID"
+
 .. autoclass:: DeleteScheduleResponse
    :members:
    :undoc-members:
@@ -246,6 +299,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QuerySchema
+   :members:
+   :undoc-members:
+
+.. autoclass:: QuerySchemaColumn
+   :members:
+   :undoc-members:
+
 .. autoclass:: Result
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst
index 987bee7f5..3ecb9c13f 100644
--- a/docs/dbdataclasses/index.rst
+++ b/docs/dbdataclasses/index.rst
@@ -8,6 +8,7 @@ Dataclasses
    apps
    billing
    catalog
+   cleanrooms
    compute
    dashboards
    files
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index 3aa0db043..374c48351 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -28,6 +28,81 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: CleanRoomTaskRunLifeCycleState
+
+   Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to remove coupling with the jobs API definition.
+
+   .. py:attribute:: BLOCKED
+      :value: "BLOCKED"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
+   .. py:attribute:: QUEUED
+      :value: "QUEUED"
+
+   .. py:attribute:: RUNNING
+      :value: "RUNNING"
+
+   .. py:attribute:: SKIPPED
+      :value: "SKIPPED"
+
+   .. py:attribute:: TERMINATED
+      :value: "TERMINATED"
+
+   .. py:attribute:: TERMINATING
+      :value: "TERMINATING"
+
+   .. py:attribute:: WAITING_FOR_RETRY
+      :value: "WAITING_FOR_RETRY"
+
+.. py:class:: CleanRoomTaskRunResultState
+
+   Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid cyclic dependency.
+
+   .. py:attribute:: CANCELED
+      :value: "CANCELED"
+
+   .. py:attribute:: DISABLED
+      :value: "DISABLED"
+
+   .. py:attribute:: EVICTED
+      :value: "EVICTED"
+
+   .. py:attribute:: EXCLUDED
+      :value: "EXCLUDED"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: MAXIMUM_CONCURRENT_RUNS_REACHED
+      :value: "MAXIMUM_CONCURRENT_RUNS_REACHED"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
+   .. py:attribute:: SUCCESS_WITH_FAILURES
+      :value: "SUCCESS_WITH_FAILURES"
+
+   .. py:attribute:: TIMEDOUT
+      :value: "TIMEDOUT"
+
+   .. py:attribute:: UPSTREAM_CANCELED
+      :value: "UPSTREAM_CANCELED"
+
+   .. py:attribute:: UPSTREAM_EVICTED
+      :value: "UPSTREAM_EVICTED"
+
+   .. py:attribute:: UPSTREAM_FAILED
+      :value: "UPSTREAM_FAILED"
+
+.. autoclass:: CleanRoomTaskRunState
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterInstance
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index 7d556f8ad..572a0d6c4 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -215,6 +215,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteDefaultNamespaceSettingResponse
    :members:
    :undoc-members:
@@ -276,6 +284,83 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EgressNetworkPolicy
+   :members:
+   :undoc-members:
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicy
+   :members:
+   :undoc-members:
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyInternetDestination
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+
+   The filtering protocol used by the DP. For private and public preview, SEG will only support TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e. SNI based filtering, filtering by FQDN).
+
+   .. py:attribute:: TCP
+      :value: "TCP"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType
+
+   .. py:attribute:: FQDN
+      :value: "FQDN"
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyLogOnlyMode
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType
+
+   .. py:attribute:: ALL_SERVICES
+      :value: "ALL_SERVICES"
+
+   .. py:attribute:: SELECTED_SERVICES
+      :value: "SELECTED_SERVICES"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType
+
+   The values should match the list of workloads used in networkconfig.proto
+
+   .. py:attribute:: DBSQL
+      :value: "DBSQL"
+
+   .. py:attribute:: ML_SERVING
+      :value: "ML_SERVING"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyRestrictionMode
+
+   At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.
+
+   .. py:attribute:: FULL_ACCESS
+      :value: "FULL_ACCESS"
+
+   .. py:attribute:: PRIVATE_ACCESS_ONLY
+      :value: "PRIVATE_ACCESS_ONLY"
+
+   .. py:attribute:: RESTRICTED_ACCESS
+      :value: "RESTRICTED_ACCESS"
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyStorageDestination
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType
+
+   .. py:attribute:: AWS_S3
+      :value: "AWS_S3"
+
+   .. py:attribute:: AZURE_STORAGE
+      :value: "AZURE_STORAGE"
+
+   .. py:attribute:: CLOUDFLARE_R2
+      :value: "CLOUDFLARE_R2"
+
+   .. py:attribute:: GOOGLE_CLOUD_STORAGE
+      :value: "GOOGLE_CLOUD_STORAGE"
+
 .. autoclass:: EmailConfig
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index 650811e08..2db59fcbe 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -62,14 +62,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PartitionSpecificationPartition
+   :members:
+   :undoc-members:
+
 .. autoclass:: PartitionValue
    :members:
    :undoc-members:
 
 .. py:class:: PartitionValueOp
 
-   The operator to apply for the value.
-
    .. py:attribute:: EQUAL
       :value: "EQUAL"
 
@@ -108,6 +110,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CREATE_FOREIGN_CATALOG
       :value: "CREATE_FOREIGN_CATALOG"
 
+   .. py:attribute:: CREATE_FOREIGN_SECURABLE
+      :value: "CREATE_FOREIGN_SECURABLE"
+
    .. py:attribute:: CREATE_FUNCTION
       :value: "CREATE_FUNCTION"
 
diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst
new file mode 100644
index 000000000..fe282543a
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_room_assets.rst
@@ -0,0 +1,94 @@
+``w.clean_room_assets``: Assets
+===============================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomAssetsAPI
+
+    Clean room assets are data and code objects (tables, volumes, and notebooks) that are shared with
+    the clean room.
+
+    .. py:method:: create(clean_room_name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
+
+        Create an asset.
+        
+        Create a clean room asset, i.e. share an asset such as a notebook or table into the clean room. For each UC
+        asset that is added through this method, the clean room owner must also have enough privilege on the
+        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+        access the asset. Typically, you should use a group as the clean room owner.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
+
+    .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str)
+
+        Delete an asset.
+        
+        Delete a clean room asset - unshare/remove the asset from the clean room
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        
+        
+
+    .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset
+
+        Get an asset.
+        
+        Get the details of a clean room asset by its type and full name.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        :returns: :class:`CleanRoomAsset`
+        
+
+    .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset]
+
+        List assets.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomAsset`
+        
+
+    .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
+
+        Update an asset.
+        
+        Update a clean room asset. For example, updating the content of a notebook; changing the shared
+        partitions of a table; etc.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param name: str
+          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+          name displayed in the clean room UI.
+          
+          For UC securable assets (tables, volumes, etc.), the format is
+          *shared_catalog*.*shared_schema*.*asset_name*
+          
+          For notebooks, the name is the notebook file name.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
\ No newline at end of file
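
For orientation, a hedged usage sketch of the methods documented above; the clean room name, the three-level table name, and the `CleanRoomAsset` field choices are assumptions rather than verbatim SDK examples:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import cleanrooms

w = WorkspaceClient()

# Share a UC table into the clean room (names are placeholders).
asset = w.clean_room_assets.create(
    clean_room_name='my-clean-room',
    asset=cleanrooms.CleanRoomAsset(
        name='shared_catalog.shared_schema.my_table',
        asset_type=cleanrooms.CleanRoomAssetAssetType.TABLE,
    ),
)

# Enumerate everything currently shared into the room.
for a in w.clean_room_assets.list(clean_room_name='my-clean-room'):
    print(a.name, a.asset_type)
```
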
diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst
new file mode 100644
index 000000000..dcf59037c
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst
@@ -0,0 +1,25 @@
+``w.clean_room_task_runs``: Task Runs
+=====================================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomTaskRunsAPI
+
+    Clean room task runs are the executions of notebooks in a clean room.
+
+    .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun]
+
+        List notebook task runs.
+        
+        List all the historical notebook task runs in a clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param notebook_name: str (optional)
+          Notebook name.
+        :param page_size: int (optional)
+          The maximum number of task runs to return.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        
\ No newline at end of file
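
A minimal usage sketch, assuming a clean room named `my-clean-room` with a shared notebook named `analysis` (both hypothetical):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Page through the historical runs of one notebook; the returned iterator
# handles page_token continuation automatically.
for run in w.clean_room_task_runs.list(clean_room_name='my-clean-room',
                                       notebook_name='analysis',
                                       page_size=20):
    print(run.as_dict())
```
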
diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst
new file mode 100644
index 000000000..0d1468399
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_rooms.rst
@@ -0,0 +1,94 @@
+``w.clean_rooms``: Clean Rooms
+==============================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomsAPI
+
+    A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
+    environment where multiple parties can work together on sensitive enterprise data without direct access to
+    each other’s data.
+
+    .. py:method:: create( [, clean_room: Optional[CleanRoom]]) -> CleanRoom
+
+        Create a clean room.
+        
+        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+        name field inside the clean_room field can be used to poll the clean room status, using the
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state. The clean room will be usable once it enters an ACTIVE state.
+        
+        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+        
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
+
+    .. py:method:: create_output_catalog(clean_room_name: str [, output_catalog: Optional[CleanRoomOutputCatalog]]) -> CreateCleanRoomOutputCatalogResponse
+
+        Create an output catalog.
+        
+        Create the output catalog of the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+        
+        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
+        
+
+    .. py:method:: delete(name: str)
+
+        Delete a clean room.
+        
+        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+        but it will be in a DELETED state and no operations other than deletion can be performed on it.
+        
+        :param name: str
+          Name of the clean room.
+        
+        
+        
+
+    .. py:method:: get(name: str) -> CleanRoom
+
+        Get a clean room.
+        
+        Get the details of a clean room given its name.
+        
+        :param name: str
+        
+        :returns: :class:`CleanRoom`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom]
+
+        List clean rooms.
+        
+        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+        returned.
+        
+        :param page_size: int (optional)
+          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoom`
+        
+
+    .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom
+
+        Update a clean room.
+        
+        Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+        privilege, or be a metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        :param name: str
+          Name of the clean room.
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
\ No newline at end of file
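
Since `create()` is asynchronous, a caller typically polls `get()` until the room leaves the PROVISIONING state. A hedged sketch of that flow; a real clean room additionally needs collaborator details, which are omitted here:

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import cleanrooms

w = WorkspaceClient()

created = w.clean_rooms.create(clean_room=cleanrooms.CleanRoom(name='my-clean-room'))

# Poll until the clean room is no longer provisioning.
room = w.clean_rooms.get(name=created.name)
while room.status == cleanrooms.CleanRoomStatusEnum.PROVISIONING:
    time.sleep(10)
    room = w.clean_rooms.get(name=room.name)

print(room.status)  # ideally CleanRoomStatusEnum.ACTIVE
```
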
diff --git a/docs/workspace/cleanrooms/index.rst b/docs/workspace/cleanrooms/index.rst
new file mode 100644
index 000000000..a979ac201
--- /dev/null
+++ b/docs/workspace/cleanrooms/index.rst
@@ -0,0 +1,12 @@
+
+Clean Rooms
+===========
+
+Manage clean rooms and their assets and task runs
+
+.. toctree::
+   :maxdepth: 1
+
+   clean_room_assets
+   clean_room_task_runs
+   clean_rooms
\ No newline at end of file
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst
index db20b2192..f1bd70317 100644
--- a/docs/workspace/files/files.rst
+++ b/docs/workspace/files/files.rst
@@ -64,8 +64,8 @@
 
         Download a file.
         
-        Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard HTTP file
-        download, not a JSON RPC.
+        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
         
         :param file_path: str
           The absolute path of the file.
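
The Range and If-Unmodified-Since support noted above applies to the underlying HTTP request; the usual SDK call path is unchanged. A minimal sketch, with a placeholder volume path:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# DownloadResponse.contents is a binary stream over the response body.
resp = w.files.download('/Volumes/main/default/my_volume/report.csv')
data = resp.contents.read()
```
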
diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst
index d9ca84197..667f6c18f 100644
--- a/docs/workspace/index.rst
+++ b/docs/workspace/index.rst
@@ -9,6 +9,7 @@ These APIs are available from WorkspaceClient
 
    apps/index
    catalog/index
+   cleanrooms/index
    compute/index
    dashboards/index
    files/index
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index e9e63bb20..b7d677f03 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -218,8 +218,8 @@
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
-          not specified, the job/pipeline runs as the user who created the job/pipeline.
+          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+          the job runs as the user who created the job.
           
           Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
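
A hedged sketch of the clarified `run_as` semantics; the job name and the service principal application ID are placeholders, and a real job would also define tasks:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Either user_name or service_principal_name may be set on JobRunAs, not both.
job = w.jobs.create(
    name='nightly-etl',
    run_as=jobs.JobRunAs(service_principal_name='00000000-0000-0000-0000-000000000000'),
)
```
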
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
index 1480fc978..00d12fa36 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -7,6 +7,22 @@
     Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
     workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
 
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
+
+        Delete the AI/BI dashboard embedding access policy.
+        
+        Delete the AI/BI dashboard embedding access policy, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+        
+
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting
 
         Retrieve the AI/BI dashboard embedding access policy.
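
A minimal sketch of the read -> delete pattern the docstring recommends, assuming the standard nested settings accessor; the same flow applies to the approved-domains setting below:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Read the current setting to obtain a fresh etag, then delete with it so a
# concurrent write causes a conflict instead of a silent overwrite.
setting = w.settings.aibi_dashboard_embedding_access_policy.get()
w.settings.aibi_dashboard_embedding_access_policy.delete(etag=setting.etag)
```
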
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
index 09b12056e..d793e9a7c 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -7,6 +7,23 @@
     Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
     can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
 
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
+
+        Delete AI/BI dashboard embedding approved domains.
+        
+        Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
+        empty list.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+        
+
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting
 
         Retrieve the list of domains approved to host embedded AI/BI dashboards.
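
The approved-domains setting follows the same pattern. A short sketch; the nested `aibi_dashboard_embedding_approved_domains.approved_domains` field name is an assumption based on the generated setting dataclass:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

setting = w.settings.aibi_dashboard_embedding_approved_domains.get()
print(setting.aibi_dashboard_embedding_approved_domains.approved_domains)

# Revert to the default empty list, using the etag to guard against
# a concurrent update of the same setting.
w.settings.aibi_dashboard_embedding_approved_domains.delete(etag=setting.etag)
```
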
diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst
index 97ea1014d..f22c7c96b 100644
--- a/docs/workspace/sql/dashboards.rst
+++ b/docs/workspace/sql/dashboards.rst
@@ -1,5 +1,5 @@
-``w.dashboards``: Dashboards
-============================
+``w.dashboards``: Dashboards (legacy)
+=====================================
 .. currentmodule:: databricks.sdk.service.sql
 
 .. py:class:: DashboardsAPI

From 672da6f1323b803ec069c55e5d952163d1b397fe Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi <88379306+tanmay-db@users.noreply.github.com>
Date: Wed, 11 Dec 2024 10:59:16 +0100
Subject: [PATCH 077/136] [Release] Release v0.39.0 (#836)

### Bug Fixes

* Update Changelog file
([#830](https://github.com/databricks/databricks-sdk-py/pull/830)).


### Internal Changes

* Fix a couple of typos in open_ai_client.py
([#829](https://github.com/databricks/databricks-sdk-py/pull/829)).
* Update SDK to OpenAPI spec
([#834](https://github.com/databricks/databricks-sdk-py/pull/834)).


### API Changes:

 * Added `databricks.sdk.service.cleanrooms` package.
* Added `delete()` method for
[w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html)
workspace-level service.
* Added `delete()` method for
[w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html)
workspace-level service.
* Added `databricks_gcp_service_account` field for
`databricks.sdk.service.catalog.CreateCredentialRequest`.
* Added `databricks_gcp_service_account` field for
`databricks.sdk.service.catalog.CredentialInfo`.
* Added `gcp_options` field for
`databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`.
* Added `databricks_gcp_service_account` field for
`databricks.sdk.service.catalog.UpdateCredentialRequest`.
* Added `cached_query_schema` field for
`databricks.sdk.service.dashboards.QueryAttachment`.
 * Added .
* Removed `gcp_service_account_key` field for
`databricks.sdk.service.catalog.CreateCredentialRequest`.

OpenAPI SHA: 7016dcbf2e011459416cf408ce21143bcc4b3a25, Date: 2024-12-05
---
 CHANGELOG.md              | 28 ++++++++++++++++++++++++++++
 databricks/sdk/version.py |  2 +-
 2 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index d3e7aac0b..aea3eb9c8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,33 @@
 # Version changelog
 
+## [Release] Release v0.39.0
+
+### Bug Fixes
+
+ * Update Changelog file ([#830](https://github.com/databricks/databricks-sdk-py/pull/830)).
+
+
+### Internal Changes
+
+ * Fix a couple of typos in open_ai_client.py ([#829](https://github.com/databricks/databricks-sdk-py/pull/829)).
+ * Update SDK to OpenAPI spec ([#834](https://github.com/databricks/databricks-sdk-py/pull/834)).
+
+
+### API Changes:
+
+ * Added `databricks.sdk.service.cleanrooms` package.
+ * Added `delete()` method for [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service.
+ * Added `delete()` method for [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CredentialInfo`.
+ * Added `gcp_options` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.UpdateCredentialRequest`.
+ * Added `cached_query_schema` field for `databricks.sdk.service.dashboards.QueryAttachment`.
+ * Added .
+ * Removed `gcp_service_account_key` field for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+
+OpenAPI SHA: 7016dcbf2e011459416cf408ce21143bcc4b3a25, Date: 2024-12-05
+
 ## [Release] Release v0.38.0
 
 ### New Features and Improvements
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 457618b15..31a9ee722 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.38.0'
+__version__ = '0.39.0'

From 8975d07c0cd5f3e15ce6aabf8bc905b367740b5f Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Thu, 19 Dec 2024 16:49:14 +0100
Subject: [PATCH 078/136] [Release] Release v0.40.0 (#840)

### API Changes:

* Added
[a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html)
account-level service and
[a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html)
account-level service.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.ClusterAttributes`.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.ClusterDetails`.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.ClusterSpec`.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.CreateCluster`.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.EditCluster`.
* Added `is_single_node`, `kind` and `use_ml_runtime` fields for
`databricks.sdk.service.compute.UpdateClusterResource`.
* Added `update_parameter_syntax` field for
`databricks.sdk.service.dashboards.MigrateDashboardRequest`.
* Added `clean_rooms_notebook_task` field for
`databricks.sdk.service.jobs.RunTask`.
* Added `clean_rooms_notebook_task` field for
`databricks.sdk.service.jobs.SubmitTask`.
* Added `clean_rooms_notebook_task` field for
`databricks.sdk.service.jobs.Task`.
* Changed `days_of_week` field for
`databricks.sdk.service.pipelines.RestartWindow` to type
`databricks.sdk.service.pipelines.RestartWindowDaysOfWeekList`
dataclass.

OpenAPI SHA: a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d, Date: 2024-12-16

Co-authored-by: Renaud Hartert 
---
 .codegen/_openapi_sha                         |   2 +-
 CHANGELOG.md                                  |  19 +
 databricks/sdk/__init__.py                    |  16 +-
 databricks/sdk/service/catalog.py             |   4 +-
 databricks/sdk/service/compute.py             | 424 +++++++++++++---
 databricks/sdk/service/dashboards.py          |  18 +-
 databricks/sdk/service/jobs.py                | 103 +++-
 databricks/sdk/service/oauth2.py              | 461 ++++++++++++++++++
 databricks/sdk/service/pipelines.py           |  10 +-
 databricks/sdk/version.py                     |   2 +-
 docs/account/oauth2/federation_policy.rst     |  99 ++++
 docs/account/oauth2/index.rst                 |   2 +
 .../service_principal_federation_policy.rst   | 109 +++++
 docs/dbdataclasses/catalog.rst                |  12 +-
 docs/dbdataclasses/compute.rst                |  21 +-
 docs/dbdataclasses/jobs.rst                   |   8 +-
 docs/dbdataclasses/oauth2.rst                 |  12 +
 docs/workspace/compute/clusters.rst           |  80 ++-
 docs/workspace/dashboards/lakeview.rst        |   5 +-
 19 files changed, 1290 insertions(+), 117 deletions(-)
 create mode 100644 docs/account/oauth2/federation_policy.rst
 create mode 100644 docs/account/oauth2/service_principal_federation_policy.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 68cd2f4be..8622b29ca 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-7016dcbf2e011459416cf408ce21143bcc4b3a25
\ No newline at end of file
+a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index aea3eb9c8..4f7aa3cc2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,24 @@
 # Version changelog
 
+## [Release] Release v0.40.0
+
+### API Changes:
+
+ * Added [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service and [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterAttributes`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterDetails`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterSpec`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.CreateCluster`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.EditCluster`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.UpdateClusterResource`.
+ * Added `update_parameter_syntax` field for `databricks.sdk.service.dashboards.MigrateDashboardRequest`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.RunTask`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.SubmitTask`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.Task`.
+ * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.RestartWindowDaysOfWeekList` dataclass.
+
+OpenAPI SHA: a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d, Date: 2024-12-16
+
 ## [Release] Release v0.39.0
 
 ### Bug Fixes
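
For the `days_of_week` change called out in the changelog entry above, a minimal sketch of the new list-typed field; the schedule values are arbitrary examples:

```python
from databricks.sdk.service import pipelines

# days_of_week now takes a list of RestartWindowDaysOfWeek values
# instead of a single value.
window = pipelines.RestartWindow(
    start_hour=2,
    days_of_week=[
        pipelines.RestartWindowDaysOfWeek.SATURDAY,
        pipelines.RestartWindowDaysOfWeek.SUNDAY,
    ],
    time_zone_id='UTC',
)
print(window.as_dict())
```
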
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index beb3fd7bb..068069f04 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -56,9 +56,11 @@
     ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
     ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
 from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
-from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
+from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
+                                           CustomAppIntegrationAPI,
                                            OAuthPublishedAppsAPI,
                                            PublishedAppIntegrationAPI,
+                                           ServicePrincipalFederationPolicyAPI,
                                            ServicePrincipalSecretsAPI)
 from databricks.sdk.service.pipelines import PipelinesAPI
 from databricks.sdk.service.provisioning import (CredentialsAPI,
@@ -826,6 +828,7 @@ def __init__(self,
         self._credentials = CredentialsAPI(self._api_client)
         self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = EncryptionKeysAPI(self._api_client)
+        self._federation_policy = AccountFederationPolicyAPI(self._api_client)
         self._groups = AccountGroupsAPI(self._api_client)
         self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
         self._log_delivery = LogDeliveryAPI(self._api_client)
@@ -836,6 +839,7 @@ def __init__(self,
         self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
         self._private_access = PrivateAccessAPI(self._api_client)
         self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = ServicePrincipalFederationPolicyAPI(self._api_client)
         self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
         self._service_principals = AccountServicePrincipalsAPI(self._api_client)
         self._settings = AccountSettingsAPI(self._api_client)
@@ -881,6 +885,11 @@ def encryption_keys(self) -> EncryptionKeysAPI:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
+    @property
+    def federation_policy(self) -> AccountFederationPolicyAPI:
+        """These APIs manage account federation policies."""
+        return self._federation_policy
+
     @property
     def groups(self) -> AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
@@ -931,6 +940,11 @@ def published_app_integration(self) -> PublishedAppIntegrationAPI:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
+    @property
+    def service_principal_federation_policy(self) -> ServicePrincipalFederationPolicyAPI:
+        """These APIs manage service principal federation policies."""
+        return self._service_principal_federation_policy
+
     @property
     def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 0798bb5b6..f1b549339 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -3704,8 +3704,8 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRespons
 class GetBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
-    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
 
 
@@ -7067,8 +7067,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
 class UpdateBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
-    SERVICE_CREDENTIAL = 'service_credential'
     STORAGE_CREDENTIAL = 'storage_credential'
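
To see the renamed enum member in use, a hedged sketch of listing workspace bindings for a credential; `my_credential` is a placeholder, and the iteration style assumes `get_bindings()` paginates like other generated list methods:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

# CREDENTIAL replaces the former SERVICE_CREDENTIAL member of this enum.
for binding in w.workspace_bindings.get_bindings(
        securable_type=catalog.GetBindingsSecurableType.CREDENTIAL,
        securable_name='my_credential'):
    print(binding.workspace_id, binding.binding_type)
```
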
 
 
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index d8be32003..0afdb6f19 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -659,13 +659,19 @@ class ClusterAttributes:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -706,6 +712,20 @@ class ClusterAttributes:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -750,6 +770,12 @@ class ClusterAttributes:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -773,6 +799,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
@@ -781,6 +809,7 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -805,6 +834,8 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
@@ -813,6 +844,7 @@ def as_shallow_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -834,6 +866,8 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    policy_id=d.get('policy_id', None),
                    runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
@@ -842,6 +876,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -948,13 +983,19 @@ class ClusterDetails:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -1015,10 +1056,24 @@ class ClusterDetails:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
     jdbc_port: Optional[int] = None
     """Port on which Spark JDBC server is listening, in the driver nod. No service will be listeningon
     on this port in executor nodes."""
 
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     last_restarted_time: Optional[int] = None
     """the timestamp that the cluster was started/restarted"""
 
@@ -1111,6 +1166,12 @@ class ClusterDetails:
     """Information about why the cluster was terminated. This field only appears when the cluster is in
     a `TERMINATING` or `TERMINATED` state."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -1144,7 +1205,9 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
         if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
         if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
@@ -1163,6 +1226,7 @@ def as_dict(self) -> dict:
         if self.state_message is not None: body['state_message'] = self.state_message
         if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
         if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict()
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -1197,7 +1261,9 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
         if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
+        if self.kind is not None: body['kind'] = self.kind
         if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
         if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
@@ -1216,6 +1282,7 @@ def as_shallow_dict(self) -> dict:
         if self.state_message is not None: body['state_message'] = self.state_message
         if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
         if self.termination_reason: body['termination_reason'] = self.termination_reason
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -1247,7 +1314,9 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
                    jdbc_port=d.get('jdbc_port', None),
+                   kind=_enum(d, 'kind', Kind),
                    last_restarted_time=d.get('last_restarted_time', None),
                    last_state_loss_time=d.get('last_state_loss_time', None),
                    node_type_id=d.get('node_type_id', None),
@@ -1266,6 +1335,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
                    state_message=d.get('state_message', None),
                    terminated_time=d.get('terminated_time', None),
                    termination_reason=_from_dict(d, 'termination_reason', TerminationReason),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -1870,13 +1940,19 @@ class ClusterSpec:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -1917,6 +1993,20 @@ class ClusterSpec:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -1975,6 +2065,12 @@ class ClusterSpec:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -2001,6 +2097,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2010,6 +2108,7 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -2037,6 +2136,8 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2046,6 +2147,7 @@ def as_shallow_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -2069,6 +2171,8 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -2078,6 +2182,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -2251,13 +2356,19 @@ class CreateCluster:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -2298,6 +2409,20 @@ class CreateCluster:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -2352,6 +2477,12 @@ class CreateCluster:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -2379,6 +2510,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2388,6 +2521,7 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -2416,6 +2550,8 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2425,6 +2561,7 @@ def as_shallow_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -2449,6 +2586,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -2458,6 +2597,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
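
The `as_dict()`/`from_dict()` pairs above round-trip the new enum field through its string value, as a small sketch shows (the Spark version string is a placeholder):

```python
from databricks.sdk.service import compute

spec = compute.CreateCluster(
    spark_version='15.4.x-scala2.12',
    kind=compute.Kind.CLASSIC_PREVIEW,
    is_single_node=True,
)

# as_dict() renders enums as their string values; from_dict() restores them.
payload = spec.as_dict()
assert payload['kind'] == 'CLASSIC_PREVIEW'
restored = compute.CreateCluster.from_dict(payload)
assert restored.kind is compute.Kind.CLASSIC_PREVIEW
```
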
 
 
@@ -2848,13 +2988,19 @@ class DataPlaneEventDetailsEventType(Enum):
 class DataSecurityMode(Enum):
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -2865,6 +3011,9 @@ class DataSecurityMode(Enum):
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
     doesn’t have UC nor passthrough enabled."""
 
+    DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO'
+    DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED'
+    DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD'
     LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
     LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
     LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD'
@@ -3306,13 +3455,19 @@ class EditCluster:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -3353,6 +3508,20 @@ class EditCluster:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -3407,6 +3576,12 @@ class EditCluster:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -3434,6 +3609,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -3443,6 +3620,7 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -3471,6 +3649,8 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -3480,6 +3660,7 @@ def as_shallow_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -3504,6 +3685,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -3513,6 +3696,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -5642,6 +5826,17 @@ def from_dict(cls, d: Dict[str, any]) -> InstanceProfile:
                    is_meta_instance_profile=d.get('is_meta_instance_profile', None))
 
 
+class Kind(Enum):
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
+    CLASSIC_PREVIEW = 'CLASSIC_PREVIEW'
+
+
 class Language(Enum):
 
     PYTHON = 'python'
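
Putting the new fields together, a hedged sketch of creating a single-node cluster through the simple-form `kind = CLASSIC_PREVIEW` path; the node type is a cloud-specific placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

# With is_single_node=True, the docstrings above say Databricks fills in the
# single-node custom_tags, spark_conf, and num_workers automatically.
cluster = w.clusters.create(
    cluster_name='single-node-preview',
    spark_version=w.clusters.select_spark_version(latest=True),
    node_type_id='i3.xlarge',
    kind=compute.Kind.CLASSIC_PREVIEW,
    is_single_node=True,
    data_security_mode=compute.DataSecurityMode.DATA_SECURITY_MODE_AUTO,
    autotermination_minutes=60,
).result()
print(cluster.cluster_id)
```
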
@@ -7560,13 +7755,19 @@ class UpdateClusterResource:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -7607,6 +7808,20 @@ class UpdateClusterResource:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single node related `custom_tags`,
+    `spark_conf`, and `num_workers`"""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -7665,6 +7880,12 @@ class UpdateClusterResource:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (the DBR release), this field
+    (`use_ml_runtime`), and whether `node_type_id` is a GPU node."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
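Taken together, the new fields keep a kind-based cluster spec minimal: `spark_version` stays a plain DBR release string and the ML variant is derived from `use_ml_runtime`. A sketch of constructing the updated resource (all field values are placeholders):

```python
from databricks.sdk.service.compute import Kind, UpdateClusterResource

spec = UpdateClusterResource(
    spark_version='15.4.x-scala2.12',  # placeholder DBR release, not an -ml variant
    kind=Kind.CLASSIC_PREVIEW,         # opt in to kind-based validation and defaults
    is_single_node=True,               # backend fills single-node custom_tags/spark_conf/num_workers
    use_ml_runtime=True,               # effective_spark_version resolves to the ML runtime
)
body = spec.as_dict()  # only the fields that were set appear in the request body
```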
@@ -7689,6 +7910,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -7698,6 +7921,7 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
@@ -7723,6 +7947,8 @@ def as_shallow_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
         if self.init_scripts: body['init_scripts'] = self.init_scripts
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -7732,6 +7958,7 @@ def as_shallow_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type
         return body
 
@@ -7754,6 +7981,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -7763,6 +7992,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -8301,6 +8531,8 @@ def create(self,
                gcp_attributes: Optional[GcpAttributes] = None,
                init_scripts: Optional[List[InitScriptInfo]] = None,
                instance_pool_id: Optional[str] = None,
+               is_single_node: Optional[bool] = None,
+               kind: Optional[Kind] = None,
                node_type_id: Optional[str] = None,
                num_workers: Optional[int] = None,
                policy_id: Optional[str] = None,
@@ -8309,6 +8541,7 @@ def create(self,
                spark_conf: Optional[Dict[str, str]] = None,
                spark_env_vars: Optional[Dict[str, str]] = None,
                ssh_public_keys: Optional[List[str]] = None,
+               use_ml_runtime: Optional[bool] = None,
                workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
         """Create new cluster.
         
@@ -8364,13 +8597,19 @@ def create(self,
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -8402,6 +8641,17 @@ def create(self,
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single-node-related `custom_tags`,
+          `spark_conf`, and `num_workers`.
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -8448,6 +8698,11 @@ def create(self,
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (the DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -8475,6 +8730,8 @@ def create(self,
         if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
         if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
         if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
+        if is_single_node is not None: body['is_single_node'] = is_single_node
+        if kind is not None: body['kind'] = kind.value
         if node_type_id is not None: body['node_type_id'] = node_type_id
         if num_workers is not None: body['num_workers'] = num_workers
         if policy_id is not None: body['policy_id'] = policy_id
@@ -8484,6 +8741,7 @@ def create(self,
         if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
         if spark_version is not None: body['spark_version'] = spark_version
         if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -8514,6 +8772,8 @@ def create_and_wait(
         gcp_attributes: Optional[GcpAttributes] = None,
         init_scripts: Optional[List[InitScriptInfo]] = None,
         instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
         node_type_id: Optional[str] = None,
         num_workers: Optional[int] = None,
         policy_id: Optional[str] = None,
@@ -8522,6 +8782,7 @@ def create_and_wait(
         spark_conf: Optional[Dict[str, str]] = None,
         spark_env_vars: Optional[Dict[str, str]] = None,
         ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
         timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.create(apply_policy_default_values=apply_policy_default_values,
@@ -8542,6 +8803,8 @@ def create_and_wait(
                            gcp_attributes=gcp_attributes,
                            init_scripts=init_scripts,
                            instance_pool_id=instance_pool_id,
+                           is_single_node=is_single_node,
+                           kind=kind,
                            node_type_id=node_type_id,
                            num_workers=num_workers,
                            policy_id=policy_id,
@@ -8551,6 +8814,7 @@ def create_and_wait(
                            spark_env_vars=spark_env_vars,
                            spark_version=spark_version,
                            ssh_public_keys=ssh_public_keys,
+                           use_ml_runtime=use_ml_runtime,
                            workload_type=workload_type).result(timeout=timeout)
 
     def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
@@ -8600,6 +8864,8 @@ def edit(self,
              gcp_attributes: Optional[GcpAttributes] = None,
              init_scripts: Optional[List[InitScriptInfo]] = None,
              instance_pool_id: Optional[str] = None,
+             is_single_node: Optional[bool] = None,
+             kind: Optional[Kind] = None,
              node_type_id: Optional[str] = None,
              num_workers: Optional[int] = None,
              policy_id: Optional[str] = None,
@@ -8608,6 +8874,7 @@ def edit(self,
              spark_conf: Optional[Dict[str, str]] = None,
              spark_env_vars: Optional[Dict[str, str]] = None,
              ssh_public_keys: Optional[List[str]] = None,
+             use_ml_runtime: Optional[bool] = None,
              workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
         """Update cluster configuration.
         
@@ -8663,13 +8930,19 @@ def edit(self,
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -8701,6 +8974,17 @@ def edit(self,
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single-node-related `custom_tags`,
+          `spark_conf`, and `num_workers`.
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -8747,6 +9031,11 @@ def edit(self,
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (the DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -8774,6 +9063,8 @@ def edit(self,
         if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
         if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
         if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
+        if is_single_node is not None: body['is_single_node'] = is_single_node
+        if kind is not None: body['kind'] = kind.value
         if node_type_id is not None: body['node_type_id'] = node_type_id
         if num_workers is not None: body['num_workers'] = num_workers
         if policy_id is not None: body['policy_id'] = policy_id
@@ -8783,6 +9074,7 @@ def edit(self,
         if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
         if spark_version is not None: body['spark_version'] = spark_version
         if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -8813,6 +9105,8 @@ def edit_and_wait(
         gcp_attributes: Optional[GcpAttributes] = None,
         init_scripts: Optional[List[InitScriptInfo]] = None,
         instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
         node_type_id: Optional[str] = None,
         num_workers: Optional[int] = None,
         policy_id: Optional[str] = None,
@@ -8821,6 +9115,7 @@ def edit_and_wait(
         spark_conf: Optional[Dict[str, str]] = None,
         spark_env_vars: Optional[Dict[str, str]] = None,
         ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
         timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.edit(apply_policy_default_values=apply_policy_default_values,
@@ -8841,6 +9136,8 @@ def edit_and_wait(
                          gcp_attributes=gcp_attributes,
                          init_scripts=init_scripts,
                          instance_pool_id=instance_pool_id,
+                         is_single_node=is_single_node,
+                         kind=kind,
                          node_type_id=node_type_id,
                          num_workers=num_workers,
                          policy_id=policy_id,
@@ -8850,6 +9147,7 @@ def edit_and_wait(
                          spark_env_vars=spark_env_vars,
                          spark_version=spark_version,
                          ssh_public_keys=ssh_public_keys,
+                         use_ml_runtime=use_ml_runtime,
                          workload_type=workload_type).result(timeout=timeout)
 
     def events(self,
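End to end, the new `create` parameters compose like this (a hedged sketch; cluster name, node type, and release strings are placeholders, and `DATA_SECURITY_MODE_AUTO` is one of the `kind`-only aliases documented above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import DataSecurityMode, Kind

w = WorkspaceClient()

cluster = w.clusters.create_and_wait(
    spark_version='15.4.x-scala2.12',   # placeholder DBR release
    cluster_name='kind-based-cluster',  # placeholder
    node_type_id='i3.xlarge',           # placeholder node type
    kind=Kind.CLASSIC_PREVIEW,
    is_single_node=True,
    data_security_mode=DataSecurityMode.DATA_SECURITY_MODE_AUTO,
    autotermination_minutes=60,
)
print(cluster.cluster_id)
```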
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index da908cb2d..34bd58995 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -711,12 +711,18 @@ class MigrateDashboardRequest:
     parent_path: Optional[str] = None
     """The workspace path of the folder to contain the migrated Lakeview dashboard."""
 
+    update_parameter_syntax: Optional[bool] = None
+    """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named
+    syntax (:param) when converting datasets in the dashboard."""
+
     def as_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None:
+            body['update_parameter_syntax'] = self.update_parameter_syntax
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -725,6 +731,8 @@ def as_shallow_dict(self) -> dict:
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None:
+            body['update_parameter_syntax'] = self.update_parameter_syntax
         return body
 
     @classmethod
@@ -732,7 +740,8 @@ def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
         """Deserializes the MigrateDashboardRequest from a dictionary."""
         return cls(display_name=d.get('display_name', None),
                    parent_path=d.get('parent_path', None),
-                   source_dashboard_id=d.get('source_dashboard_id', None))
+                   source_dashboard_id=d.get('source_dashboard_id', None),
+                   update_parameter_syntax=d.get('update_parameter_syntax', None))
 
 
 @dataclass
@@ -1759,7 +1768,8 @@ def migrate(self,
                 source_dashboard_id: str,
                 *,
                 display_name: Optional[str] = None,
-                parent_path: Optional[str] = None) -> Dashboard:
+                parent_path: Optional[str] = None,
+                update_parameter_syntax: Optional[bool] = None) -> Dashboard:
         """Migrate dashboard.
         
         Migrates a classic SQL dashboard to Lakeview.
@@ -1770,6 +1780,9 @@ def migrate(self,
           Display name for the new Lakeview dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder to contain the migrated Lakeview dashboard.
+        :param update_parameter_syntax: bool (optional)
+          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+          (:param) when converting datasets in the dashboard.
         
         :returns: :class:`Dashboard`
         """
@@ -1777,6 +1790,7 @@ def migrate(self,
         if display_name is not None: body['display_name'] = display_name
         if parent_path is not None: body['parent_path'] = parent_path
         if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id
+        if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/lakeview/dashboards/migrate', body=body, headers=headers)
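A short sketch of the new flag in use (the dashboard ID and names are placeholders; `w.lakeview` is the usual handle for this API on `WorkspaceClient`):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Rewrites {{ param }} references to :param named syntax while converting
# the classic dashboard's datasets to Lakeview.
dashboard = w.lakeview.migrate(
    source_dashboard_id='abcd1234',            # placeholder classic dashboard ID
    display_name='Migrated sales dashboard',   # placeholder
    update_parameter_syntax=True,
)
print(dashboard.dashboard_id)
```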
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index a991c7c50..105c7cd22 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -209,7 +209,8 @@ class BaseRun:
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     trigger_info: Optional[TriggerInfo] = None
     """Additional details about what triggered the run"""
@@ -449,7 +450,7 @@ class CleanRoomTaskRunResultState(Enum):
 
 @dataclass
 class CleanRoomTaskRunState:
-    """Stores the run state of the clean room notebook V1 task."""
+    """Stores the run state of the clean rooms notebook task."""
 
     life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None
     """A value indicating the run's current lifecycle state. This field is always available in the
@@ -479,6 +480,48 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomTaskRunState:
                    result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState))
 
 
+@dataclass
+class CleanRoomsNotebookTask:
+    clean_room_name: str
+    """The clean room that the notebook belongs to."""
+
+    notebook_name: str
+    """Name of the notebook being run."""
+
+    etag: Optional[str] = None
+    """Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the
+    latest version). It can be fetched by calling the :method:cleanroomassets/get API."""
+
+    notebook_base_parameters: Optional[Dict[str, str]] = None
+    """Base parameters to be used for the clean room notebook job."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTask:
+        """Deserializes the CleanRoomsNotebookTask from a dictionary."""
+        return cls(clean_room_name=d.get('clean_room_name', None),
+                   etag=d.get('etag', None),
+                   notebook_base_parameters=d.get('notebook_base_parameters', None),
+                   notebook_name=d.get('notebook_name', None))
+
+
 @dataclass
 class ClusterInstance:
     cluster_id: Optional[str] = None
@@ -2526,11 +2569,11 @@ class JobsHealthMetric(Enum):
     
     * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
     An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
-    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
-    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
-    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate
+    of the maximum consumer delay across all streams. This metric is in Public Preview. *
     `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
-    streams. This metric is in Private Preview."""
+    streams. This metric is in Public Preview."""
 
     RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
     STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
@@ -2552,11 +2595,11 @@ class JobsHealthRule:
     
     * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
     An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
-    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
-    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
-    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate
+    of the maximum consumer delay across all streams. This metric is in Public Preview. *
     `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
-    streams. This metric is in Private Preview."""
+    streams. This metric is in Public Preview."""
 
     op: JobsHealthOperator
     """Specifies the operator used to compare the health metric value with the specified threshold."""
@@ -3711,7 +3754,8 @@ class Run:
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     trigger_info: Optional[TriggerInfo] = None
     """Additional details about what triggered the run"""
@@ -4653,6 +4697,11 @@ class RunTask:
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     cleanup_duration: Optional[int] = None
     """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts.
     The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the
@@ -4820,6 +4869,8 @@ def as_dict(self) -> dict:
         """Serializes the RunTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
         if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
@@ -4864,6 +4915,7 @@ def as_shallow_dict(self) -> dict:
         """Serializes the RunTask into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
         if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
         if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
         if self.condition_task: body['condition_task'] = self.condition_task
@@ -4908,6 +4960,8 @@ def as_shallow_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> RunTask:
         """Deserializes the RunTask from a dictionary."""
         return cls(attempt_number=d.get('attempt_number', None),
+                   clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
                    cleanup_duration=d.get('cleanup_duration', None),
                    cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
                    condition_task=_from_dict(d, 'condition_task', RunConditionTask),
@@ -5753,6 +5807,11 @@ class SubmitTask:
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
     `condition_task` field is present. The condition task does not require a cluster to execute and
@@ -5857,6 +5916,8 @@ class SubmitTask:
     def as_dict(self) -> dict:
         """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
@@ -5886,6 +5947,7 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the SubmitTask into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
         if self.condition_task: body['condition_task'] = self.condition_task
         if self.dbt_task: body['dbt_task'] = self.dbt_task
         if self.depends_on: body['depends_on'] = self.depends_on
@@ -5915,7 +5977,9 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
-        return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
+                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
@@ -5997,6 +6061,11 @@ class Task:
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     condition_task: Optional[ConditionTask] = None
     """The task evaluates a condition that can be used to control the execution of other tasks when the
     `condition_task` field is present. The condition task does not require a cluster to execute and
@@ -6126,6 +6195,8 @@ class Task:
     def as_dict(self) -> dict:
         """Serializes the Task into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
@@ -6162,6 +6233,7 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the Task into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
         if self.condition_task: body['condition_task'] = self.condition_task
         if self.dbt_task: body['dbt_task'] = self.dbt_task
         if self.depends_on: body['depends_on'] = self.depends_on
@@ -6198,7 +6270,9 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Task:
         """Deserializes the Task from a dictionary."""
-        return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
+                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
@@ -6610,7 +6684,8 @@ class TriggerType(Enum):
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     FILE_ARRIVAL = 'FILE_ARRIVAL'
     ONE_TIME = 'ONE_TIME'
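A sketch of submitting a one-off run with the new task type (clean room, notebook, and parameter names are placeholders; `etag` is omitted, so no freshness check is requested):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import CleanRoomsNotebookTask, SubmitTask

w = WorkspaceClient()

run = w.jobs.submit(
    run_name='clean-room-example',  # placeholder
    tasks=[
        SubmitTask(
            task_key='clean_rooms_nb',
            clean_rooms_notebook_task=CleanRoomsNotebookTask(
                clean_room_name='my_clean_room',  # placeholder
                notebook_name='shared_analysis',  # placeholder
                notebook_base_parameters={'run_date': '2024-01-01'},
            ),
        )
    ],
).result()
print(run.state)
```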
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 11a83b3ab..f7df5a25e 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -288,6 +288,60 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         return cls()
 
 
+@dataclass
+class FederationPolicy:
+    create_time: Optional[str] = None
+    """Creation time of the federation policy."""
+
+    description: Optional[str] = None
+    """Description of the federation policy."""
+
+    name: Optional[str] = None
+    """Name of the federation policy. The name must contain only lowercase alphanumeric characters,
+    numbers, and hyphens. It must be unique within the account."""
+
+    oidc_policy: Optional[OidcFederationPolicy] = None
+    """Specifies the policy to use for validating OIDC claims in your federated tokens."""
+
+    uid: Optional[str] = None
+    """Unique, immutable id of the federation policy."""
+
+    update_time: Optional[str] = None
+    """Last update time of the federation policy."""
+
+    def as_dict(self) -> dict:
+        """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict()
+        if self.uid is not None: body['uid'] = self.uid
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy
+        if self.uid is not None: body['uid'] = self.uid
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> FederationPolicy:
+        """Deserializes the FederationPolicy from a dictionary."""
+        return cls(create_time=d.get('create_time', None),
+                   description=d.get('description', None),
+                   name=d.get('name', None),
+                   oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy),
+                   uid=d.get('uid', None),
+                   update_time=d.get('update_time', None))
+
+
 @dataclass
 class GetCustomAppIntegrationOutput:
     client_id: Optional[str] = None
@@ -498,6 +552,33 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListFederationPoliciesResponse:
+    next_page_token: Optional[str] = None
+
+    policies: Optional[List[FederationPolicy]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = self.policies
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListFederationPoliciesResponse:
+        """Deserializes the ListFederationPoliciesResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   policies=_repeated_dict(d, 'policies', FederationPolicy))
+
+
 @dataclass
 class ListServicePrincipalSecretsResponse:
     next_page_token: Optional[str] = None
@@ -527,6 +608,64 @@ def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
                    secrets=_repeated_dict(d, 'secrets', SecretInfo))
 
 
+@dataclass
+class OidcFederationPolicy:
+    """Specifies the policy to use for validating OIDC claims in your federated tokens."""
+
+    audiences: Optional[List[str]] = None
+    """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience
+    identifier is intended to represent the recipient of the token. Can be any non-empty string
+    value. As long as the audience in the token matches at least one audience in the policy, the
+    token is considered a match. If audiences is unspecified, defaults to your Databricks account
+    id."""
+
+    issuer: Optional[str] = None
+    """The required token issuer, as specified in the 'iss' claim of federated tokens."""
+
+    jwks_json: Optional[str] = None
+    """The public keys used to validate the signature of federated tokens, in JWKS format. If
+    unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s
+    well known endpoint. Databricks strongly recommends relying on your issuer’s well known
+    endpoint for discovering public keys."""
+
+    subject: Optional[str] = None
+    """The required token subject, as specified in the subject claim of federated tokens. Must be
+    specified for service principal federation policies. Must not be specified for account
+    federation policies."""
+
+    subject_claim: Optional[str] = None
+    """The claim that contains the subject of the token. If unspecified, the default value is 'sub'."""
+
+    def as_dict(self) -> dict:
+        """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.audiences: body['audiences'] = [v for v in self.audiences]
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.audiences: body['audiences'] = self.audiences
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OidcFederationPolicy:
+        """Deserializes the OidcFederationPolicy from a dictionary."""
+        return cls(audiences=d.get('audiences', None),
+                   issuer=d.get('issuer', None),
+                   jwks_json=d.get('jwks_json', None),
+                   subject=d.get('subject', None),
+                   subject_claim=d.get('subject_claim', None))
+
+
 @dataclass
 class PublishedAppOutput:
     app_id: Optional[str] = None
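The two new dataclasses compose as policy-plus-claims; a sketch with placeholder issuer and audience values:

```python
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

policy = FederationPolicy(
    description='Tokens from the corporate IdP',  # placeholder
    oidc_policy=OidcFederationPolicy(
        issuer='https://idp.mycompany.com/oidc',  # placeholder issuer URL
        audiences=['databricks'],  # placeholder; defaults to the account id if omitted
        subject_claim='sub',       # the default, shown for clarity
    ),
)
request_body = policy.as_dict()  # only populated fields are serialized
```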
@@ -769,6 +908,158 @@ def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput:
         return cls()
 
 
+class AccountFederationPolicyAPI:
+    """These APIs manage account federation policies.
+    
+    Account federation policies allow users and service principals in your Databricks account to securely
+    access Databricks APIs using tokens from your trusted identity providers (IdPs).
+    
+    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+    synchronized into your Databricks account.
+    
+    Token federation is configured in your Databricks account using an account federation policy. An account
+    federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
+    how to determine which Databricks user, or subject, a token is issued for
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
+    token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
+    represent the recipient of the token. As long as the audience in the token matches at least one audience
+    in the policy, the token is considered a match. If unspecified, the default value is your Databricks
+    account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
+    the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
+    public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
+    Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
+    strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
+    
+    An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
+    subject_claim: "sub" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
+    `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
+    "username@mycompany.com" } ```
+    
+    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+    your users do not already have the ability to generate tokens that are compatible with your federation
+    policy.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               policy_id: Optional[str] = None) -> FederationPolicy:
+        """Create account federation policy.
+        
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
+                           query=query,
+                           body=body,
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def delete(self, policy_id: str):
+        """Delete account federation policy.
+        
+        :param policy_id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                     headers=headers)
+
+    def get(self, policy_id: str) -> FederationPolicy:
+        """Get account federation policy.
+        
+        :param policy_id: str
+        
+        :returns: :class:`FederationPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+        """List account federation policies.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
+                                query=query,
+                                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield FederationPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self,
+               policy_id: str,
+               update_mask: str,
+               *,
+               policy: Optional[FederationPolicy] = None) -> FederationPolicy:
+        """Update account federation policy.
+        
+        :param policy_id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no spaces).
+        :param policy: :class:`FederationPolicy` (optional)
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                           query=query,
+                           body=body,
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
+
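Pulling the pieces together, a hedged sketch of managing account federation policies (this assumes the new API is exposed on `AccountClient` as `federation_policy`, which is wired up elsewhere in this series; issuer, audience, and description values are placeholders):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()

created = a.federation_policy.create(policy=FederationPolicy(
    oidc_policy=OidcFederationPolicy(
        issuer='https://idp.mycompany.com/oidc',  # placeholder
        audiences=['databricks'],                 # placeholder
        subject_claim='sub',
    )))

# list() pages through results transparently via next_page_token.
for p in a.federation_policy.list():
    print(p.uid, p.name)

# Partial update: update_mask names the fields to change (comma-separated, no spaces).
a.federation_policy.update(created.uid, 'description',
                           policy=FederationPolicy(description='Corporate IdP tokens'))
```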
 class CustomAppIntegrationAPI:
     """These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
@@ -1086,6 +1377,176 @@ def update(self, integration_id: str, *, token_access_policy: Optional[TokenAcce
             headers=headers)
 
 
+class ServicePrincipalFederationPolicyAPI:
+    """These APIs manage service principal federation policies.
+    
+    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+    Databricks service principal, using tokens provided by the workload runtime.
+    
+    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+    possible. Workload Identity Federation is supported by many popular services, including GitHub
+    Actions, Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+    
+    Workload identity federation is configured in your Databricks account using a service principal federation
+    policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
+    allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
+    Databricks service principal
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
+    workload identity provider. * The required token __subject__, as specified in the “sub” claim of
+    workload identity tokens. The subject uniquely identifies the workload in the workload runtime
+    environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
+    tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
+    token matches at least one audience in the policy, the token is considered a match. If unspecified, the
+    default value is your Databricks account id. * Optionally, the public keys used to validate the signature
+    of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
+    fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
+    the issuer’s well known endpoint for discovering public keys.
+    
+    An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer:
+    "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
+    "repo:my-github-org/my-repo:environment:prod" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
+    { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
+    "repo:my-github-org/my-repo:environment:prod" } ```
+    
+    You may also need to configure the workload runtime to generate tokens for your workloads.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               service_principal_id: int,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               policy_id: Optional[str] = None) -> FederationPolicy:
+        """Create service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'POST',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+            query=query,
+            body=body,
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def delete(self, service_principal_id: int, policy_id: str):
+        """Delete service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            headers=headers)
+
+    def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy:
+        """Get service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        
+        :returns: :class:`FederationPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def list(self,
+             service_principal_id: int,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+        """List service principal federation policies.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+                query=query,
+                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield FederationPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self,
+               service_principal_id: int,
+               policy_id: str,
+               update_mask: str,
+               *,
+               policy: Optional[FederationPolicy] = None) -> FederationPolicy:
+        """Update service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param policy: :class:`FederationPolicy` (optional)
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            query=query,
+            body=body,
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+
 class ServicePrincipalSecretsAPI:
     """These APIs enable administrators to manage service principal secrets.
     
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 0ded4a83b..8f8b015c5 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -11,7 +11,7 @@
 from typing import Callable, Dict, Iterator, List, Optional
 
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict
+from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
 _LOG = logging.getLogger('databricks.sdk')
 
@@ -2105,7 +2105,7 @@ class RestartWindow:
     """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
     Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
 
-    days_of_week: Optional[RestartWindowDaysOfWeek] = None
+    days_of_week: Optional[List[RestartWindowDaysOfWeek]] = None
     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
     start_hour). If not specified all days of the week will be used."""
 
@@ -2117,7 +2117,7 @@ class RestartWindow:
     def as_dict(self) -> dict:
         """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.days_of_week is not None: body['days_of_week'] = self.days_of_week.value
+        if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week]
         if self.start_hour is not None: body['start_hour'] = self.start_hour
         if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
         return body
@@ -2125,7 +2125,7 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.days_of_week is not None: body['days_of_week'] = self.days_of_week
+        if self.days_of_week: body['days_of_week'] = self.days_of_week
         if self.start_hour is not None: body['start_hour'] = self.start_hour
         if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
         return body
@@ -2133,7 +2133,7 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
         """Deserializes the RestartWindow from a dictionary."""
-        return cls(days_of_week=_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
+        return cls(days_of_week=_repeated_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
                    start_hour=d.get('start_hour', None),
                    time_zone_id=d.get('time_zone_id', None))
 
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 31a9ee722..eb9b6f12e 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.39.0'
+__version__ = '0.40.0'
diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst
new file mode 100644
index 000000000..4bee8675d
--- /dev/null
+++ b/docs/account/oauth2/federation_policy.rst
@@ -0,0 +1,99 @@
+``a.federation_policy``: Account Federation Policies
+====================================================
+.. currentmodule:: databricks.sdk.service.oauth2
+
+.. py:class:: AccountFederationPolicyAPI
+
+    These APIs manage account federation policies.
+    
+    Account federation policies allow users and service principals in your Databricks account to securely
+    access Databricks APIs using tokens from your trusted identity providers (IdPs).
+    
+    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+    synchronized into your Databricks account.
+    
+    Token federation is configured in your Databricks account using an account federation policy. An account
+    federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
+    how to determine which Databricks user, or subject, a token is issued for
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
+    token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
+    represent the recipient of the token. As long as the audience in the token matches at least one audience
+    in the policy, the token is considered a match. If unspecified, the default value is your Databricks
+    account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
+    the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
+    public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
+    Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
+    strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
+    
+    An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
+    subject_claim: "sub" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
+    `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
+    "username@mycompany.com" } ```
+    
+    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+    your users do not already have the ability to generate tokens that are compatible with your federation
+    policy.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
+
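+    A minimal usage sketch (the ``AccountClient`` named ``a`` and the policy values are
+    illustrative; ``FederationPolicy`` and ``OidcFederationPolicy`` are the request
+    dataclasses from :mod:`databricks.sdk.service.oauth2`):
+
+    .. code-block:: python
+
+        from databricks.sdk import AccountClient
+        from databricks.sdk.service.oauth2 import (FederationPolicy,
+                                                   OidcFederationPolicy)
+
+        a = AccountClient()
+        # Accept tokens from the company IdP; the "sub" claim carries the
+        # Databricks username of the user the token was issued for.
+        created = a.federation_policy.create(policy=FederationPolicy(
+            oidc_policy=OidcFederationPolicy(issuer="https://idp.mycompany.com/oidc",
+                                             audiences=["databricks"],
+                                             subject_claim="sub")))
+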
+    .. py:method:: create( [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
+
+        Create account federation policy.
+        
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: delete(policy_id: str)
+
+        Delete account federation policy.
+        
+        :param policy_id: str
+        
+        
+        
+
+    .. py:method:: get(policy_id: str) -> FederationPolicy
+
+        Get account federation policy.
+        
+        :param policy_id: str
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
+
+        List account federation policies.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
+
+    .. py:method:: update(policy_id: str, update_mask: str [, policy: Optional[FederationPolicy]]) -> FederationPolicy
+
+        Update account federation policy.
+        
+        :param policy_id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param policy: :class:`FederationPolicy` (optional)
+        
+        :returns: :class:`FederationPolicy`
+        
\ No newline at end of file
diff --git a/docs/account/oauth2/index.rst b/docs/account/oauth2/index.rst
index a4663ef6b..745a3e721 100644
--- a/docs/account/oauth2/index.rst
+++ b/docs/account/oauth2/index.rst
@@ -8,6 +8,8 @@ Configure OAuth 2.0 application registrations for Databricks
    :maxdepth: 1
 
    custom_app_integration
+   federation_policy
    o_auth_published_apps
    published_app_integration
+   service_principal_federation_policy
    service_principal_secrets
\ No newline at end of file
diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst
new file mode 100644
index 000000000..e4293c5f2
--- /dev/null
+++ b/docs/account/oauth2/service_principal_federation_policy.rst
@@ -0,0 +1,109 @@
+``a.service_principal_federation_policy``: Service Principal Federation Policies
+================================================================================
+.. currentmodule:: databricks.sdk.service.oauth2
+
+.. py:class:: ServicePrincipalFederationPolicyAPI
+
+    These APIs manage service principal federation policies.
+    
+    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+    Databricks service principal, using tokens provided by the workload runtime.
+    
+    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+    possible. Workload Identity Federation is supported by many popular services, including GitHub Actions,
+    Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+    
+    Workload identity federation is configured in your Databricks account using a service principal federation
+    policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
+    allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
+    Databricks service principal
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
+    workload identity provider. * The required token __subject__, as specified in the “sub” claim of
+    workload identity tokens. The subject uniquely identifies the workload in the workload runtime
+    environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
+    tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
+    token matches at least one audience in the policy, the token is considered a match. If unspecified, the
+    default value is your Databricks account id. * Optionally, the public keys used to validate the signature
+    of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
+    fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
+    the issuer’s well known endpoint for discovering public keys.
+    
+    An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer:
+    "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
+    "repo:my-github-org/my-repo:environment:prod" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
+    { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
+    "repo:my-github-org/my-repo:environment:prod" } ```
+    
+    You may also need to configure the workload runtime to generate tokens for your workloads.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+
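+    A minimal usage sketch for the GitHub Actions example above (the ``AccountClient``
+    named ``a`` and the service principal id are illustrative):
+
+    .. code-block:: python
+
+        from databricks.sdk import AccountClient
+        from databricks.sdk.service.oauth2 import (FederationPolicy,
+                                                   OidcFederationPolicy)
+
+        a = AccountClient()
+        # Let the "prod" environment of my-repo authenticate as service
+        # principal 12345.
+        created = a.service_principal_federation_policy.create(
+            service_principal_id=12345,
+            policy=FederationPolicy(oidc_policy=OidcFederationPolicy(
+                issuer="https://token.actions.githubusercontent.com",
+                audiences=["https://github.com/my-github-org"],
+                subject="repo:my-github-org/my-repo:environment:prod")))
+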
+    .. py:method:: create(service_principal_id: int [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
+
+        Create service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: delete(service_principal_id: int, policy_id: str)
+
+        Delete service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        
+        
+        
+
+    .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy
+
+        Get service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
+
+        List service principal federation policies.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
+
+    .. py:method:: update(service_principal_id: int, policy_id: str, update_mask: str [, policy: Optional[FederationPolicy]]) -> FederationPolicy
+
+        Update service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+        :param update_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To specify
+          multiple fields in the field mask, use a comma as the separator (no space).
+        :param policy: :class:`FederationPolicy` (optional)
+        
+        :returns: :class:`FederationPolicy`
+        
\ No newline at end of file
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 5b5fbb379..84f3c9867 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -745,12 +745,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
-   .. py:attribute:: SERVICE_CREDENTIAL
-      :value: "SERVICE_CREDENTIAL"
-
    .. py:attribute:: STORAGE_CREDENTIAL
       :value: "STORAGE_CREDENTIAL"
 
@@ -1460,12 +1460,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
-   .. py:attribute:: SERVICE_CREDENTIAL
-      :value: "SERVICE_CREDENTIAL"
-
    .. py:attribute:: STORAGE_CREDENTIAL
       :value: "STORAGE_CREDENTIAL"
 
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index 0066f0374..9c628c476 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -316,10 +316,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: DataSecurityMode
 
    Data security mode decides what data governance model to use when accessing data from a cluster.
-   * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.
+   The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
+   The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.
    The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions:
    * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.
 
+   .. py:attribute:: DATA_SECURITY_MODE_AUTO
+      :value: "DATA_SECURITY_MODE_AUTO"
+
+   .. py:attribute:: DATA_SECURITY_MODE_DEDICATED
+      :value: "DATA_SECURITY_MODE_DEDICATED"
+
+   .. py:attribute:: DATA_SECURITY_MODE_STANDARD
+      :value: "DATA_SECURITY_MODE_STANDARD"
+
    .. py:attribute:: LEGACY_PASSTHROUGH
       :value: "LEGACY_PASSTHROUGH"
 
@@ -782,6 +792,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: Kind
+
+   The kind of compute described by this compute specification.
+   Depending on `kind`, different validations and default values will be applied.
+   The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+
+   .. py:attribute:: CLASSIC_PREVIEW
+      :value: "CLASSIC_PREVIEW"
+
 .. py:class:: Language
 
    .. py:attribute:: PYTHON
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index 374c48351..cbb4059a1 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -103,6 +103,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CleanRoomsNotebookTask
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterInstance
    :members:
    :undoc-members:
@@ -392,7 +396,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: JobsHealthMetric
 
    Specifies the health metric that is being evaluated for a particular health rule.
-   * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Private Preview.
+   * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview.
 
    .. py:attribute:: RUN_DURATION_SECONDS
       :value: "RUN_DURATION_SECONDS"
@@ -975,7 +979,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: TriggerType
 
    The type of trigger that fired this run.
-   * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update.
+   * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs when you trigger a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to manually restart a continuous job run.
 
    .. py:attribute:: FILE_ARRIVAL
       :value: "FILE_ARRIVAL"
diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst
index 6265f6648..70e09ab05 100644
--- a/docs/dbdataclasses/oauth2.rst
+++ b/docs/dbdataclasses/oauth2.rst
@@ -40,6 +40,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: FederationPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetCustomAppIntegrationOutput
    :members:
    :undoc-members:
@@ -60,10 +64,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListFederationPoliciesResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListServicePrincipalSecretsResponse
    :members:
    :undoc-members:
 
+.. autoclass:: OidcFederationPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: PublishedAppOutput
    :members:
    :undoc-members:
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index 24fe2d253..c51a61602 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -71,7 +71,7 @@
         
         
 
-    .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
+    .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
 
         Usage:
@@ -152,13 +152,19 @@
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -190,6 +196,17 @@
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -236,6 +253,11 @@
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -243,7 +265,7 @@
           See :method:wait_get_cluster_running for more details.
         
 
-    .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+    .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
 
     .. py:method:: delete(cluster_id: str) -> Wait[ClusterDetails]
@@ -292,7 +314,7 @@
     .. py:method:: delete_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
 
-    .. py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
+    .. py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
 
         Usage:
@@ -380,13 +402,19 @@
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -418,6 +446,17 @@
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -464,6 +503,11 @@
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -471,7 +515,7 @@
           See :method:wait_get_cluster_running for more details.
         
 
-    .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+    .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
 
     .. py:method:: ensure_cluster_is_running(cluster_id: str)
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index b8dceeb9e..c37479dcb 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -174,7 +174,7 @@
         :returns: Iterator over :class:`Subscription`
         
 
-    .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str]]) -> Dashboard
+    .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard
 
         Migrate dashboard.
         
@@ -186,6 +186,9 @@
           Display name for the new Lakeview dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder to contain the migrated Lakeview dashboard.
+        :param update_parameter_syntax: bool (optional)
+          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+          (:param) when converting datasets in the dashboard.
         
         :returns: :class:`Dashboard`
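+
+        A sketch of a migration call that also rewrites mustache parameters (``w`` is
+        a workspace client and the source dashboard id is illustrative):
+
+        .. code-block:: python
+
+            dash = w.lakeview.migrate(source_dashboard_id="abc123",
+                                      display_name="Migrated dashboard",
+                                      update_parameter_syntax=True)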
         

From 6d6923ebbfab94db5a3fee990501a276f4de7149 Mon Sep 17 00:00:00 2001
From: Pieter Noordhuis 
Date: Mon, 6 Jan 2025 17:44:18 +0100
Subject: [PATCH 079/136] [Internal] Migrate workflows that need write access
 to use hosted runners (#850)

Fixes #848.
---
 .github/workflows/external-message.yml  |  9 ++++---
 .github/workflows/integration-tests.yml | 34 +++++++++++++++++--------
 .github/workflows/release-test.yml      |  7 ++++-
 .github/workflows/release.yml           |  7 ++++-
 4 files changed, 41 insertions(+), 16 deletions(-)

diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml
index a2d9dc2e8..6771057c7 100644
--- a/.github/workflows/external-message.yml
+++ b/.github/workflows/external-message.yml
@@ -13,7 +13,10 @@ on:
 
 jobs:
   comment-on-pr:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     permissions:
       pull-requests: write
 
@@ -44,13 +47,13 @@ jobs:
           gh pr comment ${{ github.event.pull_request.number }} --body \
           "
           If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
-          
+
           Trigger:
           [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
 
           Inputs:
           * PR number: ${{github.event.pull_request.number}}
           * Commit SHA: \`${{ env.COMMIT_SHA }}\`
-          
+
           Checks will be approved automatically on success.
           "
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
index 93a6c2676..c308cc03c 100644
--- a/.github/workflows/integration-tests.yml
+++ b/.github/workflows/integration-tests.yml
@@ -6,12 +6,16 @@ on:
     types: [opened, synchronize]
 
   merge_group:
-  
+
 
 jobs:
   check-token:
     name: Check secrets access
-    runs-on: ubuntu-latest
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: "test-trigger-is"
     outputs:
       has_token: ${{ steps.set-token-status.outputs.has_token }}
@@ -26,14 +30,18 @@ jobs:
               echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
               echo "::set-output name=has_token::true"
             fi
-    
+
   trigger-tests:
     name: Trigger Tests
-    runs-on: ubuntu-latest
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     needs: check-token
     if: github.event_name == 'pull_request'  && needs.check-token.outputs.has_token == 'true'
     environment: "test-trigger-is"
-    
+
     steps:
     - uses: actions/checkout@v3
 
@@ -45,7 +53,7 @@ jobs:
         private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
         owner: ${{ secrets.ORG_NAME }}
         repositories: ${{secrets.REPO_NAME}}
-    
+
     - name: Trigger Workflow in Another Repo
       env:
         GH_TOKEN: ${{ steps.generate-token.outputs.token }}
@@ -53,18 +61,22 @@ jobs:
         gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
         --ref main \
         -f pull_request_number=${{ github.event.pull_request.number }} \
-        -f commit_sha=${{ github.event.pull_request.head.sha }} 
+        -f commit_sha=${{ github.event.pull_request.head.sha }}
 
-  # Statuses and checks apply to specific commits (by hash). 
+  # Statuses and checks apply to specific commits (by hash).
   # Enforcement of required checks is done both at the PR level and the merge queue level.
-  # In case of multiple commits in a single PR, the hash of the squashed commit 
+  # In case of multiple commits in a single PR, the hash of the squashed commit
   # will not match the one for the latest (approved) commit in the PR.
   # We auto approve the check for the merge queue for two reasons:
   # * Queue times out due to duration of tests.
   # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
   auto-approve:
     if: github.event_name == 'merge_group'
-    runs-on: ubuntu-latest
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     steps:
       - name: Mark Check
         env:
@@ -75,4 +87,4 @@ jobs:
               -H "X-GitHub-Api-Version: 2022-11-28" \
               /repos/${{ github.repository }}/statuses/${{ github.sha }} \
               -f 'state=success' \
-              -f 'context=Integration Tests Check'
\ No newline at end of file
+              -f 'context=Integration Tests Check'
diff --git a/.github/workflows/release-test.yml b/.github/workflows/release-test.yml
index c3349b75a..0e8c4d8e0 100644
--- a/.github/workflows/release-test.yml
+++ b/.github/workflows/release-test.yml
@@ -5,10 +5,15 @@ on:
 
 jobs:
   publish:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: release-test
+
     permissions:
       id-token: write
+
     steps:
       - uses: actions/checkout@v3
 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ecde40e08..32890bde6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,11 +7,16 @@ on:
 
 jobs:
   publish:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: release
+
     permissions:
       contents: write
       id-token: write
+
     steps:
       - uses: actions/checkout@v3
 

From d907c0cfa45402e7223efe7b633e0dd26f2ca7bd Mon Sep 17 00:00:00 2001
From: Kirill Safonov <122353021+ksafonov-db@users.noreply.github.com>
Date: Wed, 8 Jan 2025 16:15:28 +0100
Subject: [PATCH 080/136] [Feature] Files API client: recover on download
 failures (#844) (#845)

## What changes are proposed in this pull request?

1. Extends the Files API client to support resuming downloads on failure. The
new implementation tracks the current offset in the input stream and, in case
of an error, issues a new download request from that point.
2. The new code path is enabled by the
'DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT' config parameter.
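
A minimal sketch of opting in from client code (the volume path and file name
are illustrative):

```python
import os

from databricks.sdk import WorkspaceClient

# Opt in to the experimental, recovering Files API client before the client is built.
os.environ['DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT'] = 'true'

w = WorkspaceClient()
resp = w.files.download('/Volumes/main/default/my_volume/big-file.bin')
with resp.contents as f:
    data = f.read()  # on a dropped connection, the stream resumes from the current offset
```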

## How is this tested?

Added unit tests for the new code path:
`% python3 -m pytest tests/test_files.py`

---------

Signed-off-by: Kirill Safonov 
---
 databricks/sdk/__init__.py     |  11 +-
 databricks/sdk/_base_client.py |  19 +-
 databricks/sdk/config.py       |   5 +
 databricks/sdk/mixins/files.py | 185 +++++++++++++++++-
 tests/test_base_client.py      |  10 +-
 tests/test_files.py            | 340 +++++++++++++++++++++++++++++++++
 6 files changed, 559 insertions(+), 11 deletions(-)
 create mode 100644 tests/test_files.py

diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 068069f04..d27110b86 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -5,7 +5,7 @@
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.files import DbfsExt, FilesExt
 from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
@@ -114,6 +114,13 @@ def _make_dbutils(config: client.Config):
     return runtime_dbutils
 
 
+def _make_files_client(apiClient: client.ApiClient, config: client.Config):
+    if config.enable_experimental_files_api_client:
+        return FilesExt(apiClient, config)
+    else:
+        return FilesAPI(apiClient)
+
+
 class WorkspaceClient:
     """
     The WorkspaceClient is a client for the workspace-level Databricks REST API.
@@ -203,7 +210,7 @@ def __init__(self,
         self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
         self._experiments = ExperimentsAPI(self._api_client)
         self._external_locations = ExternalLocationsAPI(self._api_client)
-        self._files = FilesAPI(self._api_client)
+        self._files = _make_files_client(self._api_client, self._config)
         self._functions = FunctionsAPI(self._api_client)
         self._genie = GenieAPI(self._api_client)
         self._git_credentials = GitCredentialsAPI(self._api_client)
diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index ed85dc470..e61dd39c3 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -1,6 +1,7 @@
 import io
 import logging
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -285,8 +286,20 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) ->
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
 class _StreamingResponse(BinaryIO):
-    _response: requests.Response
+    _response: _RawResponse
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
     _chunk_size: Union[int, None]
@@ -298,7 +311,7 @@ def fileno(self) -> int:
     def flush(self) -> int:
         pass
 
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+    def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
         self._buffer = b''
         self._content = None
@@ -308,7 +321,7 @@ def _open(self) -> None:
         if self._closed:
             raise ValueError("I/O operation on closed file")
         if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+            self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
 
     def __enter__(self) -> BinaryIO:
         self._open()
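
The `_RawResponse` contract is small enough to fake; a minimal in-memory
implementation (hypothetical, mirroring the `DummyResponse` used in the test
changes further below) shows what `_StreamingResponse` now expects from its
source:

```python
from databricks.sdk._base_client import _RawResponse, _StreamingResponse


class InMemoryRawResponse(_RawResponse):
    """Hypothetical _RawResponse serving a fixed bytes payload."""

    def __init__(self, payload: bytes):
        self._payload = payload

    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
        assert not decode_unicode  # binary-only, matching the SDK's usage
        for i in range(0, len(self._payload), chunk_size):
            yield self._payload[i:i + chunk_size]

    def close(self):
        pass


stream = _StreamingResponse(InMemoryRawResponse(b'hello world'), chunk_size=4)
assert stream.read() == b'hello world'
stream.close()
```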
diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index 387fa65c5..490c6ba4e 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -92,6 +92,11 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    files_api_client_download_max_total_recovers = None
+    files_api_client_download_max_total_recovers_without_progressing = 1
+
     def __init__(
             self,
             *,
diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 1e109a1a7..678b4b630 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -8,19 +9,27 @@
 import sys
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
 from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
-                    Iterator, Type, Union)
+                    Optional, Type, Union)
 from urllib import parse
 
+from requests import RequestException
+
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
+from ..service._internal import _escape_multi_segment_path_parameter
+from ..service.files import DownloadResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
+_LOG = logging.getLogger(__name__)
+
 
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
@@ -636,3 +645,177 @@ def delete(self, path: str, *, recursive=False):
         if p.is_dir and not recursive:
             raise IOError('deleting directories requires recursive flag')
         p.delete(recursive=recursive)
+
+
+class FilesExt(files.FilesAPI):
+    __doc__ = files.FilesAPI.__doc__
+
+    def __init__(self, api_client, config: Config):
+        super().__init__(api_client)
+        self._config = config.copy()
+
+    def download(self, file_path: str) -> DownloadResponse:
+        """Download a file.
+
+        Downloads a file of any size. The file contents are the response body.
+        This is a standard HTTP file download, not a JSON RPC.
+
+        For fault tolerance, it is strongly recommended to consume the
+        stream iteratively with a bounded read(size) rather than reading
+        the entire contents in a single call.
+
+        :param file_path: str
+          The remote path of the file, e.g. /Volumes/path/to/your/file
+
+        :returns: :class:`DownloadResponse`
+        """
+
+        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
+                                                                       start_byte_offset=0,
+                                                                       if_unmodified_since_timestamp=None)
+
+        wrapped_response = self._wrap_stream(file_path, initial_response)
+        initial_response.contents._response = wrapped_response
+        return initial_response
+
+    def _download_raw_stream(self,
+                             file_path: str,
+                             start_byte_offset: int,
+                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
+        headers = {'Accept': 'application/octet-stream', }
+
+        if start_byte_offset and not if_unmodified_since_timestamp:
+            raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
+
+        if start_byte_offset:
+            headers['Range'] = f'bytes={start_byte_offset}-'
+
+        if if_unmodified_since_timestamp:
+            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
+
+        response_headers = ['content-length', 'content-type', 'last-modified', ]
+        res = self._api.do('GET',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                           headers=headers,
+                           response_headers=response_headers,
+                           raw=True)
+
+        result = DownloadResponse.from_dict(res)
+        if not isinstance(result.contents, _StreamingResponse):
+            raise Exception("Internal error: response contents is of unexpected type: " +
+                            type(result.contents).__name__)
+
+        return result
+
+    def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
+        underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
+                                  underlying_response=underlying_response)
+
+
+class _ResilientResponse(_RawResponse):
+
+    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
+                 underlying_response: _RawResponse):
+        self.api = api
+        self.file_path = file_path
+        self.underlying_response = underlying_response
+        self.offset = offset
+        self.file_last_modified = file_last_modified
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        if decode_unicode:
+            raise ValueError('Decode unicode is not supported')
+
+        iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
+        return self.iterator
+
+    def close(self):
+        self.iterator.close()
+
+
+class _ResilientIterator(Iterator):
+    # This class tracks current offset (returned to the client code)
+    # and recovers from failures by requesting download from the current offset.
+
+    @staticmethod
+    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents # this is an instance of _StreamingResponse
+        return streaming_response._response
+
+    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
+                 api: FilesExt, chunk_size: int):
+        self._underlying_iterator = underlying_iterator
+        self._api = api
+        self._file_path = file_path
+
+        # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file
+        # that were so far returned to the caller code.
+        self._offset = offset
+        self._file_last_modified = file_last_modified
+        self._chunk_size = chunk_size
+
+        self._total_recovers_count: int = 0
+        self._recovers_without_progressing_count: int = 0
+        self._closed: bool = False
+
+    def _should_recover(self) -> bool:
+        if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
+            _LOG.debug("Total recovers limit exceeded")
+            return False
+        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+            _LOG.debug("No progression recovers limit exceeded")
+            return False
+        return True
+
+    def _recover(self) -> bool:
+        if not self._should_recover():
+            return False # recover suppressed, rethrow original exception
+
+        self._total_recovers_count += 1
+        self._recovers_without_progressing_count += 1
+
+        try:
+            self._underlying_iterator.close()
+
+            _LOG.debug("Trying to recover from offset " + str(self._offset))
+
+            # following call includes all the required network retries
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
+                                                              self._file_last_modified)
+            underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
+                                                                         decode_unicode=False)
+            _LOG.debug("Recover succeeded")
+            return True
+        except Exception:
+            return False # recover failed, rethrow original exception
+
+    def __next__(self):
+        if self._closed:
+            # following _BaseClient
+            raise ValueError("I/O operation on closed file")
+
+        while True:
+            try:
+                returned_bytes = next(self._underlying_iterator)
+                self._offset += len(returned_bytes)
+                self._recovers_without_progressing_count = 0
+                return returned_bytes
+
+            except StopIteration:
+                raise
+
+            # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
+            except RequestException:
+                if not self._recover():
+                    raise
+
+    def close(self):
+        self._underlying_iterator.close()
+        self._closed = True
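
The two new `Config` limits can be tuned per client; a sketch with
illustrative values, following the same pattern the tests below use
(`None` means unlimited, per the defaults added above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

cfg = Config()  # assumes host/credentials come from the environment
cfg.enable_experimental_files_api_client = True
# Allow at most 10 recovery attempts for a single download...
cfg.files_api_client_download_max_total_recovers = 10
# ...and give up after 2 consecutive recoveries that returned no new bytes.
cfg.files_api_client_download_max_total_recovers_without_progressing = 2

w = WorkspaceClient(config=cfg)
```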
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index 4b6aaa714..a9a9d5cc6 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -5,17 +5,17 @@
 from unittest.mock import Mock
 
 import pytest
-import requests
 
 from databricks.sdk import errors, useragent
-from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk._base_client import (_BaseClient, _RawResponse,
+                                         _StreamingResponse)
 from databricks.sdk.core import DatabricksError
 
 from .clock import FakeClock
 from .fixture_server import http_fixture_server
 
 
-class DummyResponse(requests.Response):
+class DummyResponse(_RawResponse):
     _content: Iterator[bytes]
     _closed: bool = False
 
@@ -293,9 +293,9 @@ def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
     test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
 
     content_chunks = []
-    mock_response = Mock(spec=requests.Response)
+    mock_response = Mock(spec=_RawResponse)
 
-    def mock_iter_content(chunk_size):
+    def mock_iter_content(chunk_size: int, decode_unicode: bool):
         # Simulate how requests would chunk the data.
         for i in range(0, len(test_data), chunk_size):
             chunk = test_data[i:i + chunk_size]
diff --git a/tests/test_files.py b/tests/test_files.py
new file mode 100644
index 000000000..f4d916f6f
--- /dev/null
+++ b/tests/test_files.py
@@ -0,0 +1,340 @@
+import logging
+import os
+import re
+from dataclasses import dataclass
+from typing import List, Union
+
+import pytest
+from requests import RequestException
+
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.core import Config
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class RequestData:
+
+    def __init__(self, offset: int):
+        self._offset: int = offset
+
+
+class DownloadTestCase:
+
+    def __init__(self, name: str, enable_new_client: bool, file_size: int,
+                 failure_at_absolute_offset: List[int], max_recovers_total: Union[int, None],
+                 max_recovers_without_progressing: Union[int, None], expected_success: bool,
+                 expected_requested_offsets: List[int]):
+        self.name = name
+        self.enable_new_client = enable_new_client
+        self.file_size = file_size
+        self.failure_at_absolute_offset = failure_at_absolute_offset
+        self.max_recovers_total = max_recovers_total
+        self.max_recovers_without_progressing = max_recovers_without_progressing
+        self.expected_success = expected_success
+        self.expected_requested_offsets = expected_requested_offsets
+
+    @staticmethod
+    def to_string(test_case):
+        return test_case.name
+
+    def run(self, config: Config):
+        config = config.copy()
+        config.enable_experimental_files_api_client = self.enable_new_client
+        config.files_api_client_download_max_total_recovers = self.max_recovers_total
+        config.files_api_client_download_max_total_recovers_without_progressing = self.max_recovers_without_progressing
+
+        w = WorkspaceClient(config=config)
+
+        session = MockSession(self)
+        w.files._api._api_client._session = session
+
+        response = w.files.download("/test").contents
+        if self.expected_success:
+            actual_content = response.read()
+            assert (len(actual_content) == len(session.content))
+            assert (actual_content == session.content)
+        else:
+            with pytest.raises(RequestException):
+                response.read()
+
+        received_requests = session.received_requests
+
+        assert (len(self.expected_requested_offsets) == len(received_requests))
+        for idx, requested_offset in enumerate(self.expected_requested_offsets):
+            assert (requested_offset == received_requests[idx]._offset)
+
+
+class MockSession:
+
+    def __init__(self, test_case: DownloadTestCase):
+        self.test_case: DownloadTestCase = test_case
+        self.received_requests: List[RequestData] = []
+        self.content: bytes = os.urandom(self.test_case.file_size)
+        self.failure_pointer = 0
+        self.last_modified = 'Thu, 28 Nov 2024 16:39:14 GMT'
+
+    # following the signature of Session.request()
+    def request(self,
+                method,
+                url,
+                params=None,
+                data=None,
+                headers=None,
+                cookies=None,
+                files=None,
+                auth=None,
+                timeout=None,
+                allow_redirects=True,
+                proxies=None,
+                hooks=None,
+                stream=None,
+                verify=None,
+                cert=None,
+                json=None):
+        assert method == 'GET'
+        assert stream == True
+
+        offset = 0
+        if "Range" in headers:
+            range = headers["Range"]
+            match = re.search("^bytes=(\\d+)-$", range)
+            if match:
+                offset = int(match.group(1))
+            else:
+                raise Exception("Unexpected range header: " + range)
+
+            if "If-Unmodified-Since" in headers:
+                assert (headers["If-Unmodified-Since"] == self.last_modified)
+            else:
+                raise Exception("If-Unmodified-Since header should be passed along with Range")
+
+        logger.info("Client requested offset: %s", offset)
+
+        if offset > len(self.content):
+            raise Exception("Offset %s exceeds file length %s", offset, len(self.content))
+
+        self.received_requests.append(RequestData(offset))
+        return MockResponse(self, offset, MockRequest(url))
+
+
+# required only for correct logging
+class MockRequest:
+
+    def __init__(self, url: str):
+        self.url = url
+        self.method = 'GET'
+        self.headers = dict()
+        self.body = None
+
+
+class MockResponse:
+
+    def __init__(self, session: MockSession, offset: int, request: MockRequest):
+        self.session = session
+        self.offset = offset
+        self.request = request
+        self.status_code = 200
+        self.reason = 'OK'
+        self.headers = dict()
+        self.headers['Content-Length'] = len(session.content) - offset
+        self.headers['Content-Type'] = 'application/octet-stream'
+        self.headers['Last-Modified'] = session.last_modified
+        self.ok = True
+        self.url = request.url
+
+    def iter_content(self, chunk_size: int, decode_unicode: bool):
+        assert decode_unicode == False
+        return MockIterator(self, chunk_size)
+
+
+class MockIterator:
+
+    def __init__(self, response: MockResponse, chunk_size: int):
+        self.response = response
+        self.chunk_size = chunk_size
+        self.offset = 0
+
+    def __next__(self):
+        start_offset = self.response.offset + self.offset
+        if start_offset == len(self.response.session.content):
+            raise StopIteration
+
+        end_offset = start_offset + self.chunk_size # exclusive, might be out of range
+
+        if self.response.session.failure_pointer < len(
+                self.response.session.test_case.failure_at_absolute_offset):
+            failure_after_byte = self.response.session.test_case.failure_at_absolute_offset[
+                self.response.session.failure_pointer]
+            if failure_after_byte < end_offset:
+                self.response.session.failure_pointer += 1
+                raise RequestException("Fake error")
+
+        result = self.response.session.content[start_offset:end_offset]
+        self.offset += len(result)
+        return result
+
+    def close(self):
+        pass
+
+
+class _Constants:
+    underlying_chunk_size = 1024 * 1024 # see ticket #832
+
+
+@pytest.mark.parametrize(
+    "test_case",
+    [
+        DownloadTestCase(name="Old client: no failures, file of 5 bytes",
+                         enable_new_client=False,
+                         file_size=5,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="Old client: no failures, file of 1.5 chunks",
+                         enable_new_client=False,
+                         file_size=int(1.5 * _Constants.underlying_chunk_size),
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(
+            name="Old client: failure",
+            enable_new_client=False,
+            file_size=1024,
+            failure_at_absolute_offset=[100],
+            max_recovers_total=None, # unlimited but ignored
+            max_recovers_without_progressing=None, # unlimited but ignored
+            expected_success=False,
+            expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 5 bytes",
+                         enable_new_client=True,
+                         file_size=5,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 1 Kb",
+                         enable_new_client=True,
+                         file_size=1024,
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=None,
+                         failure_at_absolute_offset=[],
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 1.5 chunks",
+                         enable_new_client=True,
+                         file_size=int(1.5 * _Constants.underlying_chunk_size),
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 10 chunks",
+                         enable_new_client=True,
+                         file_size=10 * _Constants.underlying_chunk_size,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: recovers are disabled, first failure leads to download abort",
+                         enable_new_client=True,
+                         file_size=10000,
+                         failure_at_absolute_offset=[5],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=False,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(
+            name="New client: unlimited recovers allowed",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 5,
+            # causes errors on requesting the third chunk
+            failure_at_absolute_offset=[
+                _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1,
+                _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size + 1,
+                _Constants.underlying_chunk_size * 3,
+            ],
+            max_recovers_total=None,
+            max_recovers_without_progressing=None,
+            expected_success=True,
+            expected_requested_offsets=[
+                0, 0, 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 3
+            ]),
+        DownloadTestCase(
+            name="New client: we respect limit on total recovers when progressing",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 10,
+            failure_at_absolute_offset=[
+                1,
+                _Constants.underlying_chunk_size + 1, # progressing
+                _Constants.underlying_chunk_size * 2 + 1, # progressing
+                _Constants.underlying_chunk_size * 3 + 1 # progressing
+            ],
+            max_recovers_total=3,
+            max_recovers_without_progressing=None,
+            expected_success=False,
+            expected_requested_offsets=[
+                0, 0, _Constants.underlying_chunk_size * 1, _Constants.underlying_chunk_size * 2
+            ]),
+        DownloadTestCase(name="New client: we respect limit on total recovers when not progressing",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 10,
+                         failure_at_absolute_offset=[1, 1, 1, 1],
+                         max_recovers_total=3,
+                         max_recovers_without_progressing=None,
+                         expected_success=False,
+                         expected_requested_offsets=[0, 0, 0, 0]),
+        DownloadTestCase(name="New client: we respect limit on non-progressing recovers",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 2,
+                         failure_at_absolute_offset=[
+                             _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1,
+                             _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1
+                         ],
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=3,
+                         expected_success=False,
+                         expected_requested_offsets=[0, 0, 0, 0]),
+        DownloadTestCase(
+            name="New client: non-progressing recovers count is reset when progressing",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 10,
+            failure_at_absolute_offset=[
+                _Constants.underlying_chunk_size + 1, # this recover is after progressing
+                _Constants.underlying_chunk_size + 1, # this is not
+                _Constants.underlying_chunk_size * 2 + 1, # this recover is after progressing
+                _Constants.underlying_chunk_size * 2 + 1, # this is not
+                _Constants.underlying_chunk_size * 2 + 1, # this is not, we abort here
+            ],
+            max_recovers_total=None,
+            max_recovers_without_progressing=2,
+            expected_success=False,
+            expected_requested_offsets=[
+                0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size,
+                _Constants.underlying_chunk_size * 2, _Constants.underlying_chunk_size * 2
+            ]),
+        DownloadTestCase(name="New client: non-progressing recovers count is reset when progressing - 2",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 10,
+                         failure_at_absolute_offset=[
+                             1, _Constants.underlying_chunk_size + 1, _Constants.underlying_chunk_size * 2 +
+                             1, _Constants.underlying_chunk_size * 3 + 1
+                         ],
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=1,
+                         expected_success=True,
+                         expected_requested_offsets=[
+                             0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 2,
+                             _Constants.underlying_chunk_size * 3
+                         ]),
+    ],
+    ids=DownloadTestCase.to_string)
+def test_download_recover(config: Config, test_case: DownloadTestCase):
+    test_case.run(config)

From cbae014ac73b99c659646daa1e0d42f939452567 Mon Sep 17 00:00:00 2001
From: Parth Bansal 
Date: Fri, 10 Jan 2025 12:05:04 +0100
Subject: [PATCH 081/136] [Internal] Decouple oauth2 and serving  (#855)

## What changes are proposed in this pull request?
This PR removes serving.py's indirect dependency on oauth2.py by moving
DataPlaneInfo into serving.py; the change is also reflected in the
OpenAPI specification. It additionally narrows import scopes to prevent
circular dependencies (see the import sketch below).
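
For downstream code, only the import path changes; a sketch (field values
illustrative):

```python
# Before this change:
#   from databricks.sdk.service.oauth2 import DataPlaneInfo
# After this change:
from databricks.sdk.service.serving import DataPlaneInfo

info = DataPlaneInfo(authorization_details='<details>',
                     endpoint_url='https://dataplane.example.com/endpoint')
```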

## How is this tested?
Existing Unit tests.
---
 .codegen/_openapi_sha               |   2 +-
 databricks/sdk/__init__.py          |   2 +
 databricks/sdk/data_plane.py        |   2 +-
 databricks/sdk/service/apps.py      |  14 ++--
 databricks/sdk/service/catalog.py   |   1 +
 databricks/sdk/service/jobs.py      |  99 ++++++++++++++++++++++-----
 databricks/sdk/service/oauth2.py    |  78 +++++++++------------
 databricks/sdk/service/pipelines.py | 101 ++++++++++++++++++++++++----
 databricks/sdk/service/serving.py   |  37 ++++++++--
 tests/test_data_plane.py            |   2 +-
 10 files changed, 254 insertions(+), 84 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 8622b29ca..dfe78790a 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
\ No newline at end of file
+779817ed8d63031f5ea761fbd25ee84f38feec0d
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index d27110b86..80fe188b8 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -1,3 +1,5 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
 from typing import Optional
 
 import databricks.sdk.core as client
diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py
index 6f6ddf80c..5ad9b79ad 100644
--- a/databricks/sdk/data_plane.py
+++ b/databricks/sdk/data_plane.py
@@ -3,7 +3,6 @@
 from typing import Callable, List
 
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 
 @dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:
 
 class DataPlaneService:
     """Helper class to fetch and manage DataPlane details."""
+    from .service.serving import DataPlaneInfo
 
     def __init__(self):
         self._data_plane_info = {}
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index eee49a212..37af1011d 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -967,12 +967,14 @@ def wait_get_app_stopped(self,
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self, *, app: Optional[App] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
         
         Creates a new app.
         
         :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
         
         :returns:
           Long-running operation waiter for :class:`App`.
@@ -981,11 +983,15 @@ def create(self, *, app: Optional[App] = None) -> Wait[App]:
         body = app.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
-    def create_and_wait(self, *, app: Optional[App] = None, timeout=timedelta(minutes=20)) -> App:
-        return self.create(app=app).result(timeout=timeout)
+    def create_and_wait(self,
+                        *,
+                        app: Optional[App] = None,
+                        no_compute: Optional[bool] = None,
+                        timeout=timedelta(minutes=20)) -> App:
+        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index f1b549339..c56acce32 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -5810,6 +5810,7 @@ def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo:
 class ProvisioningInfoState(Enum):
 
     ACTIVE = 'ACTIVE'
+    DEGRADED = 'DEGRADED'
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 105c7cd22..c5fdb8393 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -35,6 +35,11 @@ class BaseJob:
     Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
     on accessible budget policies of the run_as identity on job creation or modification."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
+    requests with `expand_tasks=true`."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -49,6 +54,7 @@ def as_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -60,6 +66,7 @@ def as_shallow_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings
         return body
@@ -70,6 +77,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
                    effective_budget_policy_id=d.get('effective_budget_policy_id', None),
+                   has_more=d.get('has_more', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
@@ -124,10 +132,16 @@ class BaseRun:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
+    :method:jobs/listruns requests with `expand_tasks=true`."""
+
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/getrun."""
 
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
@@ -198,7 +212,9 @@ class BaseRun:
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
-    `JobsGetOutput` to retrieve the run resutls."""
+    `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
+    paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
+    root to determine if more results are available."""
 
     trigger: Optional[TriggerType] = None
     """The type of trigger that fired this run.
@@ -227,6 +243,7 @@ def as_dict(self) -> dict:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
@@ -264,6 +281,7 @@ def as_shallow_dict(self) -> dict:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_clusters: body['job_clusters'] = self.job_clusters
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = self.job_parameters
@@ -301,6 +319,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   has_more=d.get('has_more', None),
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
@@ -754,7 +773,8 @@ class CreateJob:
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/get."""
 
     max_concurrent_runs: Optional[int] = None
     """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -795,7 +815,9 @@ class CreateJob:
     be added to the job."""
 
     tasks: Optional[List[Task]] = None
-    """A list of task specifications to be executed by this job."""
+    """A list of task specifications to be executed by this job. If more than 100 tasks are available,
+    you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
+    object root to determine if more results are available."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -1680,9 +1702,17 @@ class Job:
     Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
     on accessible budget policies of the run_as identity on job creation or modification."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
+    requests with `expand_tasks=true`."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
+    next_page_token: Optional[str] = None
+    """A token that can be used to list the next page of sub-resources."""
+
     run_as_user_name: Optional[str] = None
     """The email of an active workspace user or the application ID of a service principal that the job
     runs as. This value can be changed by setting the `run_as` field when creating or updating a
@@ -1703,7 +1733,9 @@ def as_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -1715,7 +1747,9 @@ def as_shallow_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings
         return body
@@ -1726,7 +1760,9 @@ def from_dict(cls, d: Dict[str, any]) -> Job:
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
                    effective_budget_policy_id=d.get('effective_budget_policy_id', None),
+                   has_more=d.get('has_more', None),
                    job_id=d.get('job_id', None),
+                   next_page_token=d.get('next_page_token', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
@@ -2366,7 +2402,8 @@ class JobSettings:
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/get."""
 
     max_concurrent_runs: Optional[int] = None
     """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -2407,7 +2444,9 @@ class JobSettings:
     be added to the job."""
 
     tasks: Optional[List[Task]] = None
-    """A list of task specifications to be executed by this job."""
+    """A list of task specifications to be executed by this job. If more than 100 tasks are available,
+    you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
+    object root to determine if more results are available."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -3663,13 +3702,19 @@ class Run:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
+    :method:jobs/listruns requests with `expand_tasks=true`."""
+
     iterations: Optional[List[RunTask]] = None
     """Only populated by for-each iterations. The parent for-each task is located in tasks array."""
 
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/getrun."""
 
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
@@ -3743,7 +3788,9 @@ class Run:
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
-    `JobsGetOutput` to retrieve the run resutls."""
+    `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
+    paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
+    root to determine if more results are available."""
 
     trigger: Optional[TriggerType] = None
     """The type of trigger that fired this run.
@@ -3772,6 +3819,7 @@ def as_dict(self) -> dict:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
@@ -3811,6 +3859,7 @@ def as_shallow_dict(self) -> dict:
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.iterations: body['iterations'] = self.iterations
         if self.job_clusters: body['job_clusters'] = self.job_clusters
         if self.job_id is not None: body['job_id'] = self.job_id
@@ -3850,6 +3899,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   has_more=d.get('has_more', None),
                    iterations=_repeated_dict(d, 'iterations', RunTask),
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
@@ -7066,6 +7116,7 @@ def create(self,
         :param job_clusters: List[:class:`JobCluster`] (optional)
           A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
           cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
         :param max_concurrent_runs: int (optional)
           An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
           able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -7097,7 +7148,9 @@ def create(self,
           clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
           to the job.
         :param tasks: List[:class:`Task`] (optional)
-          A list of task specifications to be executed by this job.
+          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+          to determine if more results are available.
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
         :param trigger: :class:`TriggerSettings` (optional)
@@ -7193,19 +7246,28 @@ def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] =
         res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
         return ExportRunOutput.from_dict(res)
 
-    def get(self, job_id: int) -> Job:
+    def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
         """Get a single job.
         
         Retrieves the details for a single job.
         
+        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
+        
         :param job_id: int
           The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
         
         :returns: :class:`Job`
         """
 
         query = {}
         if job_id is not None: query['job_id'] = job_id
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
@@ -7251,7 +7313,12 @@ def get_run(self,
                 page_token: Optional[str] = None) -> Run:
         """Get a single job run.
         
-        Retrieve the metadata of a run.
+        Retrieves the metadata of a run.
+        
+        In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
         
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
@@ -7260,8 +7327,8 @@ def get_run(self,
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
-          the GetJob response.
+          Use `next_page_token` returned from the previous GetRun to request the next page of the run's
+          sub-resources.
         
         :returns: :class:`Run`
         """
@@ -7313,7 +7380,8 @@ def list(self,
         Retrieves a list of jobs.
         
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
         :param limit: int (optional)
           The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
           default value is 20.
@@ -7370,7 +7438,8 @@ def list_runs(self,
           If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
           active and completed runs. This field cannot be `true` when active_only is `true`.
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
         :param job_id: int (optional)
           The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
         :param limit: int (optional)
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index f7df5a25e..1aac8bc1c 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -202,35 +202,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse:
                    update_time=d.get('update_time', None))
 
 
-@dataclass
-class DataPlaneInfo:
-    authorization_details: Optional[str] = None
-    """Authorization details as a string."""
-
-    endpoint_url: Optional[str] = None
-    """The URL of the endpoint for this operation in the dataplane."""
-
-    def as_dict(self) -> dict:
-        """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
-        """Deserializes the DataPlaneInfo from a dictionary."""
-        return cls(authorization_details=d.get('authorization_details', None),
-                   endpoint_url=d.get('endpoint_url', None))
-
-
 @dataclass
 class DeleteCustomAppIntegrationOutput:
 
@@ -297,8 +268,13 @@ class FederationPolicy:
     """Description of the federation policy."""
 
     name: Optional[str] = None
-    """Name of the federation policy. The name must contain only lowercase alphanumeric characters,
-    numbers, and hyphens. It must be unique within the account."""
+    """Resource name for the federation policy. Example values include
+    `accounts/<account-id>/federationPolicies/my-federation-policy` for Account Federation Policies,
+    and
+    `accounts/<account-id>/servicePrincipals/<service-principal-id>/federationPolicies/my-federation-policy`
+    for Service Principal Federation Policies. Typically an output parameter, which does not need to
+    be specified in create or update requests. If specified in a request, must match the value in
+    the request URL."""
 
     oidc_policy: Optional[OidcFederationPolicy] = None
     """Specifies the policy to use for validating OIDC claims in your federated tokens."""
@@ -961,7 +937,8 @@ def create(self,
         
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
-          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
         
         :returns: :class:`FederationPolicy`
         """
@@ -979,6 +956,7 @@ def delete(self, policy_id: str):
         """Delete account federation policy.
         
         :param policy_id: str
+          The identifier for the federation policy.
         
         
         """
@@ -993,6 +971,7 @@ def get(self, policy_id: str) -> FederationPolicy:
         """Get account federation policy.
         
         :param policy_id: str
+          The identifier for the federation policy.
         
         :returns: :class:`FederationPolicy`
         """
@@ -1035,17 +1014,20 @@ def list(self,
 
     def update(self,
                policy_id: str,
-               update_mask: str,
                *,
-               policy: Optional[FederationPolicy] = None) -> FederationPolicy:
+               policy: Optional[FederationPolicy] = None,
+               update_mask: Optional[str] = None) -> FederationPolicy:
         """Update account federation policy.
         
         :param policy_id: str
-        :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The identifier for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
         
         :returns: :class:`FederationPolicy`
         """
@@ -1433,7 +1415,8 @@ def create(self,
           The service principal id for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
-          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
         
         :returns: :class:`FederationPolicy`
         """
@@ -1454,6 +1437,7 @@ def delete(self, service_principal_id: int, policy_id: str):
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
+          The identifier for the federation policy.
         
         
         """
@@ -1471,6 +1455,7 @@ def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy:
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
+          The identifier for the federation policy.
         
         :returns: :class:`FederationPolicy`
         """
@@ -1519,19 +1504,22 @@ def list(self,
     def update(self,
                service_principal_id: int,
                policy_id: str,
-               update_mask: str,
                *,
-               policy: Optional[FederationPolicy] = None) -> FederationPolicy:
+               policy: Optional[FederationPolicy] = None,
+               update_mask: Optional[str] = None) -> FederationPolicy:
         """Update service principal federation policy.
         
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
-        :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The identifier for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
         
         :returns: :class:`FederationPolicy`
         """
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 8f8b015c5..db5d698d6 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -85,6 +85,14 @@ class CreatePipeline:
     restart_window: Optional[RestartWindow] = None
     """Restart window of this pipeline."""
 
+    run_as: Optional[RunAs] = None
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -126,6 +134,7 @@ def as_dict(self) -> dict:
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
         if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -156,6 +165,7 @@ def as_shallow_dict(self) -> dict:
         if self.notifications: body['notifications'] = self.notifications
         if self.photon is not None: body['photon'] = self.photon
         if self.restart_window: body['restart_window'] = self.restart_window
+        if self.run_as: body['run_as'] = self.run_as
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -186,6 +196,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
                    restart_window=_from_dict(d, 'restart_window', RestartWindow),
+                   run_as=_from_dict(d, 'run_as', RunAs),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -277,6 +288,19 @@ def from_dict(cls, d: Dict[str, any]) -> DataPlaneId:
         return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None))
 
 
+class DayOfWeek(Enum):
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified, all days of the week will be used."""
+
+    FRIDAY = 'FRIDAY'
+    MONDAY = 'MONDAY'
+    SATURDAY = 'SATURDAY'
+    SUNDAY = 'SUNDAY'
+    THURSDAY = 'THURSDAY'
+    TUESDAY = 'TUESDAY'
+    WEDNESDAY = 'WEDNESDAY'
+
+
 @dataclass
 class DeletePipelineResponse:
 
@@ -373,6 +397,14 @@ class EditPipeline:
     restart_window: Optional[RestartWindow] = None
     """Restart window of this pipeline."""
 
+    run_as: Optional[RunAs] = None
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -416,6 +448,7 @@ def as_dict(self) -> dict:
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -448,6 +481,7 @@ def as_shallow_dict(self) -> dict:
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         if self.restart_window: body['restart_window'] = self.restart_window
+        if self.run_as: body['run_as'] = self.run_as
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -479,6 +513,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
                    photon=d.get('photon', None),
                    pipeline_id=d.get('pipeline_id', None),
                    restart_window=_from_dict(d, 'restart_window', RestartWindow),
+                   run_as=_from_dict(d, 'run_as', RunAs),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -2105,7 +2140,7 @@ class RestartWindow:
     """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
     Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
 
-    days_of_week: Optional[List[RestartWindowDaysOfWeek]] = None
+    days_of_week: Optional[List[DayOfWeek]] = None
     """Days of week in which the restart is allowed to happen (within a five-hour window starting at
     start_hour). If not specified all days of the week will be used."""
 
@@ -2133,22 +2168,48 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
         """Deserializes the RestartWindow from a dictionary."""
-        return cls(days_of_week=_repeated_enum(d, 'days_of_week', RestartWindowDaysOfWeek),
+        return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek),
                    start_hour=d.get('start_hour', None),
                    time_zone_id=d.get('time_zone_id', None))
 
 
-class RestartWindowDaysOfWeek(Enum):
-    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
-    start_hour). If not specified all days of the week will be used."""
+@dataclass
+class RunAs:
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
 
-    FRIDAY = 'FRIDAY'
-    MONDAY = 'MONDAY'
-    SATURDAY = 'SATURDAY'
-    SUNDAY = 'SUNDAY'
-    THURSDAY = 'THURSDAY'
-    TUESDAY = 'TUESDAY'
-    WEDNESDAY = 'WEDNESDAY'
+    service_principal_name: Optional[str] = None
+    """Application ID of an active service principal. Setting this field requires the
+    `servicePrincipal/user` role."""
+
+    user_name: Optional[str] = None
+    """The email of an active workspace user. Users can only set this field to their own email."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RunAs into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunAs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RunAs:
+        """Deserializes the RunAs from a dictionary."""
+        return cls(service_principal_name=d.get('service_principal_name', None),
+                   user_name=d.get('user_name', None))
 
 
 @dataclass
@@ -2791,6 +2852,7 @@ def create(self,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
                restart_window: Optional[RestartWindow] = None,
+               run_as: Optional[RunAs] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -2843,6 +2905,12 @@ def create(self,
           Whether Photon is enabled for this pipeline.
         :param restart_window: :class:`RestartWindow` (optional)
           Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -2879,6 +2947,7 @@ def create(self,
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
         if restart_window is not None: body['restart_window'] = restart_window.as_dict()
+        if run_as is not None: body['run_as'] = run_as.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
@@ -3213,6 +3282,7 @@ def update(self,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
                restart_window: Optional[RestartWindow] = None,
+               run_as: Optional[RunAs] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -3268,6 +3338,12 @@ def update(self,
           Whether Photon is enabled for this pipeline.
         :param restart_window: :class:`RestartWindow` (optional)
           Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -3304,6 +3380,7 @@ def update(self,
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
         if restart_window is not None: body['restart_window'] = restart_window.as_dict()
+        if run_as is not None: body['run_as'] = run_as.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
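
A minimal sketch of the new `run_as` setting at pipeline creation time; the notebook path and service principal application id are hypothetical placeholders:

```python
# Sketch only: create a pipeline that runs as a service principal via the new
# run_as field. The notebook path and application id are placeholder values.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (NotebookLibrary, PipelineLibrary,
                                              RunAs)

w = WorkspaceClient()
created = w.pipelines.create(
    name='my-dlt-pipeline',
    libraries=[PipelineLibrary(notebook=NotebookLibrary(path='/Shared/dlt/my_notebook'))],
    run_as=RunAs(service_principal_name='00000000-0000-0000-0000-000000000000'),
)
```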
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index cb7861e88..1ada305cd 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -12,14 +12,11 @@
 
 import requests
 
-from ..data_plane import DataPlaneService
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
 _LOG = logging.getLogger('databricks.sdk')
 
-from databricks.sdk.service import oauth2
-
 # all definitions in this file are in alphabetical order
 
 
@@ -712,6 +709,35 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
                    tags=_repeated_dict(d, 'tags', EndpointTag))
 
 
+@dataclass
+class DataPlaneInfo:
+    authorization_details: Optional[str] = None
+    """Authorization details as a string."""
+
+    endpoint_url: Optional[str] = None
+    """The URL of the endpoint for this operation in the dataplane."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
+        """Deserializes the DataPlaneInfo from a dictionary."""
+        return cls(authorization_details=d.get('authorization_details', None),
+                   endpoint_url=d.get('endpoint_url', None))
+
+
 @dataclass
 class DatabricksModelServingConfig:
     databricks_workspace_url: str
@@ -1444,7 +1470,7 @@ def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
 
 @dataclass
 class ModelDataPlaneInfo:
-    query_info: Optional[oauth2.DataPlaneInfo] = None
+    query_info: Optional[DataPlaneInfo] = None
     """Information required to query DataPlane API 'query' endpoint."""
 
     def as_dict(self) -> dict:
@@ -1462,7 +1488,7 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo:
         """Deserializes the ModelDataPlaneInfo from a dictionary."""
-        return cls(query_info=_from_dict(d, 'query_info', oauth2.DataPlaneInfo))
+        return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo))
 
 
 @dataclass
@@ -3725,6 +3751,7 @@ class ServingEndpointsDataPlaneAPI:
     def __init__(self, api_client, control_plane):
         self._api = api_client
         self._control_plane = control_plane
+        from ..data_plane import DataPlaneService
         self._data_plane_service = DataPlaneService()
 
     def query(self,
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
index a74658964..1eac92382 100644
--- a/tests/test_data_plane.py
+++ b/tests/test_data_plane.py
@@ -2,7 +2,7 @@
 
 from databricks.sdk.data_plane import DataPlaneService
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
+from databricks.sdk.service.serving import DataPlaneInfo
 
 info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
 

From 8c4264bcc75671bd6fe62410e612e5c6c00dac7c Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Tue, 14 Jan 2025 15:30:19 +0100
Subject: [PATCH 082/136] [Internal] Stop testing Python 3.7 on Ubuntu (#858)

## What changes are proposed in this pull request?

Python 3.7.x is essentially no longer supported:

- It has been removed from Windows 2019, Windows 2022, Ubuntu 20 and
Ubuntu 22 images
([ref](https://github.com/actions/runner-images/issues/10893)).
- `ubuntu-latest` now points to 24.04, which does not support Python
3.7.x ([ref](https://github.com/actions/runner-images/pull/11332))

This PR updates our Ubuntu workflows to avoid this version.

## How is this tested?

N/A
---
 .github/workflows/push.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 2a2737f16..70f094c18 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -12,7 +12,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
     with:
       os: ubuntu-latest
       pyVersion: ${{ matrix.pyVersion }}

From ee136e2f1e05071fb3607f740d90b12e2aa635ad Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Tue, 14 Jan 2025 16:34:19 +0100
Subject: [PATCH 083/136] [Feature] Add `serving.http_request` to call external
 functions. (#857)

## What changes are proposed in this pull request?

This PR adds the `serving.http_request` function to call
`/external-function`.

The goal of this function is to make it easy for AI agent authors to
create tools that can make requests to external services by invoking the
`/external-function` API, with the secrets stored in UC Connections.

This PR is based on PR #852.

## How is this tested?

The mixin itself was not tested. Further testing will be conducted by
the Model Serving team.
---
 .codegen/_openapi_sha                         |   2 +-
 databricks/sdk/credentials_provider.py        |  27 +-
 databricks/sdk/mixins/open_ai_client.py       |  41 +-
 databricks/sdk/service/cleanrooms.py          |   3 +-
 databricks/sdk/service/files.py               |   9 +-
 databricks/sdk/service/jobs.py                |  85 ++-
 databricks/sdk/service/oauth2.py              |  12 +
 databricks/sdk/service/serving.py             | 601 ++++++++++++------
 .../account/oauth2/custom_app_integration.rst |   5 +-
 docs/account/oauth2/federation_policy.rst     |  18 +-
 .../service_principal_federation_policy.rst   |  18 +-
 docs/dbdataclasses/catalog.rst                |   3 +
 docs/dbdataclasses/cleanrooms.rst             |   3 +
 docs/dbdataclasses/jobs.rst                   |  14 +
 docs/dbdataclasses/oauth2.rst                 |   4 -
 docs/dbdataclasses/pipelines.rst              |  52 +-
 docs/dbdataclasses/serving.rst                |  85 ++-
 docs/workspace/apps/apps.rst                  |   6 +-
 docs/workspace/files/files.rst                |   9 +-
 docs/workspace/jobs/jobs.rst                  |  25 +-
 docs/workspace/pipelines/pipelines.rst        |  16 +-
 docs/workspace/serving/serving_endpoints.rst  |  50 +-
 22 files changed, 766 insertions(+), 322 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index dfe78790a..431b7678a 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-779817ed8d63031f5ea761fbd25ee84f38feec0d
\ No newline at end of file
+05a10af4ed43566968119b43605f0a7fecbe780f
\ No newline at end of file
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index e91e37af4..c20464d5e 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -167,6 +167,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
     oidc = cfg.oidc_endpoints
     if oidc is None:
         return None
+
     token_source = ClientCredentials(client_id=cfg.client_id,
                                      client_secret=cfg.client_secret,
                                      token_url=oidc.token_endpoint,
@@ -210,16 +211,22 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     credentials = token_cache.load()
     if credentials:
         # Force a refresh in case the loaded credentials are expired.
-        credentials.token()
-    else:
-        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                   client_id=client_id,
-                                   redirect_url=redirect_url,
-                                   client_secret=client_secret)
-        consent = oauth_client.initiate_consent()
-        if not consent:
-            return None
-        credentials = consent.launch_external_browser()
+        # If the refresh fails, rather than throwing an exception, we initiate a new OAuth login flow.
+        try:
+            credentials.token()
+            return credentials(cfg)
+        # TODO: we should ideally use more specific exceptions.
+        except Exception as e:
+            logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
+
+    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                               client_id=client_id,
+                               redirect_url=redirect_url,
+                               client_secret=client_secret)
+    consent = oauth_client.initiate_consent()
+    if not consent:
+        return None
+    credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
 
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
index a86827128..5f9713117 100644
--- a/databricks/sdk/mixins/open_ai_client.py
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -1,4 +1,9 @@
-from databricks.sdk.service.serving import ServingEndpointsAPI
+import json as js
+from typing import Dict, Optional
+
+from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            ExternalFunctionResponse,
+                                            ServingEndpointsAPI)
 
 
 class ServingEndpointsExt(ServingEndpointsAPI):
@@ -50,3 +55,37 @@ def get_langchain_chat_open_ai_client(self, model):
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
             api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
             http_client=self._get_authorized_http_client())
+
+    def http_request(self,
+                     conn: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[Dict[str, str]] = None,
+                     json: Optional[Dict[str, str]] = None,
+                     params: Optional[Dict[str, str]] = None) -> ExternalFunctionResponse:
+        """Make external services call using the credentials stored in UC Connection.
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+        :returns: :class:`ExternalFunctionResponse`
+        """
+
+        return super().http_request(connection_name=conn,
+                                    method=method,
+                                    path=path,
+                                    headers=js.dumps(headers) if headers is not None else None,
+                                    json=js.dumps(json) if json is not None else None,
+                                    params=js.dumps(params) if params is not None else None,
+                                    )
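
A usage sketch for the new mixin; the connection name and request path are hypothetical:

```python
# Sketch only: call an external service through a UC Connection. The
# connection 'my_connection' and the path are placeholder values.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()
resp = w.serving_endpoints.http_request(
    conn='my_connection',
    method=ExternalFunctionRequestHttpMethod.GET,
    path='/api/v1/status',
    params={'limit': '10'},
)
print(resp.status_code, resp.text)
```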
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
index 393c68a0b..20d57527e 100755
--- a/databricks/sdk/service/cleanrooms.py
+++ b/databricks/sdk/service/cleanrooms.py
@@ -312,6 +312,7 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
 class CleanRoomAssetStatusEnum(Enum):
 
     ACTIVE = 'ACTIVE'
+    PENDING = 'PENDING'
     PERMISSION_DENIED = 'PERMISSION_DENIED'
 
 
@@ -443,7 +444,7 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetVolumeLocalDetails:
 class CleanRoomCollaborator:
     """Publicly visible clean room collaborator."""
 
-    collaborator_alias: Optional[str] = None
+    collaborator_alias: str
     """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
     this clean room, and used to derive multiple values internally such as catalog alias and clean
     room name for single metastore clean rooms. It should follow [UC securable naming requirements].
diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py
index 07cdaea54..99c252298 100755
--- a/databricks/sdk/service/files.py
+++ b/databricks/sdk/service/files.py
@@ -925,9 +925,12 @@ class FilesAPI:
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
     
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD,
-    PUT, and DELETE to manage files and directories specified using their URI path. The path is always
-    absolute.
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+    
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
     
     [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html"""
 
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index c5fdb8393..dd4bc8075 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -443,6 +443,7 @@ class CleanRoomTaskRunLifeCycleState(Enum):
     PENDING = 'PENDING'
     QUEUED = 'QUEUED'
     RUNNING = 'RUNNING'
+    RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED'
     SKIPPED = 'SKIPPED'
     TERMINATED = 'TERMINATED'
     TERMINATING = 'TERMINATING'
@@ -459,6 +460,7 @@ class CleanRoomTaskRunResultState(Enum):
     EXCLUDED = 'EXCLUDED'
     FAILED = 'FAILED'
     MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
+    RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED'
     SUCCESS = 'SUCCESS'
     SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
     TIMEDOUT = 'TIMEDOUT'
@@ -541,6 +543,42 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTask:
                    notebook_name=d.get('notebook_name', None))
 
 
+@dataclass
+class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
+    clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None
+    """The run state of the clean rooms notebook task."""
+
+    notebook_output: Optional[NotebookOutput] = None
+    """The notebook output for the clean room run"""
+
+    output_schema_info: Optional[OutputSchemaInfo] = None
+    """Information on how to access the output schema for the clean room run"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room_job_run_state:
+            body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict()
+        if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
+        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state
+        if self.notebook_output: body['notebook_output'] = self.notebook_output
+        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
+        """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary."""
+        return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState),
+                   notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
+                   output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo))
+
+
 @dataclass
 class ClusterInstance:
     cluster_id: Optional[str] = None
@@ -2914,6 +2952,42 @@ def from_dict(cls, d: Dict[str, any]) -> NotebookTask:
                    warehouse_id=d.get('warehouse_id', None))
 
 
+@dataclass
+class OutputSchemaInfo:
+    """Stores the catalog name, schema name, and the output schema expiration time for the clean room
+    run."""
+
+    catalog_name: Optional[str] = None
+
+    expiration_time: Optional[int] = None
+    """The expiration time for the output schema as a Unix timestamp in milliseconds."""
+
+    schema_name: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo:
+        """Deserializes the OutputSchemaInfo from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   expiration_time=d.get('expiration_time', None),
+                   schema_name=d.get('schema_name', None))
+
+
 class PauseStatus(Enum):
 
     PAUSED = 'PAUSED'
@@ -4415,6 +4489,9 @@ def from_dict(cls, d: Dict[str, any]) -> RunNowResponse:
 class RunOutput:
     """Run output was retrieved successfully."""
 
+    clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None
+    """The output of a clean rooms notebook task, if available"""
+
     dbt_output: Optional[DbtOutput] = None
     """The output of a dbt task, if available."""
 
@@ -4459,6 +4536,8 @@ class RunOutput:
     def as_dict(self) -> dict:
         """Serializes the RunOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_output:
+            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict()
         if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
         if self.error is not None: body['error'] = self.error
         if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4474,6 +4553,8 @@ def as_dict(self) -> dict:
     def as_shallow_dict(self) -> dict:
         """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.clean_rooms_notebook_output:
+            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output
         if self.dbt_output: body['dbt_output'] = self.dbt_output
         if self.error is not None: body['error'] = self.error
         if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -4489,7 +4570,9 @@ def as_shallow_dict(self) -> dict:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunOutput:
         """Deserializes the RunOutput from a dictionary."""
-        return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
+        return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output',
+                                                          CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput),
+                   dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
                    error=d.get('error', None),
                    error_trace=d.get('error_trace', None),
                    info=d.get('info', None),
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 1aac8bc1c..dc51cd455 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -791,6 +791,10 @@ class UpdateCustomAppIntegration:
     redirect_urls: Optional[List[str]] = None
     """List of OAuth redirect urls to be updated in the custom OAuth app integration"""
 
+    scopes: Optional[List[str]] = None
+    """List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs
+    this will fully replace the existing values instead of appending"""
+
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy to be updated in the custom OAuth app integration"""
 
@@ -799,6 +803,7 @@ def as_dict(self) -> dict:
         body = {}
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
+        if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
@@ -807,6 +812,7 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
         return body
 
@@ -815,6 +821,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration:
         """Deserializes the UpdateCustomAppIntegration from a dictionary."""
         return cls(integration_id=d.get('integration_id', None),
                    redirect_urls=d.get('redirect_urls', None),
+                   scopes=d.get('scopes', None),
                    token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
 
 
@@ -1165,6 +1172,7 @@ def update(self,
                integration_id: str,
                *,
                redirect_urls: Optional[List[str]] = None,
+               scopes: Optional[List[str]] = None,
                token_access_policy: Optional[TokenAccessPolicy] = None):
         """Updates Custom OAuth App Integration.
         
@@ -1174,6 +1182,9 @@ def update(self,
         :param integration_id: str
         :param redirect_urls: List[str] (optional)
           List of OAuth redirect urls to be updated in the custom OAuth app integration
+        :param scopes: List[str] (optional)
+          List of OAuth scopes to be updated in the custom OAuth app integration; similar to redirect
+          URIs, this will fully replace the existing values instead of appending
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
         
@@ -1181,6 +1192,7 @@ def update(self,
         """
         body = {}
         if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
+        if scopes is not None: body['scopes'] = [v for v in scopes]
         if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 1ada305cd..971e3fd7e 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -145,11 +145,8 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
 
 @dataclass
 class AiGatewayGuardrailPiiBehavior:
-    behavior: AiGatewayGuardrailPiiBehaviorBehavior
-    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
-    guardrail and the request contains PII, the request is not sent to the model server and 400
-    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
-    contains PII, the PII info in the response is redacted and 400 status code is returned."""
+    behavior: Optional[AiGatewayGuardrailPiiBehaviorBehavior] = None
+    """Configuration for input guardrail filters."""
 
     def as_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body."""
@@ -170,10 +167,6 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
 
 
 class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
-    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
-    guardrail and the request contains PII, the request is not sent to the model server and 400
-    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
-    contains PII, the PII info in the response is redacted and 400 status code is returned."""
 
     BLOCK = 'BLOCK'
     NONE = 'NONE'
@@ -289,15 +282,12 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
 
 
 class AiGatewayRateLimitKey(Enum):
-    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
-    being the default if not specified."""
 
     ENDPOINT = 'endpoint'
     USER = 'user'
 
 
 class AiGatewayRateLimitRenewalPeriod(Enum):
-    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
 
     MINUTE = 'minute'
 
@@ -336,9 +326,9 @@ class AmazonBedrockConfig:
 
     aws_access_key_id: Optional[str] = None
     """The Databricks secret key reference for an AWS access key ID with permissions to interact with
-    Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You
-    must provide an API key using one of the following fields: `aws_access_key_id` or
-    `aws_access_key_id_plaintext`."""
+    Bedrock services. If you prefer to paste your API key directly, see
+    `aws_access_key_id_plaintext`. You must provide an API key using one of the following fields:
+    `aws_access_key_id` or `aws_access_key_id_plaintext`."""
 
     aws_access_key_id_plaintext: Optional[str] = None
     """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext
@@ -396,8 +386,6 @@ def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
 
 
 class AmazonBedrockConfigBedrockProvider(Enum):
-    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
-    Anthropic, Cohere, AI21Labs, Amazon."""
 
     AI21LABS = 'ai21labs'
     AMAZON = 'amazon'
@@ -487,18 +475,21 @@ def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput:
 @dataclass
 class AutoCaptureConfigOutput:
     catalog_name: Optional[str] = None
-    """The name of the catalog in Unity Catalog."""
+    """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if
+    the inference table is already enabled."""
 
     enabled: Optional[bool] = None
     """Indicates whether the inference table is enabled."""
 
     schema_name: Optional[str] = None
-    """The name of the schema in Unity Catalog."""
+    """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if
+    the inference table is already enabled."""
 
     state: Optional[AutoCaptureState] = None
 
     table_name_prefix: Optional[str] = None
-    """The prefix of the table in Unity Catalog."""
+    """The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if
+    the inference table is already enabled."""
 
     def as_dict(self) -> dict:
         """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body."""
@@ -663,8 +654,8 @@ class CreateServingEndpoint:
     """The core config of the serving endpoint."""
 
     ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-    supported as of now."""
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
 
     rate_limits: Optional[List[RateLimit]] = None
     """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
@@ -711,6 +702,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
 
 @dataclass
 class DataPlaneInfo:
+    """Details necessary to query this object's API through the DataPlane APIs."""
+
     authorization_details: Optional[str] = None
     """Authorization details as a string."""
 
@@ -879,21 +872,22 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum):
 class EndpointCoreConfigInput:
     auto_capture_config: Optional[AutoCaptureConfigInput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
+    updating existing provisioned throughput endpoints that never have inference table configured;
+    in these cases please use AI Gateway to manage inference tables."""
 
     name: Optional[str] = None
     """The name of the serving endpoint to update. This field is required."""
 
     served_entities: Optional[List[ServedEntityInput]] = None
-    """A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-    entities."""
+    """The list of served entities under the serving endpoint config."""
 
     served_models: Optional[List[ServedModelInput]] = None
-    """(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-    serving endpoint can have up to 15 served models."""
+    """(Deprecated, use served_entities instead) The list of served models under the serving endpoint
+    config."""
 
     traffic_config: Optional[TrafficConfig] = None
-    """The traffic config defining how invocations to the serving endpoint should be routed."""
+    """The traffic configuration associated with the serving endpoint config."""
 
     def as_dict(self) -> dict:
         """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body."""
@@ -929,7 +923,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput:
 class EndpointCoreConfigOutput:
     auto_capture_config: Optional[AutoCaptureConfigOutput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
+    updating existing provisioned throughput endpoints that never have inference table configured;
+    in these cases please use AI Gateway to manage inference tables."""
 
     config_version: Optional[int] = None
     """The config version that the serving endpoint is currently serving."""
@@ -1008,7 +1004,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary:
 class EndpointPendingConfig:
     auto_capture_config: Optional[AutoCaptureConfigOutput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or
+    updating existing provisioned throughput endpoints that never have inference table configured;
+    in these cases please use AI Gateway to manage inference tables."""
 
     config_version: Optional[int] = None
     """The config version that the serving endpoint is currently serving."""
@@ -1094,10 +1092,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointState:
 
 
 class EndpointStateConfigUpdate(Enum):
-    """The state of an endpoint's config update. This informs the user if the pending_config is in
-    progress, if the update failed, or if there is no update in progress. Note that if the
-    endpoint's config_update state value is IN_PROGRESS, another update can not be made until the
-    update completes or fails."""
 
     IN_PROGRESS = 'IN_PROGRESS'
     NOT_UPDATING = 'NOT_UPDATING'
@@ -1106,9 +1100,6 @@ class EndpointStateConfigUpdate(Enum):
 
 
 class EndpointStateReady(Enum):
-    """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is
-    READY if all of the served entities in its active configuration are ready. If any of the
-    actively served entities are in a non-ready state, the endpoint state will be NOT_READY."""
 
     NOT_READY = 'NOT_READY'
     READY = 'READY'
@@ -1142,6 +1133,28 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         return cls(key=d.get('key', None), value=d.get('value', None))
 
 
+@dataclass
+class EndpointTags:
+    tags: Optional[List[EndpointTag]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tags: body['tags'] = self.tags
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
+        """Deserializes the EndpointTags from a dictionary."""
+        return cls(tags=_repeated_dict(d, 'tags', EndpointTag))
+
+
 @dataclass
 class ExportMetricsResponse:
     contents: Optional[BinaryIO] = None
@@ -1164,12 +1177,105 @@ def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
         return cls(contents=d.get('contents', None))
 
 
+@dataclass
+class ExternalFunctionRequest:
+    """Simple Proto message for testing"""
+
+    connection_name: str
+    """The connection name to use. This is required to identify the external connection."""
+
+    method: ExternalFunctionRequestHttpMethod
+    """The HTTP method to use (e.g., 'GET', 'POST')."""
+
+    path: str
+    """The relative path for the API endpoint. This is required."""
+
+    headers: Optional[str] = None
+    """Additional headers for the request. If not provided, only auth headers from connections would be
+    passed."""
+
+    json: Optional[str] = None
+    """The JSON payload to send in the request body."""
+
+    params: Optional[str] = None
+    """Query parameters for the request."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.headers is not None: body['headers'] = self.headers
+        if self.json is not None: body['json'] = self.json
+        if self.method is not None: body['method'] = self.method.value
+        if self.params is not None: body['params'] = self.params
+        if self.path is not None: body['path'] = self.path
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.headers is not None: body['headers'] = self.headers
+        if self.json is not None: body['json'] = self.json
+        if self.method is not None: body['method'] = self.method
+        if self.params is not None: body['params'] = self.params
+        if self.path is not None: body['path'] = self.path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionRequest:
+        """Deserializes the ExternalFunctionRequest from a dictionary."""
+        return cls(connection_name=d.get('connection_name', None),
+                   headers=d.get('headers', None),
+                   json=d.get('json', None),
+                   method=_enum(d, 'method', ExternalFunctionRequestHttpMethod),
+                   params=d.get('params', None),
+                   path=d.get('path', None))
+
+
+class ExternalFunctionRequestHttpMethod(Enum):
+
+    DELETE = 'DELETE'
+    GET = 'GET'
+    PATCH = 'PATCH'
+    POST = 'POST'
+    PUT = 'PUT'
+
+
+@dataclass
+class ExternalFunctionResponse:
+    status_code: Optional[int] = None
+    """The HTTP status code of the response"""
+
+    text: Optional[str] = None
+    """The content of the response"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalFunctionResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.status_code is not None: body['status_code'] = self.status_code
+        if self.text is not None: body['text'] = self.text
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalFunctionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status_code is not None: body['status_code'] = self.status_code
+        if self.text is not None: body['text'] = self.text
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionResponse:
+        """Deserializes the ExternalFunctionResponse from a dictionary."""
+        return cls(status_code=d.get('status_code', None), text=d.get('text', None))
+
+
 @dataclass
 class ExternalModel:
     provider: ExternalModelProvider
     """The name of the provider for the external model. Currently, the supported providers are
     'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and 'palm'.","""
+    'google-cloud-vertex-ai', 'openai', and 'palm'."""
 
     name: str
     """The name of the external model."""
@@ -1256,9 +1362,6 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
 
 
 class ExternalModelProvider(Enum):
-    """The name of the provider for the external model. Currently, the supported providers are
-    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and 'palm'.","""
 
     AI21LABS = 'ai21labs'
     AMAZON_BEDROCK = 'amazon-bedrock'
@@ -1307,17 +1410,16 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
 
 @dataclass
 class FoundationModel:
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
+
     description: Optional[str] = None
-    """The description of the foundation model."""
 
     display_name: Optional[str] = None
-    """The display name of the foundation model."""
 
     docs: Optional[str] = None
-    """The URL to the documentation of the foundation model."""
 
     name: Optional[str] = None
-    """The name of the foundation model."""
 
     def as_dict(self) -> dict:
         """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body."""
@@ -1348,23 +1450,24 @@ def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
 
 @dataclass
 class GetOpenApiResponse:
-    """The response is an OpenAPI spec in JSON format that typically includes fields like openapi,
-    info, servers and paths, etc."""
+    contents: Optional[BinaryIO] = None
 
     def as_dict(self) -> dict:
         """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.contents: body['contents'] = self.contents
         return body
 
     def as_shallow_dict(self) -> dict:
         """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes."""
         body = {}
+        if self.contents: body['contents'] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse:
         """Deserializes the GetOpenApiResponse from a dictionary."""
-        return cls()
+        return cls(contents=d.get('contents', None))
 
 
 @dataclass
@@ -1393,13 +1496,23 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo
 
 @dataclass
 class GoogleCloudVertexAiConfig:
+    project_id: str
+    """This is the Google Cloud project id that the service account is associated with."""
+
+    region: str
+    """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more
+    details. Some models are only available in specific regions.
+    
+    [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations"""
+
     private_key: Optional[str] = None
     """The Databricks secret key reference for a private key for the service account which has access
     to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys].
     If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an
     API key using one of the following fields: `private_key` or `private_key_plaintext`
     
-    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
+    [Best practices for managing service account keys]:
+    https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
     private_key_plaintext: Optional[str] = None
     """The private key for the service account which has access to the Google Cloud Vertex AI Service
@@ -1407,16 +1520,8 @@ class GoogleCloudVertexAiConfig:
     prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an
     API key using one of the following fields: `private_key` or `private_key_plaintext`.
     
-    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
-
-    project_id: Optional[str] = None
-    """This is the Google Cloud project id that the service account is associated with."""
-
-    region: Optional[str] = None
-    """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more
-    details. Some models are only available in specific regions.
-    
-    [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations"""
+    [Best practices for managing service account keys]:
+    https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
     def as_dict(self) -> dict:
         """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body."""
@@ -1470,6 +1575,9 @@ def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
 
 @dataclass
 class ModelDataPlaneInfo:
+    """A representation of all DataPlaneInfo for operations that can be done on a model through Data
+    Plane APIs."""
+
     query_info: Optional[DataPlaneInfo] = None
     """Information required to query DataPlane API 'query' endpoint."""
 
@@ -1493,6 +1601,8 @@ def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo:
 
 @dataclass
 class OpenAiConfig:
+    """Configs needed to create an OpenAI model route."""
+
     microsoft_entra_client_id: Optional[str] = None
     """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID."""
 
@@ -1678,13 +1788,10 @@ def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
 @dataclass
 class PayloadTable:
     name: Optional[str] = None
-    """The name of the payload table."""
 
     status: Optional[str] = None
-    """The status of the payload table."""
 
     status_message: Optional[str] = None
-    """The status message of the payload table."""
 
     def as_dict(self) -> dict:
         """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body."""
@@ -1710,6 +1817,57 @@ def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
                    status_message=d.get('status_message', None))
 
 
+@dataclass
+class PutAiGatewayRequest:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
+
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality."""
+
+    name: Optional[str] = None
+    """The name of the serving endpoint whose AI Gateway is being updated. This field is required."""
+
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
+
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayRequest:
+        """Deserializes the PutAiGatewayRequest from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   name=d.get('name', None),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+
+
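A sketch of how this request serializes; the endpoint name and inference table settings are hypothetical, and the AiGatewayInferenceTableConfig field names are assumed from this SDK version:

    req = PutAiGatewayRequest(
        name='my-endpoint',
        inference_table_config=AiGatewayInferenceTableConfig(catalog_name='main',    # assumed fields
                                                             schema_name='logs',
                                                             enabled=True),
    )
    body = req.as_dict()  # nested configs are serialized via their own as_dict()
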
 @dataclass
 class PutAiGatewayResponse:
     guardrails: Optional[AiGatewayGuardrails] = None
@@ -1718,7 +1876,7 @@ class PutAiGatewayResponse:
 
     inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
     """Configuration for payload logging using inference tables. Use these tables to monitor and audit
-    data being sent to and received from model APIs and to improve model quality ."""
+    data being sent to and received from model APIs and to improve model quality."""
 
     rate_limits: Optional[List[AiGatewayRateLimit]] = None
     """Configuration for rate limits which can be set to limit endpoint traffic."""
@@ -1755,6 +1913,34 @@ def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
                    usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
 
 
+@dataclass
+class PutRequest:
+    name: Optional[str] = None
+    """The name of the serving endpoint whose rate limits are being updated. This field is required."""
+
+    rate_limits: Optional[List[RateLimit]] = None
+    """The list of endpoint rate limits."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutRequest:
+        """Deserializes the PutRequest from a dictionary."""
+        return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit))
+
+
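A minimal sketch of the rate-limit request body, assuming the RateLimit fields defined elsewhere in this module (calls and renewal_period); the endpoint name is hypothetical:

    req = PutRequest(
        name='my-endpoint',
        rate_limits=[RateLimit(calls=100, renewal_period=RateLimitRenewalPeriod.MINUTE)],
    )
    assert req.as_dict()['rate_limits'][0]['calls'] == 100
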
 @dataclass
 class PutResponse:
     rate_limits: Optional[List[RateLimit]] = None
@@ -2020,15 +2206,12 @@ def from_dict(cls, d: Dict[str, any]) -> RateLimit:
 
 
 class RateLimitKey(Enum):
-    """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are
-    supported, with 'endpoint' being the default if not specified."""
 
     ENDPOINT = 'endpoint'
     USER = 'user'
 
 
 class RateLimitRenewalPeriod(Enum):
-    """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported."""
 
     MINUTE = 'minute'
 
@@ -2069,11 +2252,9 @@ class ServedEntityInput:
     """The name of the entity to be served. The entity may be a model in the Databricks Model Registry,
     a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
     object, the full name of the object should be given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
+    **catalog_name.schema_name.model_name**."""
 
     entity_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry to be served or empty if the entity is a
-    FEATURE_SPEC."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
@@ -2102,7 +2283,7 @@ class ServedEntityInput:
     """The name of a served entity. It must be unique across an endpoint. A served entity name can
     consist of alphanumeric characters, dashes, and underscores. If not specified for an external
     model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
-    not specified for other entities, it defaults to -."""
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
     """Whether the compute resources for the served entity should scale down to zero."""
@@ -2115,13 +2296,13 @@ class ServedEntityInput:
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
     is 0."""
 
-    workload_type: Optional[str] = None
+    workload_type: Optional[ServingModelWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body."""
@@ -2138,7 +2319,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -2172,26 +2353,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput:
                    name=d.get('name', None),
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedEntityOutput:
     creation_timestamp: Optional[int] = None
-    """The creation timestamp of the served entity in Unix time."""
 
     creator: Optional[str] = None
-    """The email of the user who created the served entity."""
 
     entity_name: Optional[str] = None
-    """The name of the entity served. The entity may be a model in the Databricks Model Registry, a
-    model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
-    object, the full name of the object is given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
+    """The name of the entity to be served. The entity may be a model in the Databricks Model Registry,
+    a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
+    object, the full name of the object should be given in the form of
+    **catalog_name.schema_name.model_name**."""
 
     entity_version: Optional[str] = None
-    """The version of the served entity in Databricks Model Registry or empty if the entity is a
-    FEATURE_SPEC."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
@@ -2200,14 +2377,16 @@ class ServedEntityOutput:
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     external_model: Optional[ExternalModel] = None
-    """The external model that is served. NOTE: Only one of external_model, foundation_model, and
-    (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
-    returned based on the endpoint type."""
+    """The external model to be served. NOTE: Only one of external_model and (entity_name,
+    entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with
+    the latter set being used for custom model serving for a Databricks registered model. For an
+    existing endpoint with external_model, it cannot be updated to an endpoint without
+    external_model. If the endpoint is created without external_model, users cannot update it to add
+    external_model later. The task type of all external models within an endpoint must be the same."""
 
     foundation_model: Optional[FoundationModel] = None
-    """The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
-    (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
-    returned based on the endpoint type."""
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
 
     instance_profile_arn: Optional[str] = None
     """ARN of the instance profile that the served entity uses to access AWS resources."""
@@ -2219,13 +2398,15 @@ class ServedEntityOutput:
     """The minimum tokens per second that the endpoint can scale down to."""
 
     name: Optional[str] = None
-    """The name of the served entity."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
     """Whether the compute resources for the served entity should scale down to zero."""
 
     state: Optional[ServedModelState] = None
-    """Information corresponding to the state of the served entity."""
 
     workload_size: Optional[str] = None
     """The workload size of the served entity. The workload size corresponds to a range of provisioned
@@ -2233,15 +2414,15 @@ class ServedEntityOutput:
     process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
     "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
-    will be 0."""
+    is 0."""
 
-    workload_type: Optional[str] = None
+    workload_type: Optional[ServingModelWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body."""
@@ -2262,7 +2443,7 @@ def as_dict(self) -> dict:
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.state: body['state'] = self.state.as_dict()
         if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -2304,31 +2485,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput:
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    state=_from_dict(d, 'state', ServedModelState),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedEntitySpec:
     entity_name: Optional[str] = None
-    """The name of the entity served. The entity may be a model in the Databricks Model Registry, a
-    model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
-    object, the full name of the object is given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
 
     entity_version: Optional[str] = None
-    """The version of the served entity in Databricks Model Registry or empty if the entity is a
-    FEATURE_SPEC."""
 
     external_model: Optional[ExternalModel] = None
-    """The external model that is served. NOTE: Only one of external_model, foundation_model, and
-    (entity_name, entity_version) is returned based on the endpoint type."""
 
     foundation_model: Optional[FoundationModel] = None
-    """The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
-    (entity_name, entity_version) is returned based on the endpoint type."""
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
 
     name: Optional[str] = None
-    """The name of the served entity."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body."""
@@ -2362,24 +2534,21 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
 
 @dataclass
 class ServedModelInput:
+    scale_to_zero_enabled: bool
+    """Whether the compute resources for the served entity should scale down to zero."""
+
     model_name: str
-    """The name of the model in Databricks Model Registry to be served or if the model resides in Unity
-    Catalog, the full name of model, in the form of __catalog_name__.__schema_name__.__model_name__."""
 
     model_version: str
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
-
-    scale_to_zero_enabled: bool
-    """Whether the compute resources for the served model should scale down to zero."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
-    for serving this model. Note: this is an experimental feature and subject to change. Example
-    model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+    for serving this entity. Note: this is an experimental feature and subject to change. Example
+    entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     instance_profile_arn: Optional[str] = None
-    """ARN of the instance profile that the served model will use to access AWS resources."""
+    """ARN of the instance profile that the served entity uses to access AWS resources."""
 
     max_provisioned_throughput: Optional[int] = None
     """The maximum tokens per second that the endpoint can scale up to."""
@@ -2388,25 +2557,26 @@ class ServedModelInput:
     """The minimum tokens per second that the endpoint can scale down to."""
 
     name: Optional[str] = None
-    """The name of a served model. It must be unique across an endpoint. If not specified, this field
-    will default to -. A served model name can consist of alphanumeric
-    characters, dashes, and underscores."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     workload_size: Optional[ServedModelInputWorkloadSize] = None
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
+    """The workload size of the served entity. The workload size corresponds to a range of provisioned
+    concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+    process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
+    "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
+    scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
+    is 0."""
 
     workload_type: Optional[ServedModelInputWorkloadType] = None
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
+    """The workload type of the served entity. The workload type selects which type of compute to use
+    in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body."""
@@ -2458,12 +2628,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
 
 
 class ServedModelInputWorkloadSize(Enum):
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
 
     LARGE = 'Large'
     MEDIUM = 'Medium'
@@ -2471,12 +2635,6 @@ class ServedModelInputWorkloadSize(Enum):
 
 
 class ServedModelInputWorkloadType(Enum):
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
-    acceleration is available by selecting workload types like GPU_SMALL and others. See the
-    available [GPU types].
-    
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     CPU = 'CPU'
     GPU_LARGE = 'GPU_LARGE'
@@ -2488,51 +2646,48 @@ class ServedModelInputWorkloadType(Enum):
 @dataclass
 class ServedModelOutput:
     creation_timestamp: Optional[int] = None
-    """The creation timestamp of the served model in Unix time."""
 
     creator: Optional[str] = None
-    """The email of the user who created the served model."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
-    for serving this model. Note: this is an experimental feature and subject to change. Example
-    model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+    for serving this entity. Note: this is an experimental feature and subject to change. Example
+    entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     instance_profile_arn: Optional[str] = None
-    """ARN of the instance profile that the served model will use to access AWS resources."""
+    """ARN of the instance profile that the served entity uses to access AWS resources."""
 
     model_name: Optional[str] = None
-    """The name of the model in Databricks Model Registry or the full name of the model in Unity
-    Catalog."""
 
     model_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
 
     name: Optional[str] = None
-    """The name of the served model."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
-    """Whether the compute resources for the Served Model should scale down to zero."""
+    """Whether the compute resources for the served entity should scale down to zero."""
 
     state: Optional[ServedModelState] = None
-    """Information corresponding to the state of the Served Model."""
 
     workload_size: Optional[str] = None
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
-
-    workload_type: Optional[str] = None
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
+    """The workload size of the served entity. The workload size corresponds to a range of provisioned
+    concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+    process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
+    "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
+    scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
+    is 0."""
+
+    workload_type: Optional[ServingModelWorkloadType] = None
+    """The workload type of the served entity. The workload type selects which type of compute to use
+    in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body."""
@@ -2547,7 +2702,7 @@ def as_dict(self) -> dict:
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.state: body['state'] = self.state.as_dict()
         if self.workload_size is not None: body['workload_size'] = self.workload_size
-        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -2579,20 +2734,18 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelOutput:
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    state=_from_dict(d, 'state', ServedModelState),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedModelSpec:
     model_name: Optional[str] = None
-    """The name of the model in Databricks Model Registry or the full name of the model in Unity
-    Catalog."""
+    """Only one of model_name and entity_name should be populated"""
 
     model_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
+    """Only one of model_version and entity_version should be populated"""
 
     name: Optional[str] = None
-    """The name of the served model."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body."""
@@ -2621,18 +2774,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec:
 @dataclass
 class ServedModelState:
     deployment: Optional[ServedModelStateDeployment] = None
-    """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
-    is not ready yet because the deployment is still being created (i.e container image is building,
-    model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the
-    served entity was previously in a ready state but no longer is and is attempting to recover.
-    DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED
-    indicates that there was an error trying to bring up the served entity (e.g container image
-    build failed, the model server failed to start due to a model loading error, etc.)
-    DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in
-    bringing up another served entity under the same endpoint and config version."""
 
     deployment_state_message: Optional[str] = None
-    """More information about the state of the served entity, if available."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body."""
@@ -2658,15 +2801,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelState:
 
 
 class ServedModelStateDeployment(Enum):
-    """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
-    is not ready yet because the deployment is still being created (i.e container image is building,
-    model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the
-    served entity was previously in a ready state but no longer is and is attempting to recover.
-    DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED
-    indicates that there was an error trying to bring up the served entity (e.g container image
-    build failed, the model server failed to start due to a model loading error, etc.)
-    DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in
-    bringing up another served entity under the same endpoint and config version."""
 
     ABORTED = 'DEPLOYMENT_ABORTED'
     CREATING = 'DEPLOYMENT_CREATING'
@@ -2701,8 +2835,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
 @dataclass
 class ServingEndpoint:
     ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
-    currently supported."""
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
 
     config: Optional[EndpointCoreConfigSummary] = None
     """The config that is currently being served by the endpoint."""
@@ -2714,8 +2848,7 @@ class ServingEndpoint:
     """The email of the user who created the serving endpoint."""
 
     id: Optional[str] = None
-    """System-generated ID of the endpoint. This is used to refer to the endpoint in the Permissions
-    API"""
+    """System-generated ID of the endpoint, included to be used by the Permissions API."""
 
     last_updated_timestamp: Optional[int] = None
     """The timestamp when the endpoint was last updated by a user in Unix time."""
@@ -2874,8 +3007,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
 @dataclass
 class ServingEndpointDetailed:
     ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
-    currently supported."""
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
 
     config: Optional[EndpointCoreConfigOutput] = None
     """The config that is currently being served by the endpoint."""
@@ -2983,7 +3116,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
 
 
 class ServingEndpointDetailedPermissionLevel(Enum):
-    """The permission level of the principal making the request."""
 
     CAN_MANAGE = 'CAN_MANAGE'
     CAN_QUERY = 'CAN_QUERY'
@@ -3123,6 +3255,15 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
                    serving_endpoint_id=d.get('serving_endpoint_id', None))
 
 
+class ServingModelWorkloadType(Enum):
+
+    CPU = 'CPU'
+    GPU_LARGE = 'GPU_LARGE'
+    GPU_MEDIUM = 'GPU_MEDIUM'
+    GPU_SMALL = 'GPU_SMALL'
+    MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM'
+
+
 @dataclass
 class TrafficConfig:
     routes: Optional[List[Route]] = None
@@ -3276,8 +3417,8 @@ def create(self,
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
+          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+          throughput endpoints are currently supported.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
@@ -3325,7 +3466,6 @@ def delete(self, name: str):
         """Delete a serving endpoint.
         
         :param name: str
-          The name of the serving endpoint. This field is required.
         
         
         """
@@ -3367,7 +3507,7 @@ def get(self, name: str) -> ServingEndpointDetailed:
         res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}', headers=headers)
         return ServingEndpointDetailed.from_dict(res)
 
-    def get_open_api(self, name: str):
+    def get_open_api(self, name: str) -> GetOpenApiResponse:
         """Get the schema for a serving endpoint.
         
         Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
@@ -3376,12 +3516,13 @@ def get_open_api(self, name: str):
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         
-        
+        :returns: :class:`GetOpenApiResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {'Accept': 'text/plain', }
 
-        self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers)
+        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers, raw=True)
+        return GetOpenApiResponse.from_dict(res)
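A usage sketch for the changed return type, assuming an existing endpoint named 'my-endpoint' and that contents behaves as the BinaryIO declared on GetOpenApiResponse:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    spec = w.serving_endpoints.get_open_api('my-endpoint')
    print(spec.contents.read())  # the OpenAPI document, served as text/plain
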
 
     def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse:
         """Get serving endpoint permission levels.
@@ -3420,6 +3561,44 @@ def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermission
                            headers=headers)
         return ServingEndpointPermissions.from_dict(res)
 
+    def http_request(self,
+                     connection_name: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[str] = None,
+                     json: Optional[str] = None,
+                     params: Optional[str] = None) -> ExternalFunctionResponse:
+        """Make external services call using the credentials stored in UC Connection.
+        
+        :param connection_name: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST').
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: str (optional)
+          Additional headers for the request. If not provided, only the auth headers from the
+          connection are passed.
+        :param json: str (optional)
+          The JSON payload to send in the request body.
+        :param params: str (optional)
+          Query parameters for the request.
+        
+        :returns: :class:`ExternalFunctionResponse`
+        """
+        body = {}
+        if connection_name is not None: body['connection_name'] = connection_name
+        if headers is not None: body['headers'] = headers
+        if json is not None: body['json'] = json
+        if method is not None: body['method'] = method.value
+        if params is not None: body['params'] = params
+        if path is not None: body['path'] = path
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers)
+        return ExternalFunctionResponse.from_dict(res)
+
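A usage sketch for the new http_request method; the connection name and path are hypothetical, and note that headers, json, and params are passed as strings per the signature above:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.serving_endpoints.http_request(
        connection_name='my_connection',               # hypothetical UC connection
        method=ExternalFunctionRequestHttpMethod.GET,
        path='/api/v1/status',
        params='{"verbose": "true"}',                  # string-encoded query parameters
    )
    print(resp.status_code, resp.text)
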
     def list(self) -> Iterator[ServingEndpoint]:
         """Get all serving endpoints.
         
@@ -3456,7 +3635,7 @@ def patch(self,
               name: str,
               *,
               add_tags: Optional[List[EndpointTag]] = None,
-              delete_tags: Optional[List[str]] = None) -> Iterator[EndpointTag]:
+              delete_tags: Optional[List[str]] = None) -> EndpointTags:
         """Update tags of a serving endpoint.
         
         Used to batch add and delete tags from a serving endpoint with a single API call.
@@ -3468,7 +3647,7 @@ def patch(self,
         :param delete_tags: List[str] (optional)
           List of tag keys to delete
         
-        :returns: Iterator over :class:`EndpointTag`
+        :returns: :class:`EndpointTags`
         """
         body = {}
         if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags]
@@ -3476,7 +3655,7 @@ def patch(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/serving-endpoints/{name}/tags', body=body, headers=headers)
-        return [EndpointTag.from_dict(v) for v in res]
+        return EndpointTags.from_dict(res)
 
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
         """Update rate limits of a serving endpoint.
@@ -3511,8 +3690,8 @@ def put_ai_gateway(
             usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse:
         """Update AI Gateway of a serving endpoint.
         
-        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
-        supported.
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+        throughput endpoints are currently supported.
         
         :param name: str
           The name of the serving endpoint whose AI Gateway is being updated. This field is required.
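A minimal call sketch, assuming a provisioned throughput endpoint named 'my-endpoint' (now supported alongside external model endpoints):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    w.serving_endpoints.put_ai_gateway(
        name='my-endpoint',
        usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
    )
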
@@ -3672,14 +3851,16 @@ def update_config(self,
           The name of the serving endpoint to update. This field is required.
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
           Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+          Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
+          existing provisioned throughput endpoints that have never had an inference table configured; in
+          these cases please use AI Gateway to manage inference tables.
         :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-          entities.
+          The list of served entities under the serving endpoint config.
         :param served_models: List[:class:`ServedModelInput`] (optional)
-          (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-          serving endpoint can have up to 15 served models.
+          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+          config.
         :param traffic_config: :class:`TrafficConfig` (optional)
-          The traffic config defining how invocations to the serving endpoint should be routed.
+          The traffic configuration associated with the serving endpoint config.
         
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.
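A sketch of updating a config with served entities instead of the deprecated served_models; the names are hypothetical, and the call returns a waiter for ServingEndpointDetailed:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    w.serving_endpoints.update_config(
        name='my-endpoint',
        served_entities=[ServedEntityInput(entity_name='main.default.my_model',
                                           entity_version='2',
                                           workload_size='Small',
                                           scale_to_zero_enabled=True)],
    ).result()  # block until the config update completes
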
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 4192b2109..9868a288b 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -67,7 +67,7 @@
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         
 
-    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]])
+    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]])
 
         Updates Custom OAuth App Integration.
         
@@ -77,6 +77,9 @@
         :param integration_id: str
         :param redirect_urls: List[str] (optional)
           List of OAuth redirect urls to be updated in the custom OAuth app integration
+        :param scopes: List[str] (optional)
+          List of OAuth scopes to be updated in the custom OAuth app integration. As with redirect
+          URLs, this fully replaces the existing values instead of appending.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
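A sketch of the replace semantics described above; the integration id and scope values are hypothetical:

    from databricks.sdk import AccountClient

    a = AccountClient()
    a.custom_app_integration.update(
        integration_id='abc-123',               # hypothetical integration id
        scopes=['all-apis', 'offline_access'],  # fully replaces the existing scopes
    )
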
         
diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst
index 4bee8675d..c95bf563c 100644
--- a/docs/account/oauth2/federation_policy.rst
+++ b/docs/account/oauth2/federation_policy.rst
@@ -51,7 +51,8 @@
         
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
-          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+          The identifier for the federation policy. The identifier must contain only lowercase
+          letters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
         
         :returns: :class:`FederationPolicy`
         
@@ -61,6 +62,7 @@
         Delete account federation policy.
         
         :param policy_id: str
+          The identifier for the federation policy.
         
         
         
@@ -70,6 +72,7 @@
         Get account federation policy.
         
         :param policy_id: str
+          The identifier for the federation policy.
         
         :returns: :class:`FederationPolicy`
         
@@ -84,16 +87,19 @@
         :returns: Iterator over :class:`FederationPolicy`
         
 
-    .. py:method:: update(policy_id: str, update_mask: str [, policy: Optional[FederationPolicy]]) -> FederationPolicy
+    .. py:method:: update(policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update account federation policy.
         
         :param policy_id: str
-        :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The identifier for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
         
         :returns: :class:`FederationPolicy`
         
\ No newline at end of file
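With update_mask now optional, a partial update can name just the fields to change. A sketch; the policy id and description are hypothetical, and the client accessor name is assumed:

    from databricks.sdk import AccountClient
    from databricks.sdk.service.oauth2 import FederationPolicy

    a = AccountClient()
    updated = a.federation_policy.update(       # accessor name assumed for this API
        policy_id='my-policy',
        policy=FederationPolicy(description='rotated issuer'),
        update_mask='description',              # only this field is overwritten
    )
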
diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst
index e4293c5f2..2e0577ba4 100644
--- a/docs/account/oauth2/service_principal_federation_policy.rst
+++ b/docs/account/oauth2/service_principal_federation_policy.rst
@@ -53,7 +53,8 @@
           The service principal id for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
         :param policy_id: str (optional)
-          The identifier for the federation policy. If unspecified, the id will be assigned by Databricks.
+          The identifier for the federation policy. The identifier must contain only lowercase
+          letters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
         
         :returns: :class:`FederationPolicy`
         
@@ -65,6 +66,7 @@
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
+          The identifier for the federation policy.
         
         
         
@@ -76,6 +78,7 @@
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
+          The identifier for the federation policy.
         
         :returns: :class:`FederationPolicy`
         
@@ -92,18 +95,21 @@
         :returns: Iterator over :class:`FederationPolicy`
         
 
-    .. py:method:: update(service_principal_id: int, policy_id: str, update_mask: str [, policy: Optional[FederationPolicy]]) -> FederationPolicy
+    .. py:method:: update(service_principal_id: int, policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update service principal federation policy.
         
         :param service_principal_id: int
           The service principal id for the federation policy.
         :param policy_id: str
-        :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The identifier for the federation policy.
         :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
         
         :returns: :class:`FederationPolicy`
         
\ No newline at end of file
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index 84f3c9867..d1e89277f 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -1242,6 +1242,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ACTIVE
       :value: "ACTIVE"
 
+   .. py:attribute:: DEGRADED
+      :value: "DEGRADED"
+
    .. py:attribute:: DELETING
       :value: "DELETING"
 
diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst
index 762c454bf..85ec98250 100644
--- a/docs/dbdataclasses/cleanrooms.rst
+++ b/docs/dbdataclasses/cleanrooms.rst
@@ -54,6 +54,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ACTIVE
       :value: "ACTIVE"
 
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
    .. py:attribute:: PERMISSION_DENIED
       :value: "PERMISSION_DENIED"
 
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index cbb4059a1..3996fa511 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -47,6 +47,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: RUNNING
       :value: "RUNNING"
 
+   .. py:attribute:: RUN_LIFE_CYCLE_STATE_UNSPECIFIED
+      :value: "RUN_LIFE_CYCLE_STATE_UNSPECIFIED"
+
    .. py:attribute:: SKIPPED
       :value: "SKIPPED"
 
@@ -81,6 +84,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: MAXIMUM_CONCURRENT_RUNS_REACHED
       :value: "MAXIMUM_CONCURRENT_RUNS_REACHED"
 
+   .. py:attribute:: RUN_RESULT_STATE_UNSPECIFIED
+      :value: "RUN_RESULT_STATE_UNSPECIFIED"
+
    .. py:attribute:: SUCCESS
       :value: "SUCCESS"
 
@@ -107,6 +113,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterInstance
    :members:
    :undoc-members:
@@ -448,6 +458,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: OutputSchemaInfo
+   :members:
+   :undoc-members:
+
 .. py:class:: PauseStatus
 
    .. py:attribute:: PAUSED
diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst
index 70e09ab05..10202e55e 100644
--- a/docs/dbdataclasses/oauth2.rst
+++ b/docs/dbdataclasses/oauth2.rst
@@ -24,10 +24,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: DataPlaneInfo
-   :members:
-   :undoc-members:
-
 .. autoclass:: DeleteCustomAppIntegrationOutput
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index f82cd73c2..903cb52ff 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -20,6 +20,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: DayOfWeek
+
+   Days of the week on which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified, all days of the week will be used.
+
+   .. py:attribute:: FRIDAY
+      :value: "FRIDAY"
+
+   .. py:attribute:: MONDAY
+      :value: "MONDAY"
+
+   .. py:attribute:: SATURDAY
+      :value: "SATURDAY"
+
+   .. py:attribute:: SUNDAY
+      :value: "SUNDAY"
+
+   .. py:attribute:: THURSDAY
+      :value: "THURSDAY"
+
+   .. py:attribute:: TUESDAY
+      :value: "TUESDAY"
+
+   .. py:attribute:: WEDNESDAY
+      :value: "WEDNESDAY"
+
 .. autoclass:: DeletePipelineResponse
    :members:
    :undoc-members:
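The renamed enum is used wherever RestartWindowDaysOfWeek was accepted before; a sketch assuming RestartWindow keeps its start_hour field and that days_of_week accepts a list:

    from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

    window = RestartWindow(start_hour=2,  # restarts allowed in a five-hour window from 02:00
                           days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY])
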
@@ -273,30 +298,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: RestartWindowDaysOfWeek
-
-   Days of week in which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified all days of the week will be used.
-
-   .. py:attribute:: FRIDAY
-      :value: "FRIDAY"
-
-   .. py:attribute:: MONDAY
-      :value: "MONDAY"
-
-   .. py:attribute:: SATURDAY
-      :value: "SATURDAY"
-
-   .. py:attribute:: SUNDAY
-      :value: "SUNDAY"
-
-   .. py:attribute:: THURSDAY
-      :value: "THURSDAY"
-
-   .. py:attribute:: TUESDAY
-      :value: "TUESDAY"
-
-   .. py:attribute:: WEDNESDAY
-      :value: "WEDNESDAY"
+.. autoclass:: RunAs
+   :members:
+   :undoc-members:
 
 .. autoclass:: SchemaSpec
    :members:
diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst
index 3deefc873..af4772f77 100644
--- a/docs/dbdataclasses/serving.rst
+++ b/docs/dbdataclasses/serving.rst
@@ -22,8 +22,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayGuardrailPiiBehaviorBehavior
 
-   Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
-
    .. py:attribute:: BLOCK
       :value: "BLOCK"
 
@@ -44,8 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayRateLimitKey
 
-   Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
-
    .. py:attribute:: ENDPOINT
       :value: "ENDPOINT"
 
@@ -54,8 +50,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayRateLimitRenewalPeriod
 
-   Renewal period field for a rate limit. Currently, only 'minute' is supported.
-
    .. py:attribute:: MINUTE
       :value: "MINUTE"
 
@@ -69,8 +63,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AmazonBedrockConfigBedrockProvider
 
-   The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
-
    .. py:attribute:: AI21LABS
       :value: "AI21LABS"
 
@@ -128,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DataPlaneInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: DatabricksModelServingConfig
    :members:
    :undoc-members:
@@ -173,8 +169,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EndpointStateConfigUpdate
 
-   The state of an endpoint's config update. This informs the user if the pending_config is in progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails.
-
    .. py:attribute:: IN_PROGRESS
       :value: "IN_PROGRESS"
 
@@ -189,8 +183,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EndpointStateReady
 
-   The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY.
-
    .. py:attribute:: NOT_READY
       :value: "NOT_READY"
 
@@ -201,18 +193,45 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EndpointTags
+   :members:
+   :undoc-members:
+
 .. autoclass:: ExportMetricsResponse
    :members:
    :undoc-members:
 
+.. autoclass:: ExternalFunctionRequest
+   :members:
+   :undoc-members:
+
+.. py:class:: ExternalFunctionRequestHttpMethod
+
+   .. py:attribute:: DELETE
+      :value: "DELETE"
+
+   .. py:attribute:: GET
+      :value: "GET"
+
+   .. py:attribute:: PATCH
+      :value: "PATCH"
+
+   .. py:attribute:: POST
+      :value: "POST"
+
+   .. py:attribute:: PUT
+      :value: "PUT"
+
+.. autoclass:: ExternalFunctionResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ExternalModel
    :members:
    :undoc-members:
 
 .. py:class:: ExternalModelProvider
 
-   The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
-
    .. py:attribute:: AI21LABS
       :value: "AI21LABS"
 
@@ -281,10 +300,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PutAiGatewayRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutAiGatewayResponse
    :members:
    :undoc-members:
 
+.. autoclass:: PutRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutResponse
    :members:
    :undoc-members:
@@ -316,8 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RateLimitKey
 
-   Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
-
    .. py:attribute:: ENDPOINT
       :value: "ENDPOINT"
 
@@ -326,8 +351,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RateLimitRenewalPeriod
 
-   Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
-
    .. py:attribute:: MINUTE
       :value: "MINUTE"
 
@@ -353,8 +376,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelInputWorkloadSize
 
-   The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
-
    .. py:attribute:: LARGE
       :value: "LARGE"
 
@@ -366,9 +387,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelInputWorkloadType
 
-   The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types].
-   [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
-
    .. py:attribute:: CPU
       :value: "CPU"
 
@@ -398,8 +416,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelStateDeployment
 
-   The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the served entity was previously in a ready state but no longer is and is attempting to recover. DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED indicates that there was an error trying to bring up the served entity (e.g container image build failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in bringing up another served entity under the same endpoint and config version.
-
    .. py:attribute:: ABORTED
       :value: "ABORTED"
 
@@ -437,8 +453,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServingEndpointDetailedPermissionLevel
 
-   The permission level of the principal making the request.
-
    .. py:attribute:: CAN_MANAGE
       :value: "CAN_MANAGE"
 
@@ -477,6 +491,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: ServingModelWorkloadType
+
+   .. py:attribute:: CPU
+      :value: "CPU"
+
+   .. py:attribute:: GPU_LARGE
+      :value: "GPU_LARGE"
+
+   .. py:attribute:: GPU_MEDIUM
+      :value: "GPU_MEDIUM"
+
+   .. py:attribute:: GPU_SMALL
+      :value: "GPU_SMALL"
+
+   .. py:attribute:: MULTIGPU_MEDIUM
+      :value: "MULTIGPU_MEDIUM"
+
 .. autoclass:: TrafficConfig
    :members:
    :undoc-members:
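Note: the new `ServingModelWorkloadType` enum above consolidates the workload-type values shared by served entities and served models. A minimal sketch of how it could be used when defining a served entity; the entity name, version, and workload size are placeholders, not values from this patch:

```python
from databricks.sdk.service.serving import (ServedEntityInput,
                                            ServingModelWorkloadType)

# Hypothetical served entity; all names and sizes below are assumptions.
entity = ServedEntityInput(entity_name="main.default.my_model",
                           entity_version="1",
                           workload_size="Small",
                           workload_type=ServingModelWorkloadType.GPU_SMALL)
```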
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index 40791a143..af7229f34 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -7,20 +7,22 @@
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on.
 
-    .. py:method:: create( [, app: Optional[App]]) -> Wait[App]
+    .. py:method:: create( [, app: Optional[App], no_compute: Optional[bool]]) -> Wait[App]
 
         Create an app.
         
         Creates a new app.
         
         :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
         
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         
 
-    .. py:method:: create_and_wait( [, app: Optional[App], timeout: datetime.timedelta = 0:20:00]) -> App
+    .. py:method:: create_and_wait( [, app: Optional[App], no_compute: Optional[bool], timeout: datetime.timedelta = 0:20:00]) -> App
 
 
     .. py:method:: delete(name: str) -> App
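For illustration, a minimal sketch of creating an app with the new `no_compute` flag; the app name is an assumption. Since the app is deliberately not started, the plain `create()` waiter is used instead of `create_and_wait()`:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
# Create the app without starting compute for it.
waiter = w.apps.create(app=App(name="my-app"), no_compute=True)
print(waiter.response.name)
```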
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst
index f1bd70317..0151fcce2 100644
--- a/docs/workspace/files/files.rst
+++ b/docs/workspace/files/files.rst
@@ -13,9 +13,12 @@
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
     
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD,
-    PUT, and DELETE to manage files and directories specified using their URI path. The path is always
-    absolute.
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+    
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
     
     [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
 
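A short sketch of enabling the experimental Files API client mentioned above from Python, assuming host and credentials come from the environment or a configuration profile:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.config import Config

# Opt in to the experimental Files API client for this session.
cfg = Config(enable_experimental_files_api_client=True)
w = WorkspaceClient(config=cfg)
```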
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index b7d677f03..49bebe60d 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -1,5 +1,5 @@
-``w.jobs``: Jobs
-================
+``w.jobs``: Jobs (latest)
+=========================
 .. currentmodule:: databricks.sdk.service.jobs
 
 .. py:class:: JobsExt
@@ -199,6 +199,7 @@
         :param job_clusters: List[:class:`JobCluster`] (optional)
           A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
           cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
         :param max_concurrent_runs: int (optional)
           An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
           able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -230,7 +231,9 @@
           clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
           to the job.
         :param tasks: List[:class:`Task`] (optional)
-          A list of task specifications to be executed by this job.
+          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+          to determine if more results are available.
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
         :param trigger: :class:`TriggerSettings` (optional)
@@ -315,7 +318,7 @@
         :returns: :class:`ExportRunOutput`
         
 
-    .. py:method:: get(job_id: int) -> Job
+    .. py:method:: get(job_id: int [, page_token: Optional[str]]) -> Job
 
 
         Usage:
@@ -351,8 +354,16 @@
         
         Retrieves the details for a single job.
         
+        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
+        
         :param job_id: int
           The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
         
         :returns: :class:`Job`
         
@@ -516,7 +527,8 @@
         Retrieves a list of jobs.
         
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
         :param limit: int (optional)
           The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
           default value is 20.
@@ -578,7 +590,8 @@
           If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
           active and completed runs. This field cannot be `true` when active_only is `true`.
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
         :param job_id: int (optional)
           The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
         :param limit: int (optional)
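A sketch of the Jobs API 2.2 pagination described above: collecting all tasks of a job whose `tasks` array exceeds 100 elements. The job ID is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
job_id = 123456789  # placeholder job ID
tasks, page_token = [], None
while True:
    job = w.jobs.get(job_id=job_id, page_token=page_token)
    tasks.extend(job.settings.tasks or [])
    # next_page_token is set when more tasks or job clusters remain.
    page_token = job.next_page_token
    if not page_token:
        break
print(f"fetched {len(tasks)} tasks")
```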
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index 1ba875740..ec31991ef 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -15,7 +15,7 @@
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
     data quality and specify how to handle records that fail those expectations.
 
-    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
+    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
 
         Usage:
@@ -95,6 +95,12 @@
           Whether Photon is enabled for this pipeline.
         :param restart_window: :class:`RestartWindow` (optional)
           Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -379,7 +385,7 @@
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
 
-    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
 
 
         Usage:
@@ -475,6 +481,12 @@
           Whether Photon is enabled for this pipeline.
         :param restart_window: :class:`RestartWindow` (optional)
           Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
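A minimal sketch of the new `run_as` setting on pipeline create/update; the pipeline name and the service principal application ID are assumptions, and only one of `user_name` or `service_principal_name` may be set:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import RunAs

w = WorkspaceClient()
created = w.pipelines.create(
    name="my-pipeline",  # placeholder
    run_as=RunAs(service_principal_name="1234abcd-app-id"),  # or user_name=...
)
```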
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 430a13182..c0cd774a3 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -39,8 +39,8 @@
         :param config: :class:`EndpointCoreConfigInput`
           The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
+          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+          throughput endpoints are currently supported.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
@@ -62,7 +62,6 @@
         Delete a serving endpoint.
         
         :param name: str
-          The name of the serving endpoint. This field is required.
         
         
         
@@ -98,7 +97,7 @@
     .. py:method:: get_open_ai_client()
 
 
-    .. py:method:: get_open_api(name: str)
+    .. py:method:: get_open_api(name: str) -> GetOpenApiResponse
 
         Get the schema for a serving endpoint.
         
@@ -108,7 +107,7 @@
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         
-        
+        :returns: :class:`GetOpenApiResponse`
         
 
     .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse
@@ -136,6 +135,27 @@
         :returns: :class:`ServingEndpointPermissions`
         
 
+    .. py:method:: http_request(connection_name: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: Optional[str], json: Optional[str], params: Optional[str]]) -> ExternalFunctionResponse
+
+        Make a call to an external service using the credentials stored in a UC Connection.
+        
+        :param connection_name: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST').
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: str (optional)
+          Additional headers for the request. If not provided, only the auth headers from the
+          connection are passed.
+        :param json: str (optional)
+          The JSON payload to send in the request body.
+        :param params: str (optional)
+          Query parameters for the request.
+        
+        :returns: :class:`ExternalFunctionResponse`
+        
+
     .. py:method:: list() -> Iterator[ServingEndpoint]
 
         Get all serving endpoints.
@@ -157,7 +177,7 @@
         :returns: :class:`ServerLogsResponse`
         
 
-    .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> Iterator[EndpointTag]
+    .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags
 
         Update tags of a serving endpoint.
         
@@ -170,7 +190,7 @@
         :param delete_tags: List[str] (optional)
           List of tag keys to delete
         
-        :returns: Iterator over :class:`EndpointTag`
+        :returns: :class:`EndpointTags`
         
 
     .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse
@@ -192,8 +212,8 @@
 
         Update AI Gateway of a serving endpoint.
         
-        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
-        supported.
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+        throughput endpoints are currently supported.
         
         :param name: str
           The name of the serving endpoint whose AI Gateway is being updated. This field is required.
@@ -288,14 +308,16 @@
           The name of the serving endpoint to update. This field is required.
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
           Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+          Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
+          existing provisioned throughput endpoints that never have inference table configured; in these cases
+          please use AI Gateway to manage inference tables.
         :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-          entities.
+          The list of served entities under the serving endpoint config.
         :param served_models: List[:class:`ServedModelInput`] (optional)
-          (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-          serving endpoint can have up to 15 served models.
+          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+          config.
         :param traffic_config: :class:`TrafficConfig` (optional)
-          The traffic config defining how invocations to the serving endpoint should be routed.
+          The traffic configuration associated with the serving endpoint config.
         
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.

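For illustration, a hedged sketch of the new `http_request()` method documented above; the connection name, path, and query parameters are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()
resp = w.serving_endpoints.http_request(
    connection_name="my_connection",  # UC connection holding the credentials
    method=ExternalFunctionRequestHttpMethod.GET,
    path="/api/v1/ping",  # relative path on the external service
    params='{"verbose": "true"}',  # optional query parameters, passed as a JSON string
)
```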
From fe9877aaa86dbdfbe54c4f83c0eed3f98c410017 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Fri, 17 Jan 2025 14:23:42 +0100
Subject: [PATCH 084/136] [Fix] Properly pass query parameters in apps and
 oauth2 (#862)

## What changes are proposed in this pull request?

This PR properly passes query parameters in `apps` and `oauth2` API
methods that are relying on inlined request bodies.

## How is this tested?

This case is now properly tested by the SDK code generator. I've also
verified that the API works as intended.
---
 databricks/sdk/service/apps.py   | 2 ++
 databricks/sdk/service/oauth2.py | 8 ++++++++
 2 files changed, 10 insertions(+)

diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 37af1011d..843c94e94 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -981,6 +981,8 @@ def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None
           See :method:wait_get_app_active for more details.
         """
         body = app.as_dict()
+        query = {}
+        if no_compute is not None: query['no_compute'] = no_compute
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index dc51cd455..7c98e4cd5 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -950,6 +950,8 @@ def create(self,
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -1039,6 +1041,8 @@ def update(self,
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
@@ -1433,6 +1437,8 @@ def create(self,
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do(
@@ -1536,6 +1542,8 @@ def update(self,
         :returns: :class:`FederationPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do(

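For illustration, a sketch of one call affected by this fix: `policy_id` is now sent as a query parameter instead of being silently dropped. The service attribute follows the naming in the release notes below, and the policy fields and ID are assumptions:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy

a = AccountClient()
policy = a.account_federation_policy.create(
    policy=FederationPolicy(description="example policy"),  # assumed field
    policy_id="my-policy",  # now correctly passed as a query parameter
)
```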
From 11fdf46563ae8070c11d38a320c77430676212a1 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Mon, 20 Jan 2025 09:50:11 +0100
Subject: [PATCH 085/136] [Internal] Add unit tests for external-browser
 authentication (#863)

## What changes are proposed in this pull request?

This PR is a no-op that adds unit tests for the "external-browser"
credential strategy.

## How is this tested?

This PR adds the unit tests. These are a little brittle, though, and
would likely benefit from a stronger typing model in the future.
---
 databricks/sdk/credentials_provider.py |  14 ++-
 tests/test_credentials_provider.py     | 145 +++++++++++++++++++++++++
 2 files changed, 153 insertions(+), 6 deletions(-)
 create mode 100644 tests/test_credentials_provider.py

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index c20464d5e..1604fbcb3 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -188,6 +188,7 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -195,12 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client_id
         client_secret = cfg.azure_client_secret
-
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    # Note that these are local to the Python SDK and not reused by other SDKs.
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -210,12 +210,13 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-        # Force a refresh in case the loaded credentials are expired.
-        # If the refresh fails, rather than throw exception we will initiate a new OAuth login flow.
         try:
+            # Proactively refresh the loaded credentials. If the token is
+            # expired and the refresh fails, we fall back to a new OAuth
+            # login flow below.
             credentials.token()
             return credentials(cfg)
-        # TODO: we should ideally use more specific exceptions.
+        # TODO: We should ideally use more specific exceptions.
         except Exception as e:
             logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
 
@@ -226,6 +227,7 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     consent = oauth_client.initiate_consent()
     if not consent:
         return None
+
     credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
diff --git a/tests/test_credentials_provider.py b/tests/test_credentials_provider.py
new file mode 100644
index 000000000..67e6f5b35
--- /dev/null
+++ b/tests/test_credentials_provider.py
@@ -0,0 +1,145 @@
+from unittest.mock import Mock
+
+from databricks.sdk.credentials_provider import external_browser
+
+
+def test_external_browser_refresh_success(mocker):
+    """Tests successful refresh of existing credentials."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id' # Or use azure_client_id
+    mock_cfg.client_secret = 'test-client-secret' # Or use azure_client_secret
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_session_credentials = Mock()
+    mock_session_credentials.token.return_value = "valid_token" # Simulate successful refresh
+    mock_token_cache.load.return_value = mock_session_credentials
+
+    # Mock SessionCredentials.
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_session_credentials.token.assert_called_once() # Verify token refresh was attempted
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_refresh_failure_new_oauth_flow(mocker):
+    """Tests failed refresh, triggering a new OAuth flow."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_session_credentials = Mock()
+    mock_session_credentials.token.side_effect = Exception(
+        "Simulated refresh error") # Simulate a failed refresh
+    mock_token_cache.load.return_value = mock_session_credentials
+
+    # Mock SessionCredentials.
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Mock OAuthClient.
+    mock_oauth_client = Mock()
+    mock_consent = Mock()
+    mock_consent.launch_external_browser.return_value = mock_session_credentials
+    mock_oauth_client.initiate_consent.return_value = mock_consent
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_session_credentials.token.assert_called_once() # Refresh attempt
+    mock_oauth_client.initiate_consent.assert_called_once()
+    mock_consent.launch_external_browser.assert_called_once()
+    mock_token_cache.save.assert_called_once_with(mock_session_credentials)
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_no_cached_credentials(mocker):
+    """Tests the case where there are no cached credentials, initiating a new OAuth flow."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_token_cache.load.return_value = None # No cached credentials
+
+    # Mock SessionCredentials.
+    mock_session_credentials = Mock()
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Mock OAuthClient.
+    mock_consent = Mock()
+    mock_consent.launch_external_browser.return_value = mock_session_credentials
+    mock_oauth_client = Mock()
+    mock_oauth_client.initiate_consent.return_value = mock_consent
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_oauth_client.initiate_consent.assert_called_once()
+    mock_consent.launch_external_browser.assert_called_once()
+    mock_token_cache.save.assert_called_once_with(mock_session_credentials)
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_consent_fails(mocker):
+    """Tests the case where OAuth consent initiation fails."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_token_cache.load.return_value = None # No cached credentials
+
+    # Mock OAuthClient.
+    mock_oauth_client = Mock()
+    mock_oauth_client.initiate_consent.return_value = None # Simulate consent failure
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_oauth_client.initiate_consent.assert_called_once()
+    assert got_credentials_provider is None

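For context, a minimal sketch of the flow these tests exercise from a user's perspective; the host is a placeholder. The first login opens a browser, and later sessions reuse the on-disk token cache:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient(host="https://example.cloud.databricks.com",
                    auth_type="external-browser")
print(w.current_user.me().user_name)
```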
From 267d369711a2ac1c27b27f3c0f56201a5ce241b0 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Mon, 20 Jan 2025 14:24:02 +0100
Subject: [PATCH 086/136] [Release] Release v0.41.0 (#864)

### New Features and Improvements

* Add `serving.http_request` to call external functions.
([#857](https://github.com/databricks/databricks-sdk-py/pull/857)).
* Files API client: recover on download failures
([#844](https://github.com/databricks/databricks-sdk-py/pull/844))
([#845](https://github.com/databricks/databricks-sdk-py/pull/845)).


### Bug Fixes

* Properly pass query parameters in apps and oauth2
([#862](https://github.com/databricks/databricks-sdk-py/pull/862)).


### Internal Changes

* Add unit tests for external-browser authentication
([#863](https://github.com/databricks/databricks-sdk-py/pull/863)).
* Decouple oauth2 and serving
([#855](https://github.com/databricks/databricks-sdk-py/pull/855)).
* Migrate workflows that need write access to use hosted runners
([#850](https://github.com/databricks/databricks-sdk-py/pull/850)).
* Stop testing Python 3.7 on Ubuntu
([#858](https://github.com/databricks/databricks-sdk-py/pull/858)).


### API Changes:

* Added
[w.access_control](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/access_control.html)
workspace-level service.
* Added `http_request()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html)
workspace-level service.
* Added `no_compute` field for
`databricks.sdk.service.apps.CreateAppRequest`.
 * Added `has_more` field for `databricks.sdk.service.jobs.BaseJob`.
 * Added `has_more` field for `databricks.sdk.service.jobs.BaseRun`.
* Added `page_token` field for
`databricks.sdk.service.jobs.GetJobRequest`.
* Added `has_more` and `next_page_token` fields for
`databricks.sdk.service.jobs.Job`.
 * Added `has_more` field for `databricks.sdk.service.jobs.Run`.
* Added `clean_rooms_notebook_output` field for
`databricks.sdk.service.jobs.RunOutput`.
* Added `scopes` field for
`databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
* Added `run_as` field for
`databricks.sdk.service.pipelines.CreatePipeline`.
* Added `run_as` field for
`databricks.sdk.service.pipelines.EditPipeline`.
* Added `authorization_details` and `endpoint_url` fields for
`databricks.sdk.service.serving.DataPlaneInfo`.
* Added `contents` field for
`databricks.sdk.service.serving.GetOpenApiResponse`.
* Added `activated`, `activation_url`, `authentication_type`, `cloud`,
`comment`, `created_at`, `created_by`,
`data_recipient_global_metastore_id`, `ip_access_list`, `metastore_id`,
`name`, `owner`, `properties_kvpairs`, `region`, `sharing_code`,
`tokens`, `updated_at` and `updated_by` fields for
`databricks.sdk.service.sharing.RecipientInfo`.
* Added `expiration_time` field for
`databricks.sdk.service.sharing.RecipientInfo`.
* Changed `update()` method for
[a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html)
account-level service with new required argument order.
* Changed `update()` method for
[a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html)
account-level service with new required argument order.
* Changed `update()` method for
[w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html)
workspace-level service to return
`databricks.sdk.service.sharing.RecipientInfo` dataclass.
* Changed `update()` method for
[w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html)
workspace-level service return type to become non-empty.
* Changed `get_open_api()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html)
workspace-level service return type to become non-empty.
* Changed `patch()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html)
workspace-level service to return
`databricks.sdk.service.serving.EndpointTags` dataclass.
* Changed `databricks.sdk.service.serving.EndpointTagList` dataclass to
`databricks.sdk.service.serving.EndpointTags`.
* Changed `collaborator_alias` field for
`databricks.sdk.service.cleanrooms.CleanRoomCollaborator` to be
required.
* Changed `update_mask` field for
`databricks.sdk.service.oauth2.UpdateAccountFederationPolicyRequest` to
no longer be required.
* Changed `update_mask` field for
`databricks.sdk.service.oauth2.UpdateServicePrincipalFederationPolicyRequest`
to no longer be required.
* Changed `days_of_week` field for
`databricks.sdk.service.pipelines.RestartWindow` to type
`databricks.sdk.service.pipelines.DayOfWeekList` dataclass.
* Changed `behavior` field for
`databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior` to no
longer be required.
* Changed `project_id` and `region` fields for
`databricks.sdk.service.serving.GoogleCloudVertexAiConfig` to be
required.
* Changed `workload_type` field for
`databricks.sdk.service.serving.ServedEntityInput` to type
`databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
* Changed `workload_type` field for
`databricks.sdk.service.serving.ServedEntityOutput` to type
`databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
* Changed `workload_type` field for
`databricks.sdk.service.serving.ServedModelOutput` to type
`databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.

OpenAPI SHA: 58905570a9928fc9ed31fba14a2edaf9a7c55b08, Date: 2025-01-20

---------

Signed-off-by: Renaud Hartert 
---
 .codegen/_openapi_sha                        |   2 +-
 CHANGELOG.md                                 |  63 ++++++++
 databricks/sdk/__init__.py                   |  13 +-
 databricks/sdk/service/compute.py            |   4 +
 databricks/sdk/service/dashboards.py         |  11 +-
 databricks/sdk/service/iam.py                | 158 +++++++++++++++++++
 databricks/sdk/service/sharing.py            | 105 ++++++------
 databricks/sdk/version.py                    |   2 +-
 docs/dbdataclasses/compute.rst               |  12 ++
 docs/dbdataclasses/dashboards.rst            |   5 +-
 docs/dbdataclasses/iam.rst                   |  26 +++
 docs/dbdataclasses/sharing.rst               |   4 -
 docs/workspace/iam/access_control.rst        |  23 +++
 docs/workspace/iam/index.rst                 |   1 +
 docs/workspace/serving/serving_endpoints.rst |  17 +-
 docs/workspace/sharing/providers.rst         |   6 +-
 docs/workspace/sharing/recipients.rst        |  20 +--
 17 files changed, 386 insertions(+), 86 deletions(-)
 create mode 100644 docs/workspace/iam/access_control.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 431b7678a..cabc6cf48 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-05a10af4ed43566968119b43605f0a7fecbe780f
\ No newline at end of file
+58905570a9928fc9ed31fba14a2edaf9a7c55b08
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 4f7aa3cc2..ba19e3ef5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,68 @@
 # Version changelog
 
+## [Release] Release v0.41.0
+
+### New Features and Improvements
+
+ * Add `serving.http_request` to call external functions. ([#857](https://github.com/databricks/databricks-sdk-py/pull/857)).
+ * Files API client: recover on download failures ([#844](https://github.com/databricks/databricks-sdk-py/pull/844)) ([#845](https://github.com/databricks/databricks-sdk-py/pull/845)).
+
+
+### Bug Fixes
+
+ * Properly pass query parameters in apps and oauth2 ([#862](https://github.com/databricks/databricks-sdk-py/pull/862)).
+
+
+### Internal Changes
+
+ * Add unit tests for external-browser authentication ([#863](https://github.com/databricks/databricks-sdk-py/pull/863)).
+ * Decouple oauth2 and serving  ([#855](https://github.com/databricks/databricks-sdk-py/pull/855)).
+ * Migrate workflows that need write access to use hosted runners ([#850](https://github.com/databricks/databricks-sdk-py/pull/850)).
+ * Stop testing Python 3.7 on Ubuntu ([#858](https://github.com/databricks/databricks-sdk-py/pull/858)).
+
+
+### API Changes:
+
+ * Added [w.access_control](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/access_control.html) workspace-level service.
+ * Added `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service.
+ * Added `no_compute` field for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.BaseJob`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.BaseRun`.
+ * Added `page_token` field for `databricks.sdk.service.jobs.GetJobRequest`.
+ * Added `has_more` and `next_page_token` fields for `databricks.sdk.service.jobs.Job`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.Run`.
+ * Added `clean_rooms_notebook_output` field for `databricks.sdk.service.jobs.RunOutput`.
+ * Added `scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
+ * Added `run_as` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `run_as` field for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `authorization_details` and `endpoint_url` fields for `databricks.sdk.service.serving.DataPlaneInfo`.
+ * Added `contents` field for `databricks.sdk.service.serving.GetOpenApiResponse`.
+ * Added `activated`, `activation_url`, `authentication_type`, `cloud`, `comment`, `created_at`, `created_by`, `data_recipient_global_metastore_id`, `ip_access_list`, `metastore_id`, `name`, `owner`, `properties_kvpairs`, `region`, `sharing_code`, `tokens`, `updated_at` and `updated_by` fields for `databricks.sdk.service.sharing.RecipientInfo`.
+ * Added `expiration_time` field for `databricks.sdk.service.sharing.RecipientInfo`.
+ * Changed `update()` method for [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service with new required argument order.
+ * Changed `update()` method for [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service with new required argument order.
+ * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service to return `databricks.sdk.service.sharing.RecipientInfo` dataclass.
+ * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service return type to become non-empty.
+ * Changed `get_open_api()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service return type to become non-empty.
+ * Changed `patch()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.EndpointTags` dataclass.
+ * Changed `databricks.sdk.service.serving.EndpointTagList` dataclass to `databricks.sdk.service.serving.EndpointTags`.
+ * Changed `collaborator_alias` field for `databricks.sdk.service.cleanrooms.CleanRoomCollaborator` to be required.
+ * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateAccountFederationPolicyRequest` to no longer be required.
+ * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateServicePrincipalFederationPolicyRequest` to no longer be required.
+ * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.DayOfWeekList` dataclass.
+ * Changed `behavior` field for `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior` to no longer be required.
+ * Changed `project_id` and `region` fields for `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` to be required.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityInput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedModelOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+
+OpenAPI SHA: 58905570a9928fc9ed31fba14a2edaf9a7c55b08, Date: 2025-01-20
+
 ## [Release] Release v0.40.0
 
 ### API Changes:
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 80fe188b8..c81eb626c 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -1,5 +1,6 @@
 # Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
 
+import logging
 from typing import Optional
 
 import databricks.sdk.core as client
@@ -42,7 +43,8 @@
                                             PolicyFamiliesAPI)
 from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccountAccessControlAPI,
+from databricks.sdk.service.iam import (AccessControlAPI,
+                                        AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
                                         AccountGroupsAPI,
                                         AccountServicePrincipalsAPI,
@@ -99,6 +101,8 @@
 from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
                                               SecretsAPI, WorkspaceAPI)
 
+_LOG = logging.getLogger(__name__)
+
 
 def _make_dbutils(config: client.Config):
     # We try to directly check if we are in runtime, instead of
@@ -118,6 +122,7 @@ def _make_dbutils(config: client.Config):
 
 def _make_files_client(apiClient: client.ApiClient, config: client.Config):
     if config.enable_experimental_files_api_client:
+        _LOG.info("Experimental Files API client is enabled")
         return FilesExt(apiClient, config)
     else:
         return FilesAPI(apiClient)
@@ -184,6 +189,7 @@ def __init__(self,
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
+        self._access_control = AccessControlAPI(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
@@ -292,6 +298,11 @@ def api_client(self) -> client.ApiClient:
     def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
+    @property
+    def access_control(self) -> AccessControlAPI:
+        """Rule based Access Control for Databricks Resources."""
+        return self._access_control
+
     @property
     def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 0afdb6f19..53240b4ad 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -4184,6 +4184,10 @@ class EventDetailsCause(Enum):
 
 class EventType(Enum):
 
+    ADD_NODES_FAILED = 'ADD_NODES_FAILED'
+    AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE'
+    AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF'
+    AUTOSCALING_FAILED = 'AUTOSCALING_FAILED'
     AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT'
     CREATING = 'CREATING'
     DBFS_DOWN = 'DBFS_DOWN'
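The new `EventType` values above can be used to filter cluster events; a short sketch with a placeholder cluster ID:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import EventType

w = WorkspaceClient()
# Iterate over only the autoscaling-related events of a cluster.
for event in w.clusters.events(cluster_id="1234-567890-ab12cd34",
                               event_types=[EventType.AUTOSCALING_BACKOFF,
                                            EventType.ADD_NODES_FAILED]):
    print(event.timestamp, event.type)
```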
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 34bd58995..221727230 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -381,8 +381,9 @@ class GenieMessage:
     status: Optional[MessageStatus] = None
     """MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
-    Executing AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -678,8 +679,9 @@ class MessageErrorType(Enum):
 class MessageStatus(Enum):
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
-    Executing AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -696,6 +698,7 @@ class MessageStatus(Enum):
     FAILED = 'FAILED'
     FETCHING_METADATA = 'FETCHING_METADATA'
     FILTERING_CONTEXT = 'FILTERING_CONTEXT'
+    PENDING_WAREHOUSE = 'PENDING_WAREHOUSE'
     QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
     SUBMITTED = 'SUBMITTED'
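
As a hedged illustration of handling the new state, the sketch below polls a Genie message while it is in a self-resolving status. The identifiers are placeholders, and the Genie accessor and method names are inferred from the `GenieAPI` import and the docstring references above:

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import MessageStatus

w = WorkspaceClient()

# Placeholder identifiers for an existing Genie conversation.
space_id, conversation_id, message_id = '...', '...', '...'

# States that resolve on their own; EXECUTING_QUERY is excluded because,
# per the docstring above, it persists until the client fetches the result.
WAITING = {
    MessageStatus.FETCHING_METADATA,
    MessageStatus.FILTERING_CONTEXT,
    MessageStatus.ASKING_AI,
    MessageStatus.PENDING_WAREHOUSE,
}

msg = w.genie.get_message(space_id, conversation_id, message_id)
while msg.status in WAITING:
    time.sleep(5)
    msg = w.genie.get_message(space_id, conversation_id, message_id)

if msg.status == MessageStatus.EXECUTING_QUERY:
    result = w.genie.get_message_query_result(space_id, conversation_id, message_id)
```
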
 
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 28e5247a6..2f752d06c 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -106,6 +106,58 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse:
                    user_name=d.get('user_name', None))
 
 
+@dataclass
+class Actor:
+    """represents an identity trying to access a resource - user or a service principal group can be a
+    principal of a permission set assignment but an actor is always a user or a service principal"""
+
+    actor_id: Optional[int] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the Actor into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Actor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Actor:
+        """Deserializes the Actor from a dictionary."""
+        return cls(actor_id=d.get('actor_id', None))
+
+
+@dataclass
+class CheckPolicyResponse:
+    consistency_token: ConsistencyToken
+
+    is_permitted: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict()
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.consistency_token: body['consistency_token'] = self.consistency_token
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CheckPolicyResponse:
+        """Deserializes the CheckPolicyResponse from a dictionary."""
+        return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken),
+                   is_permitted=d.get('is_permitted', None))
+
+
 @dataclass
 class ComplexValue:
     display: Optional[str] = None
@@ -148,6 +200,28 @@ def from_dict(cls, d: Dict[str, any]) -> ComplexValue:
                    value=d.get('value', None))
 
 
+@dataclass
+class ConsistencyToken:
+    value: str
+
+    def as_dict(self) -> dict:
+        """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ConsistencyToken:
+        """Deserializes the ConsistencyToken from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class DeleteResponse:
 
@@ -1219,6 +1293,49 @@ def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput:
                    user_name=d.get('user_name', None))
 
 
+class RequestAuthzIdentity(Enum):
+    """Defines the identity to be used for authZ of the request on the server side. See one pager for
+    for more information: http://go/acl/service-identity"""
+
+    REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY'
+    REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT'
+
+
+@dataclass
+class ResourceInfo:
+    id: str
+    """Id of the current resource."""
+
+    legacy_acl_path: Optional[str] = None
+    """The legacy acl path of the current resource."""
+
+    parent_resource_info: Optional[ResourceInfo] = None
+    """Parent resource info for the current resource. The parent may have another parent."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
+        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
+        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ResourceInfo:
+        """Deserializes the ResourceInfo from a dictionary."""
+        return cls(id=d.get('id', None),
+                   legacy_acl_path=d.get('legacy_acl_path', None),
+                   parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo))
+
+
 @dataclass
 class ResourceMeta:
     resource_type: Optional[str] = None
@@ -1622,6 +1739,47 @@ def from_dict(cls, d: Dict[str, any]) -> WorkspacePermissions:
         return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput))
 
 
+class AccessControlAPI:
+    """Rule based Access Control for Databricks Resources."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def check_policy(self,
+                     actor: Actor,
+                     permission: str,
+                     resource: str,
+                     consistency_token: ConsistencyToken,
+                     authz_identity: RequestAuthzIdentity,
+                     *,
+                     resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse:
+        """Check access policy to a resource.
+        
+        :param actor: :class:`Actor`
+        :param permission: str
+        :param resource: str
+          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+        :param consistency_token: :class:`ConsistencyToken`
+        :param authz_identity: :class:`RequestAuthzIdentity`
+        :param resource_info: :class:`ResourceInfo` (optional)
+        
+        :returns: :class:`CheckPolicyResponse`
+        """
+
+        query = {}
+        if actor is not None: query['actor'] = actor.as_dict()
+        if authz_identity is not None: query['authz_identity'] = authz_identity.value
+        if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict()
+        if permission is not None: query['permission'] = permission
+        if resource is not None: query['resource'] = resource
+        if resource_info is not None: query['resource_info'] = resource_info.as_dict()
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/access-control/check-policy-v2', query=query, headers=headers)
+        return CheckPolicyResponse.from_dict(res)
+
+
 class AccountAccessControlAPI:
     """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
     grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
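
A minimal sketch of exercising the new endpoint through the workspace client property added in this patch; the actor ID, permission, resource string, and empty consistency token are illustrative placeholders modeled on the `Ex:` patterns above, not values confirmed by the API:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.iam import (Actor, ConsistencyToken,
                                        RequestAuthzIdentity)

w = WorkspaceClient()

# All argument values below are placeholders for the sketch.
resp = w.access_control.check_policy(
    actor=Actor(actor_id=1234),
    permission='servicePrincipal/use',
    resource='accounts/<account-id>/servicePrincipals/<sp-id>',
    consistency_token=ConsistencyToken(value=''),
    authz_identity=RequestAuthzIdentity.REQUEST_AUTHZ_IDENTITY_USER_CONTEXT,
)
print(resp.is_permitted)
```
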
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 000c85e2c..1990c7c54 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -35,7 +35,8 @@ class CreateProvider:
     """Description about the provider."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is required when the __authentication_type__ is **TOKEN** or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS** or not provided."""
 
     def as_dict(self) -> dict:
         """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body."""
@@ -76,7 +77,7 @@ class CreateRecipient:
     """Description about the recipient."""
 
     data_recipient_global_metastore_id: Optional[str] = None
-    """The global Unity Catalog metastore id provided by the data recipient. This field is required
+    """The global Unity Catalog metastore id provided by the data recipient. This field is only present
     when the __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
 
@@ -90,10 +91,12 @@ class CreateRecipient:
     """Username of the recipient owner."""
 
     properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None
-    """Recipient properties as map of string key-value pairs."""
+    """Recipient properties as map of string key-value pairs. When provided in update request, the
+    specified properties will override the existing properties. To add and remove properties, one
+    would need to perform a read-modify-write."""
 
     sharing_code: Optional[str] = None
-    """The one-time sharing code provided by the data recipient. This field is required when the
+    """The one-time sharing code provided by the data recipient. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
 
     def as_dict(self) -> dict:
@@ -581,7 +584,7 @@ class ProviderInfo:
     data_provider_global_metastore_id: Optional[str] = None
     """The global UC metastore id of the data provider. This field is only present when the
     __authentication_type__ is **DATABRICKS**. The identifier is of format
-    ::."""
+    __cloud__:__region__:__metastore-uuid__."""
 
     metastore_id: Optional[str] = None
     """UUID of the provider's UC metastore. This field is only present when the __authentication_type__
@@ -594,10 +597,12 @@ class ProviderInfo:
     """Username of Provider owner."""
 
     recipient_profile: Optional[RecipientProfile] = None
-    """The recipient profile. This field is only present when the authentication_type is `TOKEN`."""
+    """The recipient profile. This field is only present when the authentication_type is `TOKEN` or
+    `OAUTH_CLIENT_CREDENTIALS`."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is only present when the authentication_type is `TOKEN` or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS** or not provided."""
 
     region: Optional[str] = None
     """Cloud region of the provider's UC metastore. This field is only present when the
@@ -607,7 +612,7 @@ class ProviderInfo:
     """Time at which this Provider was created, in epoch milliseconds."""
 
     updated_by: Optional[str] = None
-    """Username of user who last modified Share."""
+    """Username of user who last modified Provider."""
 
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
@@ -704,8 +709,8 @@ class RecipientInfo:
     """The delta sharing authentication type."""
 
     cloud: Optional[str] = None
-    """Cloud vendor of the recipient's Unity Catalog Metstore. This field is only present when the
-    __authentication_type__ is **DATABRICKS**`."""
+    """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the
+    __authentication_type__ is **DATABRICKS**."""
 
     comment: Optional[str] = None
     """Description about the recipient."""
@@ -721,12 +726,15 @@ class RecipientInfo:
     when the __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
 
+    expiration_time: Optional[int] = None
+    """Expiration timestamp of the token, in epoch milliseconds."""
+
     ip_access_list: Optional[IpAccessList] = None
     """IP Access List"""
 
     metastore_id: Optional[str] = None
-    """Unique identifier of recipient's Unity Catalog metastore. This field is only present when the
-    __authentication_type__ is **DATABRICKS**"""
+    """Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the
+    __authentication_type__ is **DATABRICKS**."""
 
     name: Optional[str] = None
     """Name of Recipient."""
@@ -735,10 +743,12 @@ class RecipientInfo:
     """Username of the recipient owner."""
 
     properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None
-    """Recipient properties as map of string key-value pairs."""
+    """Recipient properties as map of string key-value pairs. When provided in update request, the
+    specified properties will override the existing properties. To add and remove properties, one
+    would need to perform a read-modify-write."""
 
     region: Optional[str] = None
-    """Cloud region of the recipient's Unity Catalog Metstore. This field is only present when the
+    """Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
 
     sharing_code: Optional[str] = None
@@ -766,6 +776,7 @@ def as_dict(self) -> dict:
         if self.created_by is not None: body['created_by'] = self.created_by
         if self.data_recipient_global_metastore_id is not None:
             body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -790,6 +801,7 @@ def as_shallow_dict(self) -> dict:
         if self.created_by is not None: body['created_by'] = self.created_by
         if self.data_recipient_global_metastore_id is not None:
             body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -813,6 +825,7 @@ def from_dict(cls, d: Dict[str, any]) -> RecipientInfo:
                    created_at=d.get('created_at', None),
                    created_by=d.get('created_by', None),
                    data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None),
+                   expiration_time=d.get('expiration_time', None),
                    ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -869,7 +882,7 @@ class RecipientTokenInfo:
     retrieved."""
 
     created_at: Optional[int] = None
-    """Time at which this recipient Token was created, in epoch milliseconds."""
+    """Time at which this recipient token was created, in epoch milliseconds."""
 
     created_by: Optional[str] = None
     """Username of recipient token creator."""
@@ -881,10 +894,10 @@ class RecipientTokenInfo:
     """Unique ID of the recipient token."""
 
     updated_at: Optional[int] = None
-    """Time at which this recipient Token was updated, in epoch milliseconds."""
+    """Time at which this recipient token was updated, in epoch milliseconds."""
 
     updated_by: Optional[str] = None
-    """Username of recipient Token updater."""
+    """Username of recipient token updater."""
 
     def as_dict(self) -> dict:
         """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body."""
@@ -973,7 +986,7 @@ class RotateRecipientToken:
     expire the existing token immediately, negative number will return an error."""
 
     name: Optional[str] = None
-    """The name of the recipient."""
+    """The name of the Recipient."""
 
     def as_dict(self) -> dict:
         """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body."""
@@ -1023,9 +1036,6 @@ def from_dict(cls, d: Dict[str, any]) -> SecurablePropertiesKvPairs:
         return cls(properties=d.get('properties', None))
 
 
-SecurablePropertiesMap = Dict[str, str]
-
-
 @dataclass
 class ShareInfo:
     comment: Optional[str] = None
@@ -1346,7 +1356,8 @@ class UpdateProvider:
     """Username of Provider owner."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is required when the __authentication_type__ is **TOKEN** or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS** or not provided."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body."""
@@ -1393,7 +1404,7 @@ class UpdateRecipient:
     """Name of the recipient."""
 
     new_name: Optional[str] = None
-    """New name for the recipient."""
+    """New name for the recipient. ."""
 
     owner: Optional[str] = None
     """Username of the recipient owner."""
@@ -1439,25 +1450,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
                    properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs))
 
 
-@dataclass
-class UpdateResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
-        """Deserializes the UpdateResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class UpdateShare:
     comment: Optional[str] = None
@@ -1583,7 +1575,8 @@ def create(self,
         :param comment: str (optional)
           Description about the provider.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         """
@@ -1735,7 +1728,8 @@ def update(self,
         :param owner: str (optional)
           Username of Provider owner.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         """
@@ -1830,7 +1824,7 @@ def create(self,
         """Create a share recipient.
         
         Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-        be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
         
         :param name: str
           Name of Recipient.
@@ -1839,8 +1833,8 @@ def create(self,
         :param comment: str (optional)
           Description about the recipient.
         :param data_recipient_global_metastore_id: str (optional)
-          The global Unity Catalog metastore id provided by the data recipient. This field is required when
-          the __authentication_type__ is **DATABRICKS**. The identifier is of format
+          The global Unity Catalog metastore id provided by the data recipient. This field is only present
+          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
         :param expiration_time: int (optional)
           Expiration timestamp of the token, in epoch milliseconds.
@@ -1849,9 +1843,11 @@ def create(self,
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs.
+          Recipient properties as map of string key-value pairs. When provided in update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
         :param sharing_code: str (optional)
-          The one-time sharing code provided by the data recipient. This field is required when the
+          The one-time sharing code provided by the data recipient. This field is only present when the
           __authentication_type__ is **DATABRICKS**.
         
         :returns: :class:`RecipientInfo`
@@ -1957,7 +1953,7 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci
         The caller must be the owner of the recipient.
         
         :param name: str
-          The name of the recipient.
+          The name of the Recipient.
         :param existing_token_expire_in_seconds: int
           The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of
           existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire
@@ -2021,7 +2017,7 @@ def update(self,
                ip_access_list: Optional[IpAccessList] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
-               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None):
+               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo:
         """Update a share recipient.
         
         Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
@@ -2037,7 +2033,7 @@ def update(self,
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
-          New name for the recipient.
+          New name for the recipient. .
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
@@ -2045,7 +2041,7 @@ def update(self,
           specified properties will override the existing properties. To add and remove properties, one would
           need to perform a read-modify-write.
         
-        
+        :returns: :class:`RecipientInfo`
         """
         body = {}
         if comment is not None: body['comment'] = comment
@@ -2056,7 +2052,8 @@ def update(self,
         if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers)
+        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers)
+        return RecipientInfo.from_dict(res)
 
 
 class SharesAPI:
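
With `update` now returning the recipient, callers can read back the applied state directly; a short sketch, assuming an existing recipient named `my-recipient`:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# 'my-recipient' is a placeholder for an existing recipient name.
info = w.recipients.update(name='my-recipient', comment='updated contact')
print(info.name, info.comment)
```
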
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index eb9b6f12e..9f86a39e2 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.40.0'
+__version__ = '0.41.0'
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index 9c628c476..6a9a06671 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -495,6 +495,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EventType
 
+   .. py:attribute:: ADD_NODES_FAILED
+      :value: "ADD_NODES_FAILED"
+
+   .. py:attribute:: AUTOMATIC_CLUSTER_UPDATE
+      :value: "AUTOMATIC_CLUSTER_UPDATE"
+
+   .. py:attribute:: AUTOSCALING_BACKOFF
+      :value: "AUTOSCALING_BACKOFF"
+
+   .. py:attribute:: AUTOSCALING_FAILED
+      :value: "AUTOSCALING_FAILED"
+
    .. py:attribute:: AUTOSCALING_STATS_REPORT
       :value: "AUTOSCALING_STATS_REPORT"
 
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 22a3ea95d..6d0e847ba 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -254,7 +254,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: MessageStatus
 
-   MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
+   MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `PENDING_WAREHOUSE`: Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
 
    .. py:attribute:: ASKING_AI
       :value: "ASKING_AI"
@@ -277,6 +277,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: FILTERING_CONTEXT
       :value: "FILTERING_CONTEXT"
 
+   .. py:attribute:: PENDING_WAREHOUSE
+      :value: "PENDING_WAREHOUSE"
+
    .. py:attribute:: QUERY_RESULT_EXPIRED
       :value: "QUERY_RESULT_EXPIRED"
 
diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst
index 643da3d47..6df58ae4e 100644
--- a/docs/dbdataclasses/iam.rst
+++ b/docs/dbdataclasses/iam.rst
@@ -12,10 +12,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Actor
+   :members:
+   :undoc-members:
+
+.. autoclass:: CheckPolicyResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ComplexValue
    :members:
    :undoc-members:
 
+.. autoclass:: ConsistencyToken
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -242,6 +254,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: RequestAuthzIdentity
+
   Defines the identity to be used for authZ of the request on the server side. See the one-pager for more information: http://go/acl/service-identity
+
+   .. py:attribute:: REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY
+      :value: "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY"
+
+   .. py:attribute:: REQUEST_AUTHZ_IDENTITY_USER_CONTEXT
+      :value: "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT"
+
+.. autoclass:: ResourceInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: ResourceMeta
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index 2db59fcbe..ed4a4c006 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -343,10 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: UpdateShare
    :members:
    :undoc-members:
diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst
new file mode 100644
index 000000000..a5f1feeda
--- /dev/null
+++ b/docs/workspace/iam/access_control.rst
@@ -0,0 +1,23 @@
+``w.access_control``: RbacService
+=================================
+.. currentmodule:: databricks.sdk.service.iam
+
+.. py:class:: AccessControlAPI
+
+    Rule based Access Control for Databricks Resources.
+
+    .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse
+
+        Check access policy to a resource.
+        
+        :param actor: :class:`Actor`
+        :param permission: str
+        :param resource: str
+          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+        :param consistency_token: :class:`ConsistencyToken`
+        :param authz_identity: :class:`RequestAuthzIdentity`
+        :param resource_info: :class:`ResourceInfo` (optional)
+        
+        :returns: :class:`CheckPolicyResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/iam/index.rst b/docs/workspace/iam/index.rst
index 2a98cc9ae..00a7f1fe7 100644
--- a/docs/workspace/iam/index.rst
+++ b/docs/workspace/iam/index.rst
@@ -7,6 +7,7 @@ Manage users, service principals, groups and their permissions in Accounts and W
 .. toctree::
    :maxdepth: 1
 
+   access_control
    account_access_control_proxy
    current_user
    groups
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index c0cd774a3..d9c806489 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -135,24 +135,23 @@
         :returns: :class:`ServingEndpointPermissions`
         
 
-    .. py:method:: http_request(connection_name: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: Optional[str], json: Optional[str], params: Optional[str]]) -> ExternalFunctionResponse
+    .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> ExternalFunctionResponse
 
         Make external services call using the credentials stored in UC Connection.
-        
-        :param connection_name: str
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
           The connection name to use. This is required to identify the external connection.
         :param method: :class:`ExternalFunctionRequestHttpMethod`
-          The HTTP method to use (e.g., 'GET', 'POST').
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
         :param path: str
           The relative path for the API endpoint. This is required.
-        :param headers: str (optional)
+        :param headers: Dict[str,str] (optional)
           Additional headers for the request. If not provided, only auth headers from connections would be
           passed.
-        :param json: str (optional)
-          The JSON payload to send in the request body.
-        :param params: str (optional)
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
           Query parameters for the request.
-        
         :returns: :class:`ExternalFunctionResponse`
         
 
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 7cf398ac0..7d27acc3d 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -44,7 +44,8 @@
         :param comment: str (optional)
           Description about the provider.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         
@@ -228,7 +229,8 @@
         :param owner: str (optional)
           Username of Provider owner.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         
\ No newline at end of file
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index 44f2042bb..76e1da171 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -39,7 +39,7 @@
         Create a share recipient.
         
         Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-        be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
         
         :param name: str
           Name of Recipient.
@@ -48,8 +48,8 @@
         :param comment: str (optional)
           Description about the recipient.
         :param data_recipient_global_metastore_id: str (optional)
-          The global Unity Catalog metastore id provided by the data recipient. This field is required when
-          the __authentication_type__ is **DATABRICKS**. The identifier is of format
+          The global Unity Catalog metastore id provided by the data recipient. This field is only present
+          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
         :param expiration_time: int (optional)
           Expiration timestamp of the token, in epoch milliseconds.
@@ -58,9 +58,11 @@
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs.
+          Recipient properties as map of string key-value pairs. When provided in update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
         :param sharing_code: str (optional)
-          The one-time sharing code provided by the data recipient. This field is required when the
+          The one-time sharing code provided by the data recipient. This field is only present when the
           __authentication_type__ is **DATABRICKS**.
         
         :returns: :class:`RecipientInfo`
@@ -174,7 +176,7 @@
         The caller must be the owner of the recipient.
         
         :param name: str
-          The name of the recipient.
+          The name of the Recipient.
         :param existing_token_expire_in_seconds: int
           The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of
           existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire
@@ -224,7 +226,7 @@
         :returns: :class:`GetRecipientSharePermissionsResponse`
         
 
-    .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]])
+    .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) -> RecipientInfo
 
 
         Usage:
@@ -259,7 +261,7 @@
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
-          New name for the recipient.
+          New name for the recipient. .
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
@@ -267,5 +269,5 @@
           specified properties will override the existing properties. To add and remove properties, one would
           need to perform a read-modify-write.
         
-        
+        :returns: :class:`RecipientInfo`
         
\ No newline at end of file

From 5576d32844cc04c7aeb332a56c40216883df066b Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Tue, 21 Jan 2025 21:46:56 +0100
Subject: [PATCH 087/136] [Internal] Add CICD environment to the User Agent
 (#866)

## What changes are proposed in this pull request?

This PR adds the CI/CD environment to the User Agent of each SDK outbound
request. The implementation is similar to the one used in the Go SDK.

## How is this tested?

Added unit tests.
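
A quick local illustration of the detection, assuming no other CI variables are set in the environment; `_cicd_provider` is the cache introduced below:

```python
import os

from databricks.sdk import useragent

os.environ['GITHUB_ACTIONS'] = 'true'
useragent._cicd_provider = None  # reset the cached detection

# The user agent string now carries a 'cicd/github' entry.
assert 'cicd/github' in useragent.to_string()
```
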
---
 databricks/sdk/useragent.py | 54 +++++++++++++++++++++++++++++++++++++
 tests/test_config.py        |  5 ++++
 tests/test_core.py          |  7 ++++-
 tests/test_user_agent.py    | 44 ++++++++++++++++++++++++++++++
 4 files changed, 109 insertions(+), 1 deletion(-)

diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py
index 5b15d2822..45adfe51d 100644
--- a/databricks/sdk/useragent.py
+++ b/databricks/sdk/useragent.py
@@ -148,4 +148,58 @@ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
     base.extend(_extra)
     base.extend(_get_upstream_user_agent_info())
     base.extend(_get_runtime_info())
+    if cicd_provider() != "":
+        base.append((CICD_KEY, cicd_provider()))
     return " ".join(f"{k}/{v}" for k, v in base)
+
+
+# List of CI/CD providers and pairs of envvar/value that are used to detect them.
+_PROVIDERS = {
+    "github": [("GITHUB_ACTIONS", "true")],
+    "gitlab": [("GITLAB_CI", "true")],
+    "jenkins": [("JENKINS_URL", "")],
+    "azure-devops": [("TF_BUILD", "True")],
+    "circle": [("CIRCLECI", "true")],
+    "travis": [("TRAVIS", "true")],
+    "bitbucket": [("BITBUCKET_BUILD_NUMBER", "")],
+    "google-cloud-build": [("PROJECT_ID", ""), ("BUILD_ID", ""), ("PROJECT_NUMBER", ""), ("LOCATION", "")],
+    "aws-code-build": [("CODEBUILD_BUILD_ARN", "")],
+    "tf-cloud": [("TFC_RUN_ID", "")],
+}
+
+# Private variable to store the CI/CD provider. This value is computed at
+# the first invocation of cicd_provider() and is cached for subsequent calls.
+_cicd_provider = None
+
+
+def cicd_provider() -> str:
+    """Return the CI/CD provider if detected, or an empty string otherwise."""
+
+    # This function is thread-safe because (i) assignments are atomic, and (ii)
+    # computing the CI/CD provider is idempotent.
+    global _cicd_provider
+    if _cicd_provider is not None:
+        return _cicd_provider
+
+    providers = []
+    for p in _PROVIDERS:
+        found = True
+        for envvar, value in _PROVIDERS[p]:
+            v = os.getenv(envvar)
+            if v is None or (value != "" and v != value):
+                found = False
+                break
+
+        if found:
+            providers.append(p)
+
+    if len(providers) == 0:
+        _cicd_provider = ""
+    else:
+        # TODO: reconsider what to do if multiple providers are detected.
+        # The current mechanism has the benefit of being deterministic and
+        # robust to ordering changes in _PROVIDERS.
+        providers.sort()
+        _cicd_provider = providers[0]
+
+    return _cicd_provider
diff --git a/tests/test_config.py b/tests/test_config.py
index 2eac6d2f8..ebc8d683a 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -42,6 +42,11 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
+    # Clear all environment variables and cached CICD provider.
+    for k in os.environ:
+        monkeypatch.delenv(k, raising=False)
+    useragent._cicd_provider = None
+
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
diff --git a/tests/test_core.py b/tests/test_core.py
index 1cca428cb..32431172b 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -8,7 +8,7 @@
 
 import pytest
 
-from databricks.sdk import WorkspaceClient, errors
+from databricks.sdk import WorkspaceClient, errors, useragent
 from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
@@ -178,6 +178,11 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
+    # Clear all environment variables and cached CICD provider.
+    for k in os.environ:
+        monkeypatch.delenv(k, raising=False)
+    useragent._cicd_provider = None
+
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
index 5083d9908..ba6f694f5 100644
--- a/tests/test_user_agent.py
+++ b/tests/test_user_agent.py
@@ -1,3 +1,5 @@
+import os
+
 import pytest
 
 from databricks.sdk.version import __version__
@@ -40,3 +42,45 @@ def test_user_agent_with_partner(user_agent):
     user_agent.with_partner('differenttest')
     assert 'partner/test' in user_agent.to_string()
     assert 'partner/differenttest' in user_agent.to_string()
+
+
+@pytest.fixture(scope="function")
+def clear_cicd():
+    # Save and clear env vars.
+    original_env = os.environ.copy()
+    os.environ.clear()
+
+    # Clear cached CICD provider.
+    from databricks.sdk import useragent
+    useragent._cicd_provider = None
+
+    yield
+
+    # Restore env vars.
+    os.environ = original_env
+
+
+def test_user_agent_cicd_no_provider(clear_cicd):
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd' not in user_agent
+
+
+def test_user_agent_cicd_one_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent
+
+
+def test_user_agent_cicd_two_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+    os.environ['GITLAB_CI'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent

From 4bcfb0ad3c817f6e635bb930ff433f56ef5cbd6f Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Fri, 24 Jan 2025 15:46:56 +0100
Subject: [PATCH 088/136] [Fix] Fix docs generation when two services have the
 same name (#872)

## What changes are proposed in this pull request?

Fix docs generation when two services have the same name. Currently,
only one service docs will be generated.

## How is this tested?

Regenerated current Docs
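
The fix below keys tags by client kind plus (optionally) parent service instead of the bare tag name; a standalone sketch of the resulting key scheme, with illustrative tag dicts:

```python
def tag_key(tag: dict) -> str:
    # 'w' for workspace-level services, 'a' for account-level ones.
    key = 'a' if tag.get('x-databricks-is-accounts') else 'w'
    parent = tag.get('x-databricks-parent-service')
    if parent:
        # SDK generation strips the "account" prefix from account services.
        key = f"{key}.{parent.lower().removeprefix('account')}"
    return f"{key}.{tag['x-databricks-service']}".lower()

# Two tags that share a service name no longer collide:
assert tag_key({'x-databricks-service': 'Budgets'}) == 'w.budgets'
assert tag_key({'x-databricks-service': 'Budgets',
                'x-databricks-is-accounts': True}) == 'a.budgets'
```
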
---
 docs/gen-client-docs.py | 21 ++++++++++++++++-----
 1 file changed, 16 insertions(+), 5 deletions(-)

diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py
index ac18406b7..6ebfa7bab 100644
--- a/docs/gen-client-docs.py
+++ b/docs/gen-client-docs.py
@@ -267,11 +267,22 @@ def _load_mapping(self) -> dict[str, Tag]:
         pkgs = {p.name: p for p in self.packages}
         spec = json.loads(self._openapi_spec())
         for tag in spec['tags']:
+            is_account = tag.get('x-databricks-is-accounts')
+            # Unique identifier for the tag. Note that the service name may not be unique
+            key = 'a' if is_account else 'w'
+            parent_service = tag.get('x-databricks-parent-service')
+            if parent_service:
+                # SDK generation removes the "account" prefix from account services
+                clean_parent_service = parent_service.lower().removeprefix("account")
+                key = f"{key}.{clean_parent_service}"
+
+            key = f"{key}.{tag['x-databricks-service']}".lower()
+
             t = Tag(name=tag['name'],
                     service=tag['x-databricks-service'],
                     is_account=tag.get('x-databricks-is-accounts', False),
                     package=pkgs[tag['x-databricks-package']])
-            mapping[tag['name']] = t
+            mapping[key] = t
         return mapping
 
     @staticmethod
@@ -360,7 +371,7 @@ def service_docs(self, client_inst, client_prefix: str) -> list[ServiceDoc]:
                            service_name=service_name,
                            class_name=class_name,
                            doc=class_doc,
-                           tag=self._get_tag_name(service_inst.__class__.__name__, service_name),
+                           tag=self._get_tag_name(service_inst.__class__.__name__, client_prefix, service_name),
                            methods=self.class_methods(service_inst),
                            property=self.class_properties(service_inst)))
         return all
@@ -399,13 +410,13 @@ def write_dataclass_docs(self):
    
    {all}''')
 
-    def _get_tag_name(self, class_name, service_name) -> Tag:
+    def _get_tag_name(self, class_name, client_prefix, service_name) -> Tag:
         if class_name[-3:] == 'Ext':
             # ClustersExt, DbfsExt, WorkspaceExt, but not ExternalLocations
             class_name = class_name.replace('Ext', 'API')
         class_name = class_name[:-3]
-        for tag_name, t in self.mapping.items():
-            if t.service.lower() == str(class_name).lower():
+        for key, t in self.mapping.items():
+            if key == f'{client_prefix}.{str(class_name).lower()}':
                 return t
         raise KeyError(f'Cannot find {class_name} / {service_name} tag')
 

From 762c57b9bfa14cc30bf5538007f116b676a50172 Mon Sep 17 00:00:00 2001
From: Kirill Safonov <122353021+ksafonov-db@users.noreply.github.com>
Date: Wed, 29 Jan 2025 15:20:47 +0100
Subject: [PATCH 089/136] [Internal] Extract "before retry" handler, use it to
 rewind the stream (#878)

## What changes are proposed in this pull request?

- Introduce a separate handler to be called before we retry the API
call. This makes sure the handler is called both when (1) we receive an
error response we want to retry on and (2) when a low-level connection
exception is thrown.
- Rewind the stream to the initial position in this handler (if
applicable).

## How is this tested?

Existing tests.

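A minimal standalone sketch of the new hook, assuming a transient error type: the decorator calls `before_retry` ahead of each re-attempt, which is where the client now rewinds seekable bodies:

```python
import io
from datetime import timedelta

from databricks.sdk.retries import retried

body = io.BytesIO(b'payload')
attempts = 0

def flaky_upload():
    global attempts
    attempts += 1
    # Fails on the second attempt unless the stream was rewound first.
    assert body.read() == b'payload'
    if attempts < 2:
        raise IOError('transient failure')

retried(on=[IOError],
        timeout=timedelta(seconds=30),
        before_retry=lambda: body.seek(0))(flaky_upload)()
```
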
---
 databricks/sdk/_base_client.py | 41 +++++++++++++++++-----------------
 databricks/sdk/retries.py      |  6 ++++-
 2 files changed, 25 insertions(+), 22 deletions(-)

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index e61dd39c3..f0950f656 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -159,16 +159,29 @@ def do(self,
         if isinstance(data, (str, bytes)):
             data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
 
-        # Only retry if the request is not a stream or if the stream is seekable and
-        # we can rewind it. This is necessary to avoid bugs where the retry doesn't
-        # re-read already read data from the body.
-        if data is not None and not self._is_seekable_stream(data):
-            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
-            call = self._perform
-        else:
+        if not data:
+            # The request is not a stream.
             call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
                            is_retryable=self._is_retryable,
                            clock=self._clock)(self._perform)
+        elif self._is_seekable_stream(data):
+            # Keep track of the initial position of the stream so that we can rewind to it
+            # if we need to retry the request.
+            initial_data_position = data.tell()
+
+            def rewind():
+                logger.debug(f"Rewinding input data to offset {initial_data_position} before retry")
+                data.seek(initial_data_position)
+
+            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                           is_retryable=self._is_retryable,
+                           clock=self._clock,
+                           before_retry=rewind)(self._perform)
+        else:
+            # Do not retry if the stream is not seekable. This is necessary to avoid bugs
+            # where the retry doesn't re-read already read data from the stream.
+            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
+            call = self._perform
 
         response = call(method,
                         url,
@@ -249,12 +262,6 @@ def _perform(self,
                  files=None,
                  data=None,
                  auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
-        # Keep track of the initial position of the stream so that we can rewind it if
-        # we need to retry the request.
-        initial_data_position = 0
-        if self._is_seekable_stream(data):
-            initial_data_position = data.tell()
-
         response = self._session.request(method,
                                          url,
                                          params=self._fix_query_string(query),
@@ -266,16 +273,8 @@ def _perform(self,
                                          stream=raw,
                                          timeout=self._http_timeout_seconds)
         self._record_request_log(response, raw=raw or data is not None or files is not None)
-
         error = self._error_parser.get_api_error(response)
         if error is not None:
-            # If the request body is a seekable stream, rewind it so that it is ready
-            # to be read again in case of a retry.
-            #
-            # TODO: This should be moved into a "before-retry" hook to avoid one
-            # unnecessary seek on the last failed retry before aborting.
-            if self._is_seekable_stream(data):
-                data.seek(initial_data_position)
             raise error from None
 
         return response
diff --git a/databricks/sdk/retries.py b/databricks/sdk/retries.py
index b98c54281..4f55087ea 100644
--- a/databricks/sdk/retries.py
+++ b/databricks/sdk/retries.py
@@ -13,7 +13,8 @@ def retried(*,
             on: Sequence[Type[BaseException]] = None,
             is_retryable: Callable[[BaseException], Optional[str]] = None,
             timeout=timedelta(minutes=20),
-            clock: Clock = None):
+            clock: Clock = None,
+            before_retry: Callable = None):
     has_allowlist = on is not None
     has_callback = is_retryable is not None
     if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
@@ -54,6 +55,9 @@ def wrapper(*args, **kwargs):
                         raise err
 
                     logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
+                    if before_retry:
+                        before_retry()
+
                     clock.sleep(sleep + random())
                     attempt += 1
             raise TimeoutError(f'Timed out after {timeout}') from last_err

From 533939668b50f66f02a5cc345aabbc086c153e9e Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Wed, 29 Jan 2025 18:10:20 +0100
Subject: [PATCH 090/136] [Internal] Update Model Serving `http_request` mixin
 to correctly use the underlying API.  (#876)

## What changes are proposed in this pull request?

This PR updates the Model Serving `http_request` function so that it
properly uses the underlying generated API.

This PR also updates the API of a few unrelated services.

## How is this tested?

Added unit tests from PR #874
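
A hedged sketch of calling the updated mixin; the connection name and path are placeholders, and the dictionaries reflect the corrected parameter types shown in the docs changes below:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

# 'my_connection' and '/api/status' are placeholders; headers, json and
# params are now plain dictionaries rather than JSON-encoded strings.
resp = w.serving_endpoints.http_request(
    conn='my_connection',
    method=ExternalFunctionRequestHttpMethod.GET,
    path='/api/status',
    params={'verbose': 'true'},
)
```
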
---
 .codegen/_openapi_sha                         |   2 +-
 databricks/sdk/__init__.py                    |  13 +-
 databricks/sdk/mixins/open_ai_client.py       |  35 +-
 databricks/sdk/service/billing.py             | 348 ++++++++++++++++++
 databricks/sdk/service/catalog.py             |  77 +---
 databricks/sdk/service/cleanrooms.py          |  72 +++-
 databricks/sdk/service/compute.py             |  36 ++
 databricks/sdk/service/dashboards.py          |   5 +
 databricks/sdk/service/jobs.py                |  86 ++++-
 databricks/sdk/service/oauth2.py              |  46 ++-
 databricks/sdk/service/serving.py             |  74 ++--
 databricks/sdk/service/settings.py            | 206 +++++++++++
 docs/account/billing/budget_policy.rst        |  86 +++++
 docs/account/billing/index.rst                |   1 +
 .../account/oauth2/custom_app_integration.rst |  10 +-
 .../settings/enable_ip_access_lists.rst       |  57 +++
 docs/account/settings/index.rst               |   1 +
 docs/account/settings/settings.rst            |   6 +
 docs/dbdataclasses/billing.rst                |  29 ++
 docs/dbdataclasses/catalog.rst                |  89 +----
 docs/dbdataclasses/cleanrooms.rst             |  15 +
 docs/dbdataclasses/compute.rst                |   4 +
 docs/dbdataclasses/jobs.rst                   |  13 +
 docs/dbdataclasses/serving.rst                |   8 +-
 docs/dbdataclasses/settings.rst               |  15 +
 docs/workspace/catalog/credentials.rst        |  68 ++++
 docs/workspace/catalog/index.rst              |   1 +
 docs/workspace/index.rst                      |   1 -
 docs/workspace/jobs/jobs.rst                  |  13 +-
 docs/workspace/serving/serving_endpoints.rst  |  12 +-
 tests/test_open_ai_mixin.py                   |  21 ++
 31 files changed, 1225 insertions(+), 225 deletions(-)
 create mode 100644 docs/account/billing/budget_policy.rst
 create mode 100644 docs/account/settings/enable_ip_access_lists.rst
 create mode 100644 docs/workspace/catalog/credentials.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index cabc6cf48..722bd2c6c 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-58905570a9928fc9ed31fba14a2edaf9a7c55b08
\ No newline at end of file
+840c660106f820a1a5dff931d51fa5f65cd9fdd9
\ No newline at end of file
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index c81eb626c..c7f9295a0 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -13,8 +13,9 @@
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
-from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
-                                            LogDeliveryAPI, UsageDashboardsAPI)
+from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
+                                            BudgetsAPI, LogDeliveryAPI,
+                                            UsageDashboardsAPI)
 from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
@@ -80,7 +81,7 @@
     AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
     ComplianceSecurityProfileAPI, CredentialsManagerAPI,
     CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
-    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI,
+    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI,
     EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
     NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
     RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
@@ -845,6 +846,7 @@ def __init__(self,
         self._api_client = client.ApiClient(self._config)
         self._access_control = AccountAccessControlAPI(self._api_client)
         self._billable_usage = BillableUsageAPI(self._api_client)
+        self._budget_policy = BudgetPolicyAPI(self._api_client)
         self._credentials = CredentialsAPI(self._api_client)
         self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
         self._encryption_keys = EncryptionKeysAPI(self._api_client)
@@ -890,6 +892,11 @@ def billable_usage(self) -> BillableUsageAPI:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
+    @property
+    def budget_policy(self) -> BudgetPolicyAPI:
+        """A service serves REST API about Budget policies."""
+        return self._budget_policy
+
     @property
     def credentials(self) -> CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
index 5f9713117..e5bea9607 100644
--- a/databricks/sdk/mixins/open_ai_client.py
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -1,8 +1,9 @@
 import json as js
 from typing import Dict, Optional
 
+from requests import Response
+
 from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
-                                            ExternalFunctionResponse,
                                             ServingEndpointsAPI)
 
 
@@ -63,7 +64,7 @@ def http_request(self,
                      *,
                      headers: Optional[Dict[str, str]] = None,
                      json: Optional[Dict[str, str]] = None,
-                     params: Optional[Dict[str, str]] = None) -> ExternalFunctionResponse:
+                     params: Optional[Dict[str, str]] = None) -> Response:
         """Make external services call using the credentials stored in UC Connection.
         **NOTE:** Experimental: This API may change or be removed in a future release without warning.
         :param conn: str
@@ -79,13 +80,27 @@ def http_request(self,
           JSON payload for the request.
         :param params: Dict[str,str] (optional)
           Query parameters for the request.
-        :returns: :class:`ExternalFunctionResponse`
+        :returns: :class:`Response`
         """
+        response = Response()
+        response.status_code = 200
+        server_response = super().http_request(connection_name=conn,
+                                               method=method,
+                                               path=path,
+                                               headers=js.dumps(headers) if headers is not None else None,
+                                               json=js.dumps(json) if json is not None else None,
+                                               params=js.dumps(params) if params is not None else None)
+
+        # Read the content from the HttpRequestResponse object
+        if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
+            raw_content = server_response.contents.read() # Read the bytes
+        else:
+            raise ValueError("Invalid response from the server.")
+
+        # Set the raw content
+        if isinstance(raw_content, bytes):
+            response._content = raw_content
+        else:
+            raise ValueError("Contents must be bytes.")
 
-        return super.http_request(connection_name=conn,
-                                  method=method,
-                                  path=path,
-                                  headers=js.dumps(headers),
-                                  json=js.dumps(json),
-                                  params=js.dumps(params),
-                                  )
+        return response
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index 62f596d0b..d58765f23 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -11,6 +11,8 @@
 
 _LOG = logging.getLogger('databricks.sdk')
 
+from databricks.sdk.service import compute
+
 # all definitions in this file are in alphabetical order
 
 
@@ -311,6 +313,44 @@ def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdCla
                    values=d.get('values', None))
 
 
+@dataclass
+class BudgetPolicy:
+    """Contains the BudgetPolicy details."""
+
+    policy_id: str
+    """The Id of the policy. This field is generated by Databricks and globally unique."""
+
+    custom_tags: Optional[List[compute.CustomPolicyTag]] = None
+    """A list of tags defined by the customer. At most 20 entries are allowed per policy."""
+
+    policy_name: Optional[str] = None
+    """The name of the policy. - Must be unique among active policies. - Can contain only characters
+    from the ISO 8859-1 (latin1) set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BudgetPolicy:
+        """Deserializes the BudgetPolicy from a dictionary."""
+        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
+                   policy_id=d.get('policy_id', None),
+                   policy_name=d.get('policy_name', None))
+
+
 @dataclass
 class CreateBillingUsageDashboardRequest:
     dashboard_type: Optional[UsageDashboardType] = None
@@ -536,6 +576,45 @@ def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse:
         return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
+@dataclass
+class CreateBudgetPolicyRequest:
+    """A request to create a BudgetPolicy."""
+
+    custom_tags: Optional[List[compute.CustomPolicyTag]] = None
+    """A list of tags defined by the customer. At most 40 entries are allowed per policy."""
+
+    policy_name: Optional[str] = None
+    """The name of the policy. - Must be unique among active policies. - Can contain only characters of
+    0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace."""
+
+    request_id: Optional[str] = None
+    """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+    recommended. This request is only idempotent if a `request_id` is provided."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.request_id is not None: body['request_id'] = self.request_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.request_id is not None: body['request_id'] = self.request_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetPolicyRequest:
+        """Deserializes the CreateBudgetPolicyRequest from a dictionary."""
+        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
+                   policy_name=d.get('policy_name', None),
+                   request_id=d.get('request_id', None))
+
+
 @dataclass
 class CreateLogDeliveryConfigurationParams:
     log_type: LogType
@@ -670,6 +749,25 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
         return cls()
 
 
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
+        return cls()
+
+
 class DeliveryStatus(Enum):
     """The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery
     attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has
@@ -708,6 +806,44 @@ def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         return cls(contents=d.get('contents', None))
 
 
+@dataclass
+class Filter:
+    """Structured representation of a filter to be applied to a list of policies. All specified filters
+    will be applied in conjunction."""
+
+    creator_user_id: Optional[int] = None
+    """The policy creator user id to be filtered on. If unspecified, all policies will be returned."""
+
+    creator_user_name: Optional[str] = None
+    """The policy creator user name to be filtered on. If unspecified, all policies will be returned."""
+
+    policy_name: Optional[str] = None
+    """The partial name of policies to be filtered on. If unspecified, all policies will be returned."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Filter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Filter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Filter:
+        """Deserializes the Filter from a dictionary."""
+        return cls(creator_user_id=d.get('creator_user_id', None),
+                   creator_user_name=d.get('creator_user_name', None),
+                   policy_name=d.get('policy_name', None))
+
+
 @dataclass
 class GetBillingUsageDashboardResponse:
     dashboard_id: Optional[str] = None
@@ -787,6 +923,44 @@ def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListBudgetPoliciesResponse:
+    """A list of policies."""
+
+    next_page_token: Optional[str] = None
+    """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted,
+    there are no subsequent pages."""
+
+    policies: Optional[List[BudgetPolicy]] = None
+
+    previous_page_token: Optional[str] = None
+    """A token that can be sent as `page_token` to retrieve the previous page. In this field is
+    omitted, there are no previous pages."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = self.policies
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListBudgetPoliciesResponse:
+        """Deserializes the ListBudgetPoliciesResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   policies=_repeated_dict(d, 'policies', BudgetPolicy),
+                   previous_page_token=d.get('previous_page_token', None))
+
+
 class LogDeliveryConfigStatus(Enum):
     """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
     Defaults to `ENABLED`. You can [enable or disable the
@@ -1046,6 +1220,39 @@ def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse:
         return cls()
 
 
+@dataclass
+class SortSpec:
+    descending: Optional[bool] = None
+    """Whether to sort in descending order."""
+
+    field: Optional[SortSpecField] = None
+    """The filed to sort by"""
+
+    def as_dict(self) -> dict:
+        """Serializes the SortSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SortSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> SortSpec:
+        """Deserializes the SortSpec from a dictionary."""
+        return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField))
+
+
+class SortSpecField(Enum):
+
+    POLICY_NAME = 'POLICY_NAME'
+
+
 @dataclass
 class UpdateBudgetConfigurationBudget:
     account_id: Optional[str] = None
@@ -1315,6 +1522,147 @@ def download(self,
         return DownloadResponse.from_dict(res)
 
 
+class BudgetPolicyAPI:
+    """A service serves REST API about Budget policies"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               custom_tags: Optional[List[compute.CustomPolicyTag]] = None,
+               policy_name: Optional[str] = None,
+               request_id: Optional[str] = None) -> BudgetPolicy:
+        """Create a budget policy.
+        
+        Creates a new policy.
+        
+        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+          A list of tags defined by the customer. At most 40 entries are allowed per policy.
+        :param policy_name: str (optional)
+          The name of the policy. - Must be unique among active policies. - Can contain only characters of
+          0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+          recommended. This request is only idempotent if a `request_id` is provided.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+        body = {}
+        if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags]
+        if policy_name is not None: body['policy_name'] = policy_name
+        if request_id is not None: body['request_id'] = request_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
+                           body=body,
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
+
+    def delete(self, policy_id: str):
+        """Delete a budget policy.
+        
+        Deletes a policy.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                     headers=headers)
+
+    def get(self, policy_id: str) -> BudgetPolicy:
+        """Get a budget policy.
+        
+        Retrieves a policy by its ID.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
+
+    def list(self,
+             *,
+             filter_by: Optional[Filter] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None,
+             sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]:
+        """List policies.
+        
+        Lists all policies. Policies are returned in alphabetically ascending order of their names.
+        
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+          subsequent page. If unspecified, the first page will be returned.
+          
+          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+          call that provided the page token.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+        
+        :returns: Iterator over :class:`BudgetPolicy`
+        """
+
+        query = {}
+        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict()
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
+                                query=query,
+                                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield BudgetPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, policy_id: str, *, policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
+        """Update a budget policy.
+        
+        Updates a policy.
+        
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param policy: :class:`BudgetPolicy` (optional)
+          Contains the BudgetPolicy details.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+        body = policy.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           body=body,
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
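# Hypothetical account-level usage of the new BudgetPolicyAPI sketched
# above; the policy name and filter value are placeholders.
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import Filter

a = AccountClient()
policy = a.budget_policy.create(policy_name="team-ml-budget")
# list() pages through the budget-policies endpoint transparently,
# following next_page_token as shown in the implementation above.
for p in a.budget_policy.list(filter_by=Filter(policy_name="team-ml")):
    print(p.policy_id, p.policy_name)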
+
+
 class BudgetsAPI:
     """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
     account. You can set up budgets to either track account-wide spending, or apply filters to track the
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index c56acce32..63efcd627 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -787,9 +787,6 @@ class CatalogInfo:
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
 
-    securable_kind: Optional[CatalogInfoSecurableKind] = None
-    """Kind of catalog securable."""
-
     securable_type: Optional[str] = None
 
     share_name: Optional[str] = None
@@ -831,7 +828,6 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provider_name is not None: body['provider_name'] = self.provider_name
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.share_name is not None: body['share_name'] = self.share_name
         if self.storage_location is not None: body['storage_location'] = self.storage_location
@@ -862,7 +858,6 @@ def as_shallow_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provider_name is not None: body['provider_name'] = self.provider_name
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.share_name is not None: body['share_name'] = self.share_name
         if self.storage_location is not None: body['storage_location'] = self.storage_location
@@ -893,7 +888,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
                    properties=d.get('properties', None),
                    provider_name=d.get('provider_name', None),
                    provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
-                   securable_kind=_enum(d, 'securable_kind', CatalogInfoSecurableKind),
                    securable_type=d.get('securable_type', None),
                    share_name=d.get('share_name', None),
                    storage_location=d.get('storage_location', None),
@@ -902,24 +896,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
                    updated_by=d.get('updated_by', None))
 
 
-class CatalogInfoSecurableKind(Enum):
-    """Kind of catalog securable."""
-
-    CATALOG_DELTASHARING = 'CATALOG_DELTASHARING'
-    CATALOG_FOREIGN_BIGQUERY = 'CATALOG_FOREIGN_BIGQUERY'
-    CATALOG_FOREIGN_DATABRICKS = 'CATALOG_FOREIGN_DATABRICKS'
-    CATALOG_FOREIGN_MYSQL = 'CATALOG_FOREIGN_MYSQL'
-    CATALOG_FOREIGN_POSTGRESQL = 'CATALOG_FOREIGN_POSTGRESQL'
-    CATALOG_FOREIGN_REDSHIFT = 'CATALOG_FOREIGN_REDSHIFT'
-    CATALOG_FOREIGN_SNOWFLAKE = 'CATALOG_FOREIGN_SNOWFLAKE'
-    CATALOG_FOREIGN_SQLDW = 'CATALOG_FOREIGN_SQLDW'
-    CATALOG_FOREIGN_SQLSERVER = 'CATALOG_FOREIGN_SQLSERVER'
-    CATALOG_INTERNAL = 'CATALOG_INTERNAL'
-    CATALOG_STANDARD = 'CATALOG_STANDARD'
-    CATALOG_SYSTEM = 'CATALOG_SYSTEM'
-    CATALOG_SYSTEM_DELTASHARING = 'CATALOG_SYSTEM_DELTASHARING'
-
-
 class CatalogIsolationMode(Enum):
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
@@ -1158,9 +1134,6 @@ class ConnectionInfo:
     read_only: Optional[bool] = None
     """If the connection is read only."""
 
-    securable_kind: Optional[ConnectionInfoSecurableKind] = None
-    """Kind of connection securable."""
-
     securable_type: Optional[str] = None
 
     updated_at: Optional[int] = None
@@ -1189,7 +1162,6 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
         if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
@@ -1213,7 +1185,6 @@ def as_shallow_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
         if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
@@ -1237,31 +1208,12 @@ def from_dict(cls, d: Dict[str, any]) -> ConnectionInfo:
                    properties=d.get('properties', None),
                    provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
                    read_only=d.get('read_only', None),
-                   securable_kind=_enum(d, 'securable_kind', ConnectionInfoSecurableKind),
                    securable_type=d.get('securable_type', None),
                    updated_at=d.get('updated_at', None),
                    updated_by=d.get('updated_by', None),
                    url=d.get('url', None))
 
 
-class ConnectionInfoSecurableKind(Enum):
-    """Kind of connection securable."""
-
-    CONNECTION_BIGQUERY = 'CONNECTION_BIGQUERY'
-    CONNECTION_BUILTIN_HIVE_METASTORE = 'CONNECTION_BUILTIN_HIVE_METASTORE'
-    CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS'
-    CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE'
-    CONNECTION_GLUE = 'CONNECTION_GLUE'
-    CONNECTION_HTTP_BEARER = 'CONNECTION_HTTP_BEARER'
-    CONNECTION_MYSQL = 'CONNECTION_MYSQL'
-    CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG'
-    CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL'
-    CONNECTION_REDSHIFT = 'CONNECTION_REDSHIFT'
-    CONNECTION_SNOWFLAKE = 'CONNECTION_SNOWFLAKE'
-    CONNECTION_SQLDW = 'CONNECTION_SQLDW'
-    CONNECTION_SQLSERVER = 'CONNECTION_SQLSERVER'
-
-
 class ConnectionType(Enum):
     """The type of connection."""
 
@@ -6269,20 +6221,21 @@ def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
 class SecurableType(Enum):
     """The type of Unity Catalog securable"""
 
-    CATALOG = 'catalog'
-    CONNECTION = 'connection'
-    CREDENTIAL = 'credential'
-    EXTERNAL_LOCATION = 'external_location'
-    FUNCTION = 'function'
-    METASTORE = 'metastore'
-    PIPELINE = 'pipeline'
-    PROVIDER = 'provider'
-    RECIPIENT = 'recipient'
-    SCHEMA = 'schema'
-    SHARE = 'share'
-    STORAGE_CREDENTIAL = 'storage_credential'
-    TABLE = 'table'
-    VOLUME = 'volume'
+    CATALOG = 'CATALOG'
+    CLEAN_ROOM = 'CLEAN_ROOM'
+    CONNECTION = 'CONNECTION'
+    CREDENTIAL = 'CREDENTIAL'
+    EXTERNAL_LOCATION = 'EXTERNAL_LOCATION'
+    FUNCTION = 'FUNCTION'
+    METASTORE = 'METASTORE'
+    PIPELINE = 'PIPELINE'
+    PROVIDER = 'PROVIDER'
+    RECIPIENT = 'RECIPIENT'
+    SCHEMA = 'SCHEMA'
+    SHARE = 'SHARE'
+    STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL'
+    TABLE = 'TABLE'
+    VOLUME = 'VOLUME'
 
 
 @dataclass
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
index 20d57527e..813ca5f37 100755
--- a/databricks/sdk/service/cleanrooms.py
+++ b/databricks/sdk/service/cleanrooms.py
@@ -289,11 +289,24 @@ class CleanRoomAssetNotebook:
     """Base 64 representation of the notebook contents. This is the same format as returned by
     :method:workspace/export with the format of **HTML**."""
 
+    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
+    """top-level status derived from all reviews"""
+
+    reviews: Optional[List[CleanRoomNotebookReview]] = None
+    """All existing approvals or rejections"""
+
+    runner_collaborators: Optional[List[CleanRoomCollaborator]] = None
+    """collaborators that can run the notebook"""
+
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        if self.review_state is not None: body['review_state'] = self.review_state.value
+        if self.reviews: body['reviews'] = [v.as_dict() for v in self.reviews]
+        if self.runner_collaborators:
+            body['runner_collaborators'] = [v.as_dict() for v in self.runner_collaborators]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -301,12 +314,19 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        if self.review_state is not None: body['review_state'] = self.review_state
+        if self.reviews: body['reviews'] = self.reviews
+        if self.runner_collaborators: body['runner_collaborators'] = self.runner_collaborators
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
         """Deserializes the CleanRoomAssetNotebook from a dictionary."""
-        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))
+        return cls(etag=d.get('etag', None),
+                   notebook_content=d.get('notebook_content', None),
+                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
+                   reviews=_repeated_dict(d, 'reviews', CleanRoomNotebookReview),
+                   runner_collaborators=_repeated_dict(d, 'runner_collaborators', CleanRoomCollaborator))
 
 
 class CleanRoomAssetStatusEnum(Enum):
@@ -511,6 +531,56 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator:
                    organization_name=d.get('organization_name', None))
 
 
+@dataclass
+class CleanRoomNotebookReview:
+    comment: Optional[str] = None
+    """review comment"""
+
+    created_at_millis: Optional[int] = None
+    """timestamp of when the review was submitted"""
+
+    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
+    """review outcome"""
+
+    reviewer_collaborator_alias: Optional[str] = None
+    """collaborator alias of the reviewer"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
+        if self.review_state is not None: body['review_state'] = self.review_state.value
+        if self.reviewer_collaborator_alias is not None:
+            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookReview into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
+        if self.review_state is not None: body['review_state'] = self.review_state
+        if self.reviewer_collaborator_alias is not None:
+            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookReview:
+        """Deserializes the CleanRoomNotebookReview from a dictionary."""
+        return cls(comment=d.get('comment', None),
+                   created_at_millis=d.get('created_at_millis', None),
+                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
+                   reviewer_collaborator_alias=d.get('reviewer_collaborator_alias', None))
+
+
+class CleanRoomNotebookReviewNotebookReviewState(Enum):
+
+    APPROVED = 'APPROVED'
+    PENDING = 'PENDING'
+    REJECTED = 'REJECTED'
+
+
 @dataclass
 class CleanRoomNotebookTaskRun:
     """Stores information about a single task run."""
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 53240b4ad..8a48b0cc0 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -2937,6 +2937,42 @@ def from_dict(cls, d: Dict[str, any]) -> Created:
         return cls(id=d.get('id', None))
 
 
+@dataclass
+class CustomPolicyTag:
+    key: str
+    """The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be
+    "budget-policy-name", "budget-policy-id" or "budget-policy-resolution-result" - these
+    tags are preserved.
+    
+    - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+    (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17)"""
+
+    value: Optional[str] = None
+    """The value of the tag.
+    
+    - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+    (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CustomPolicyTag:
+        """Deserializes the CustomPolicyTag from a dictionary."""
+        return cls(key=d.get('key', None), value=d.get('value', None))
+
+
 @dataclass
 class DataPlaneEventDetails:
     event_type: Optional[DataPlaneEventDetailsEventType] = None
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 221727230..fab54e84b 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -846,6 +846,8 @@ class QueryAttachment:
     query: Optional[str] = None
     """AI generated SQL query"""
 
+    statement_id: Optional[str] = None
+
     title: Optional[str] = None
     """Name of the query"""
 
@@ -860,6 +862,7 @@ def as_dict(self) -> dict:
         if self.last_updated_timestamp is not None:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         if self.query is not None: body['query'] = self.query
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
         if self.title is not None: body['title'] = self.title
         return body
 
@@ -874,6 +877,7 @@ def as_shallow_dict(self) -> dict:
         if self.last_updated_timestamp is not None:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         if self.query is not None: body['query'] = self.query
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
         if self.title is not None: body['title'] = self.title
         return body
 
@@ -887,6 +891,7 @@ def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
                    instruction_title=d.get('instruction_title', None),
                    last_updated_timestamp=d.get('last_updated_timestamp', None),
                    query=d.get('query', None),
+                   statement_id=d.get('statement_id', None),
                    title=d.get('title', None))
 
 
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index dd4bc8075..44445d020 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -111,6 +111,12 @@ class BaseRun:
     description: Optional[str] = None
     """Description of the run"""
 
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. contains at least one serverless task) or whether the value
+    was specifically overridden for the run (e.g. via RunNow)."""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -240,6 +246,8 @@ def as_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -278,6 +286,8 @@ def as_shallow_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source
@@ -316,6 +326,7 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
                    description=d.get('description', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -834,6 +845,10 @@ class CreateJob:
     parameters: Optional[List[JobParameterDefinition]] = None
     """Job-level parameter definitions"""
 
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    should be."""
+
     queue: Optional[QueueSettings] = None
     """The queue settings of the job."""
 
@@ -888,6 +903,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -917,6 +933,7 @@ def as_shallow_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings
         if self.parameters: body['parameters'] = self.parameters
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
         if self.queue: body['queue'] = self.queue
         if self.run_as: body['run_as'] = self.run_as
         if self.schedule: body['schedule'] = self.schedule
@@ -946,6 +963,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateJob:
                    name=d.get('name', None),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
                    parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
                    schedule=_from_dict(d, 'schedule', CronSchedule),
@@ -2463,6 +2481,10 @@ class JobSettings:
     parameters: Optional[List[JobParameterDefinition]] = None
     """Job-level parameter definitions"""
 
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    should be."""
+
     queue: Optional[QueueSettings] = None
     """The queue settings of the job."""
 
@@ -2515,6 +2537,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -2543,6 +2566,7 @@ def as_shallow_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings
         if self.parameters: body['parameters'] = self.parameters
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
         if self.queue: body['queue'] = self.queue
         if self.run_as: body['run_as'] = self.run_as
         if self.schedule: body['schedule'] = self.schedule
@@ -2571,6 +2595,7 @@ def from_dict(cls, d: Dict[str, any]) -> JobSettings:
                    name=d.get('name', None),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
                    parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
                    schedule=_from_dict(d, 'schedule', CronSchedule),
@@ -2994,6 +3019,15 @@ class PauseStatus(Enum):
     UNPAUSED = 'UNPAUSED'
 
 
+class PerformanceTarget(Enum):
+    """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run
+    on serverless compute should be. The performance mode on the job or pipeline should map to a
+    performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget)."""
+
+    COST_OPTIMIZED = 'COST_OPTIMIZED'
+    PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED'
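# Illustrative sketch only (not part of the patch): requesting a per-run
# performance override via the new RunNow field; the job_id is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import PerformanceTarget

w = WorkspaceClient()
run = w.jobs.run_now(job_id=123,
                     performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED)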
+
+
 @dataclass
 class PeriodicTriggerConfiguration:
     interval: int
@@ -3755,6 +3789,12 @@ class Run:
     description: Optional[str] = None
     """Description of the run"""
 
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. contains at least one serverless task) or whether the value
+    was specifically overridden for the run (e.g. via RunNow)."""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -3890,6 +3930,8 @@ def as_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -3930,6 +3972,8 @@ def as_shallow_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source
@@ -3970,6 +4014,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
                    description=d.get('description', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -4356,6 +4401,11 @@ class RunNow:
     """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
     job will be run."""
 
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    compute should be. For RunNow request, the run will execute with this settings instead of ones
+    defined in job."""
+
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""
 
@@ -4411,6 +4461,7 @@ def as_dict(self) -> dict:
         if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
         if self.only: body['only'] = [v for v in self.only]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
         if self.python_named_params: body['python_named_params'] = self.python_named_params
         if self.python_params: body['python_params'] = [v for v in self.python_params]
@@ -4429,6 +4480,7 @@ def as_shallow_dict(self) -> dict:
         if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
         if self.only: body['only'] = self.only
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
         if self.python_named_params: body['python_named_params'] = self.python_named_params
         if self.python_params: body['python_params'] = self.python_params
@@ -4447,6 +4499,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunNow:
                    job_parameters=d.get('job_parameters', None),
                    notebook_params=d.get('notebook_params', None),
                    only=d.get('only', None),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
                    python_named_params=d.get('python_named_params', None),
                    python_params=d.get('python_params', None),
@@ -4862,6 +4915,12 @@ class RunTask:
     description: Optional[str] = None
     """An optional description for this task."""
 
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. contains at least one serverless task) or whether an override
+    was provided for the run (e.g. via RunNow)."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the task run begins or completes. The default
     behavior is to not send any emails."""
@@ -5010,6 +5069,8 @@ def as_dict(self) -> dict:
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.environment_key is not None: body['environment_key'] = self.environment_key
@@ -5055,6 +5116,8 @@ def as_shallow_dict(self) -> dict:
         if self.dbt_task: body['dbt_task'] = self.dbt_task
         if self.depends_on: body['depends_on'] = self.depends_on
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
         if self.email_notifications: body['email_notifications'] = self.email_notifications
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.environment_key is not None: body['environment_key'] = self.environment_key
@@ -5101,6 +5164,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunTask:
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
                    environment_key=d.get('environment_key', None),
@@ -5180,12 +5244,16 @@ class SparkJarTask:
     
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
+    run_as_repl: Optional[bool] = None
+    """Deprecated. A value of `false` is no longer supported."""
+
     def as_dict(self) -> dict:
         """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
         if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
         if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -5194,6 +5262,7 @@ def as_shallow_dict(self) -> dict:
         if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
         if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
         if self.parameters: body['parameters'] = self.parameters
+        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
         return body
 
     @classmethod
@@ -5201,7 +5270,8 @@ def from_dict(cls, d: Dict[str, any]) -> SparkJarTask:
         """Deserializes the SparkJarTask from a dictionary."""
         return cls(jar_uri=d.get('jar_uri', None),
                    main_class_name=d.get('main_class_name', None),
-                   parameters=d.get('parameters', None))
+                   parameters=d.get('parameters', None),
+                   run_as_repl=d.get('run_as_repl', None))
 
 
 @dataclass
@@ -6622,6 +6692,7 @@ class TerminationCodeCode(Enum):
     
     [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
 
+    BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED'
     CANCELED = 'CANCELED'
     CLOUD_FAILURE = 'CLOUD_FAILURE'
     CLUSTER_ERROR = 'CLUSTER_ERROR'
@@ -7144,6 +7215,7 @@ def create(self,
                name: Optional[str] = None,
                notification_settings: Optional[JobNotificationSettings] = None,
                parameters: Optional[List[JobParameterDefinition]] = None,
+               performance_target: Optional[PerformanceTarget] = None,
                queue: Optional[QueueSettings] = None,
                run_as: Optional[JobRunAs] = None,
                schedule: Optional[CronSchedule] = None,
@@ -7216,6 +7288,9 @@ def create(self,
           `email_notifications` and `webhook_notifications` for this job.
         :param parameters: List[:class:`JobParameterDefinition`] (optional)
           Job-level parameter definitions
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
@@ -7263,6 +7338,7 @@ def create(self,
         if name is not None: body['name'] = name
         if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
         if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters]
+        if performance_target is not None: body['performance_target'] = performance_target.value
         if queue is not None: body['queue'] = queue.as_dict()
         if run_as is not None: body['run_as'] = run_as.as_dict()
         if schedule is not None: body['schedule'] = schedule.as_dict()
@@ -7761,6 +7837,7 @@ def run_now(self,
                 job_parameters: Optional[Dict[str, str]] = None,
                 notebook_params: Optional[Dict[str, str]] = None,
                 only: Optional[List[str]] = None,
+                performance_target: Optional[PerformanceTarget] = None,
                 pipeline_params: Optional[PipelineParams] = None,
                 python_named_params: Optional[Dict[str, str]] = None,
                 python_params: Optional[List[str]] = None,
@@ -7820,6 +7897,10 @@ def run_now(self,
         :param only: List[str] (optional)
           A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
           will be run.
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be. For a RunNow request, the run executes with these settings instead of the
+          ones defined on the job.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
@@ -7872,6 +7953,7 @@ def run_now(self,
         if job_parameters is not None: body['job_parameters'] = job_parameters
         if notebook_params is not None: body['notebook_params'] = notebook_params
         if only is not None: body['only'] = [v for v in only]
+        if performance_target is not None: body['performance_target'] = performance_target.value
         if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
         if python_named_params is not None: body['python_named_params'] = python_named_params
         if python_params is not None: body['python_params'] = [v for v in python_params]
@@ -7894,6 +7976,7 @@ def run_now_and_wait(self,
                          job_parameters: Optional[Dict[str, str]] = None,
                          notebook_params: Optional[Dict[str, str]] = None,
                          only: Optional[List[str]] = None,
+                         performance_target: Optional[PerformanceTarget] = None,
                          pipeline_params: Optional[PipelineParams] = None,
                          python_named_params: Optional[Dict[str, str]] = None,
                          python_params: Optional[List[str]] = None,
@@ -7908,6 +7991,7 @@ def run_now_and_wait(self,
                             job_parameters=job_parameters,
                             notebook_params=notebook_params,
                             only=only,
+                            performance_target=performance_target,
                             pipeline_params=pipeline_params,
                             python_named_params=python_named_params,
                             python_params=python_params,
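
A minimal sketch of the new per-run `performance_target` override, for reviewers who want to see the intended call pattern. The job ID is hypothetical, and the job is assumed to contain at least one serverless task so the override can take effect:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import PerformanceTarget

w = WorkspaceClient()

# Override the job-level performance setting for this run only.
run = w.jobs.run_now_and_wait(
    job_id=1234,  # hypothetical job with at least one serverless task
    performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED,
)

# Each task reports the target that was actually applied, which can differ
# from the requested one if the run was not eligible for optimization.
for task in run.tasks or []:
    print(task.task_key, task.effective_performance_target)
```
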
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 7c98e4cd5..37d464af6 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -31,6 +31,10 @@ class CreateCustomAppIntegration:
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -39,6 +43,8 @@ def as_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -49,6 +55,7 @@ def as_shallow_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
         if self.scopes: body['scopes'] = self.scopes
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
@@ -58,7 +65,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration:
                    name=d.get('name', None),
                    redirect_urls=d.get('redirect_urls', None),
                    scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
@@ -346,6 +354,10 @@ class GetCustomAppIntegrationOutput:
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -359,6 +371,8 @@ def as_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -374,6 +388,7 @@ def as_shallow_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
         if self.scopes: body['scopes'] = self.scopes
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
@@ -388,7 +403,8 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput:
                    name=d.get('name', None),
                    redirect_urls=d.get('redirect_urls', None),
                    scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
@@ -798,6 +814,10 @@ class UpdateCustomAppIntegration:
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy to be updated in the custom OAuth app integration"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -805,6 +825,8 @@ def as_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -814,6 +836,7 @@ def as_shallow_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
         if self.scopes: body['scopes'] = self.scopes
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
@@ -822,7 +845,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration:
         return cls(integration_id=d.get('integration_id', None),
                    redirect_urls=d.get('redirect_urls', None),
                    scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
@@ -1066,7 +1090,8 @@ def create(self,
                name: Optional[str] = None,
                redirect_urls: Optional[List[str]] = None,
                scopes: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput:
+               token_access_policy: Optional[TokenAccessPolicy] = None,
+               user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput:
         """Create Custom OAuth App Integration.
         
         Create Custom OAuth App Integration.
@@ -1084,6 +1109,9 @@ def create(self,
           profile, email.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user must consent to before the access token is minted. If the user does
+          not authorize them, the access token is not minted. Must be a subset of scopes.
         
         :returns: :class:`CreateCustomAppIntegrationOutput`
         """
@@ -1093,6 +1121,8 @@ def create(self,
         if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
         if scopes is not None: body['scopes'] = [v for v in scopes]
         if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if user_authorized_scopes is not None:
+            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -1177,7 +1207,8 @@ def update(self,
                *,
                redirect_urls: Optional[List[str]] = None,
                scopes: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None):
+               token_access_policy: Optional[TokenAccessPolicy] = None,
+               user_authorized_scopes: Optional[List[str]] = None):
         """Updates Custom OAuth App Integration.
         
         Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
@@ -1191,6 +1222,9 @@ def update(self,
           this will fully replace the existing values instead of appending
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user must consent to before the access token is minted. If the user does
+          not authorize them, the access token is not minted. Must be a subset of scopes.
         
         
         """
@@ -1198,6 +1232,8 @@ def update(self,
         if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
         if scopes is not None: body['scopes'] = [v for v in scopes]
         if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if user_authorized_scopes is not None:
+            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         self._api.do(
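
A sketch of how the new `user_authorized_scopes` field is meant to be used on custom OAuth app integrations. The app name, redirect URL, and scope values are illustrative only:

```python
from databricks.sdk import AccountClient

a = AccountClient()

integration = a.custom_app_integration.create(
    name='my-third-party-app',                      # hypothetical app name
    redirect_urls=['https://example.com/callback'],
    scopes=['all-apis', 'offline_access'],
    # Scopes the end user must consent to; must be a subset of `scopes`.
    user_authorized_scopes=['all-apis'],
)

# Updating fully replaces the existing values instead of appending.
a.custom_app_integration.update(
    integration_id=integration.integration_id,
    user_authorized_scopes=['all-apis', 'offline_access'],
)
```
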
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 971e3fd7e..ae506d4b9 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -650,13 +650,13 @@ class CreateServingEndpoint:
     """The name of the serving endpoint. This field is required and must be unique across a Databricks
     workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores."""
 
-    config: EndpointCoreConfigInput
-    """The core config of the serving endpoint."""
-
     ai_gateway: Optional[AiGatewayConfig] = None
     """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
     throughput endpoints are currently supported."""
 
+    config: Optional[EndpointCoreConfigInput] = None
+    """The core config of the serving endpoint."""
+
     rate_limits: Optional[List[RateLimit]] = None
     """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
     Gateway to manage rate limits."""
@@ -1242,34 +1242,6 @@ class ExternalFunctionRequestHttpMethod(Enum):
     PUT = 'PUT'
 
 
-@dataclass
-class ExternalFunctionResponse:
-    status_code: Optional[int] = None
-    """The HTTP status code of the response"""
-
-    text: Optional[str] = None
-    """The content of the response"""
-
-    def as_dict(self) -> dict:
-        """Serializes the ExternalFunctionResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.status_code is not None: body['status_code'] = self.status_code
-        if self.text is not None: body['text'] = self.text
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the ExternalFunctionResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.status_code is not None: body['status_code'] = self.status_code
-        if self.text is not None: body['text'] = self.text
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionResponse:
-        """Deserializes the ExternalFunctionResponse from a dictionary."""
-        return cls(status_code=d.get('status_code', None), text=d.get('text', None))
-
-
 @dataclass
 class ExternalModel:
     provider: ExternalModelProvider
@@ -1550,6 +1522,28 @@ def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
                    region=d.get('region', None))
 
 
+@dataclass
+class HttpRequestResponse:
+    contents: Optional[BinaryIO] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> HttpRequestResponse:
+        """Deserializes the HttpRequestResponse from a dictionary."""
+        return cls(contents=d.get('contents', None))
+
+
 @dataclass
 class ListEndpointsResponse:
     endpoints: Optional[List[ServingEndpoint]] = None
@@ -3403,9 +3397,9 @@ def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse:
 
     def create(self,
                name: str,
-               config: EndpointCoreConfigInput,
                *,
                ai_gateway: Optional[AiGatewayConfig] = None,
+               config: Optional[EndpointCoreConfigInput] = None,
                rate_limits: Optional[List[RateLimit]] = None,
                route_optimized: Optional[bool] = None,
                tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
@@ -3414,11 +3408,11 @@ def create(self,
         :param name: str
           The name of the serving endpoint. This field is required and must be unique across a Databricks
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-        :param config: :class:`EndpointCoreConfigInput`
-          The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
           The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
           throughput endpoints are currently supported.
+        :param config: :class:`EndpointCoreConfigInput` (optional)
+          The core config of the serving endpoint.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
@@ -3448,9 +3442,9 @@ def create(self,
     def create_and_wait(
         self,
         name: str,
-        config: EndpointCoreConfigInput,
         *,
         ai_gateway: Optional[AiGatewayConfig] = None,
+        config: Optional[EndpointCoreConfigInput] = None,
         rate_limits: Optional[List[RateLimit]] = None,
         route_optimized: Optional[bool] = None,
         tags: Optional[List[EndpointTag]] = None,
@@ -3568,7 +3562,7 @@ def http_request(self,
                      *,
                      headers: Optional[str] = None,
                      json: Optional[str] = None,
-                     params: Optional[str] = None) -> ExternalFunctionResponse:
+                     params: Optional[str] = None) -> HttpRequestResponse:
         """Make external services call using the credentials stored in UC Connection.
         
         :param connection_name: str
@@ -3585,7 +3579,7 @@ def http_request(self,
         :param params: str (optional)
           Query parameters for the request.
         
-        :returns: :class:`ExternalFunctionResponse`
+        :returns: :class:`HttpRequestResponse`
         """
         body = {}
         if connection_name is not None: body['connection_name'] = connection_name
@@ -3594,10 +3588,10 @@ def http_request(self,
         if method is not None: body['method'] = method.value
         if params is not None: body['params'] = params
         if path is not None: body['path'] = path
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+        headers = {'Accept': 'text/plain', 'Content-Type': 'application/json', }
 
-        res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers)
-        return ExternalFunctionResponse.from_dict(res)
+        res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers, raw=True)
+        return HttpRequestResponse.from_dict(res)
 
     def list(self) -> Iterator[ServingEndpoint]:
         """Get all serving endpoints.
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index c3fba0ab3..488ab72b9 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -14,6 +14,48 @@
 # all definitions in this file are in alphabetical order
 
 
+@dataclass
+class AccountIpAccessEnable:
+    acct_ip_acl_enable: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AccountIpAccessEnable:
+        """Deserializes the AccountIpAccessEnable from a dictionary."""
+        return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class AibiDashboardEmbeddingAccessPolicy:
     access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType
@@ -991,6 +1033,36 @@ def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class DeleteAccountIpAccessEnableResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAccountIpAccessEnableResponse:
+        """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
     """The etag is returned."""
@@ -3556,9 +3628,48 @@ class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
 
     ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY'
     AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
 
 
+@dataclass
+class UpdateAccountIpAccessEnableRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AccountIpAccessEnable
+
+    field_mask: str
+    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
+    the setting payload will be updated. The field mask needs to be supplied as a single string. To
+    specify multiple fields in the field mask, use a comma as the separator (no space)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAccountIpAccessEnableRequest:
+        """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AccountIpAccessEnable))
+
+
 @dataclass
 class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
     """Details required to update a setting."""
@@ -4391,6 +4502,7 @@ def __init__(self, api_client):
 
         self._csp_enablement_account = CspEnablementAccountAPI(self._api)
         self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api)
+        self._enable_ip_access_lists = EnableIpAccessListsAPI(self._api)
         self._esm_enablement_account = EsmEnablementAccountAPI(self._api)
         self._personal_compute = PersonalComputeAPI(self._api)
 
@@ -4404,6 +4516,11 @@ def disable_legacy_features(self) -> DisableLegacyFeaturesAPI:
         """Disable legacy features for new Databricks workspaces."""
         return self._disable_legacy_features
 
+    @property
+    def enable_ip_access_lists(self) -> EnableIpAccessListsAPI:
+        """Controls the enforcement of IP access lists for accessing the account console."""
+        return self._enable_ip_access_lists
+
     @property
     def esm_enablement_account(self) -> EsmEnablementAccountAPI:
         """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces."""
@@ -5203,6 +5320,95 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
         return DisableLegacyFeatures.from_dict(res)
 
 
+class EnableIpAccessListsAPI:
+    """Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
+    disable restricted access based on IP addresses."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse:
+        """Delete the account IP access toggle setting.
+        
+        Reverts the value of the account IP access toggle setting to its default (ON).
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAccountIpAccessEnableResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            query=query,
+            headers=headers)
+        return DeleteAccountIpAccessEnableResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable:
+        """Get the account IP access toggle setting.
+        
+        Gets the value of the account IP access toggle setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            query=query,
+            headers=headers)
+        return AccountIpAccessEnable.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AccountIpAccessEnable,
+               field_mask: str) -> AccountIpAccessEnable:
+        """Update the account IP access toggle setting.
+        
+        Updates the value of the account IP access toggle setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AccountIpAccessEnable`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To
+          specify multiple fields in the field mask, use a comma as the separator (no space).
+        
+        :returns: :class:`AccountIpAccessEnable`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            body=body,
+            headers=headers)
+        return AccountIpAccessEnable.from_dict(res)
+
+
 class EnhancedSecurityMonitoringAPI:
     """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
     security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
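
A sketch of the read -> update etag pattern against the new account IP access toggle. It assumes `BooleanMessage` wraps a single boolean `value` field, and the field mask path shown is illustrative:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.settings import (AccountIpAccessEnable,
                                             BooleanMessage)

a = AccountClient()

# Read first to obtain a fresh etag for optimistic concurrency control.
current = a.settings.enable_ip_access_lists.get()

updated = a.settings.enable_ip_access_lists.update(
    allow_missing=True,  # always true for the Settings API
    setting=AccountIpAccessEnable(
        acct_ip_acl_enable=BooleanMessage(value=True),  # assumed field name
        etag=current.etag,
    ),
    field_mask='acct_ip_acl_enable.value',  # illustrative mask path
)
print(updated.etag)
```
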
diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst
new file mode 100644
index 000000000..06490428e
--- /dev/null
+++ b/docs/account/billing/budget_policy.rst
@@ -0,0 +1,86 @@
+``a.budget_policy``: Budget Policy
+==================================
+.. currentmodule:: databricks.sdk.service.billing
+
+.. py:class:: BudgetPolicyAPI
+
+    A service that serves the REST API for budget policies.
+
+    .. py:method:: create( [, custom_tags: Optional[List[compute.CustomPolicyTag]], policy_name: Optional[str], request_id: Optional[str]]) -> BudgetPolicy
+
+        Create a budget policy.
+        
+        Creates a new policy.
+        
+        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+          A list of tags defined by the customer. At most 40 entries are allowed per policy.
+        :param policy_name: str (optional)
+          The name of the policy. - Must be unique among active policies. - Can contain only characters
+          from 0-9, a-z, A-Z, -, =, ., :, /, @, _, +, and whitespace.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+          recommended. This request is only idempotent if a `request_id` is provided.
+        
+        :returns: :class:`BudgetPolicy`
+        
+
+    .. py:method:: delete(policy_id: str)
+
+        Delete a budget policy.
+        
+        Deletes a policy.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        
+        
+
+    .. py:method:: get(policy_id: str) -> BudgetPolicy
+
+        Get a budget policy.
+        
+        Retrieves a policy by its ID.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        :returns: :class:`BudgetPolicy`
+        
+
+    .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy]
+
+        List policies.
+        
+        Lists all policies. Policies are returned in ascending alphabetical order by name.
+        
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+          subsequent page. If unspecified, the first page will be returned.
+          
+          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+          call that provided the page token.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+        
+        :returns: Iterator over :class:`BudgetPolicy`
+        
+
+    .. py:method:: update(policy_id: str [, policy: Optional[BudgetPolicy]]) -> BudgetPolicy
+
+        Update a budget policy.
+        
+        Updates a policy.
+        
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param policy: :class:`BudgetPolicy` (optional)
+          Contains the BudgetPolicy details.
+        
+        :returns: :class:`BudgetPolicy`
+        
\ No newline at end of file
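
To complement the new reference page, a sketch of basic `a.budget_policy` usage. The policy name is illustrative, and the `policy_id`/`policy_name` attributes are assumed from the surrounding signatures:

```python
import uuid

from databricks.sdk import AccountClient

a = AccountClient()

# `request_id` makes the create call idempotent across retries.
policy = a.budget_policy.create(policy_name='team-serverless-budget',
                                request_id=str(uuid.uuid4()))

# Policies are listed in ascending alphabetical order by name.
for p in a.budget_policy.list():
    print(p.policy_name)

a.budget_policy.delete(policy_id=policy.policy_id)
```
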
diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst
index 0e07da594..b8b317616 100644
--- a/docs/account/billing/index.rst
+++ b/docs/account/billing/index.rst
@@ -8,6 +8,7 @@ Configure different aspects of Databricks billing and usage.
    :maxdepth: 1
 
    billable_usage
+   budget_policy
    budgets
    log_delivery
    usage_dashboards
\ No newline at end of file
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 9868a288b..7043a343b 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -7,7 +7,7 @@
     These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
 
-    .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput
+    .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput
 
         Create Custom OAuth App Integration.
         
@@ -26,6 +26,9 @@
           profile, email.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user must consent to before the access token is minted. If the user does
+          not authorize them, the access token is not minted. Must be a subset of scopes.
         
         :returns: :class:`CreateCustomAppIntegrationOutput`
         
@@ -67,7 +70,7 @@
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         
 
-    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]])
+    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]])
 
         Updates Custom OAuth App Integration.
         
@@ -82,6 +85,9 @@
           this will fully replace the existing values instead of appending
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user must consent to before the access token is minted. If the user does
+          not authorize them, the access token is not minted. Must be a subset of scopes.
         
         
         
\ No newline at end of file
diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst
new file mode 100644
index 000000000..3d32a762b
--- /dev/null
+++ b/docs/account/settings/enable_ip_access_lists.rst
@@ -0,0 +1,57 @@
+``a.settings.enable_ip_access_lists``: Enable Account IP Access Lists
+=====================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: EnableIpAccessListsAPI
+
+    Controls the enforcement of IP access lists for accessing the account console, allowing you to
+    enable or disable restricted access based on IP addresses.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse
+
+        Delete the account IP access toggle setting.
+        
+        Reverts the value of the account IP access toggle setting to its default (ON).
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAccountIpAccessEnableResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable
+
+        Get the account IP access toggle setting.
+        
+        Gets the value of the account IP access toggle setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable
+
+        Update the account IP access toggle setting.
+        
+        Updates the value of the account IP access toggle setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AccountIpAccessEnable`
+        :param field_mask: str
+          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
+          setting payload will be updated. The field mask needs to be supplied as a single string. To
+          specify multiple fields in the field mask, use a comma as the separator (no space).
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
\ No newline at end of file
diff --git a/docs/account/settings/index.rst b/docs/account/settings/index.rst
index abf97c6a0..9ffe7694e 100644
--- a/docs/account/settings/index.rst
+++ b/docs/account/settings/index.rst
@@ -12,5 +12,6 @@ Manage security settings for Accounts and Workspaces
    settings
    csp_enablement_account
    disable_legacy_features
+   enable_ip_access_lists
    esm_enablement_account
    personal_compute
\ No newline at end of file
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index 3df647279..abf1c0e45 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -25,6 +25,12 @@
         provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
         prior to 13.3LTS.
 
+    .. py:property:: enable_ip_access_lists
+        :type: EnableIpAccessListsAPI
+
+        Controls the enforcement of IP access lists for accessing the account console, allowing you to
+        enable or disable restricted access based on IP addresses.
+
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst
index 25deb0a18..2e788ec97 100644
--- a/docs/dbdataclasses/billing.rst
+++ b/docs/dbdataclasses/billing.rst
@@ -57,6 +57,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: BudgetPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateBillingUsageDashboardRequest
    :members:
    :undoc-members:
@@ -85,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateBudgetPolicyRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateLogDeliveryConfigurationParams
    :members:
    :undoc-members:
@@ -93,6 +101,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: DeliveryStatus
 
    The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of an Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.
@@ -116,6 +128,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Filter
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetBillingUsageDashboardResponse
    :members:
    :undoc-members:
@@ -128,6 +144,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListBudgetPoliciesResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LogDeliveryConfigStatus
 
    Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.
@@ -175,6 +195,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: SortSpec
+   :members:
+   :undoc-members:
+
+.. py:class:: SortSpecField
+
+   .. py:attribute:: POLICY_NAME
+      :value: "POLICY_NAME"
+
 .. autoclass:: UpdateBudgetConfigurationBudget
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index d1e89277f..17d23b223 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -113,49 +113,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: CatalogInfoSecurableKind
-
-   Kind of catalog securable.
-
-   .. py:attribute:: CATALOG_DELTASHARING
-      :value: "CATALOG_DELTASHARING"
-
-   .. py:attribute:: CATALOG_FOREIGN_BIGQUERY
-      :value: "CATALOG_FOREIGN_BIGQUERY"
-
-   .. py:attribute:: CATALOG_FOREIGN_DATABRICKS
-      :value: "CATALOG_FOREIGN_DATABRICKS"
-
-   .. py:attribute:: CATALOG_FOREIGN_MYSQL
-      :value: "CATALOG_FOREIGN_MYSQL"
-
-   .. py:attribute:: CATALOG_FOREIGN_POSTGRESQL
-      :value: "CATALOG_FOREIGN_POSTGRESQL"
-
-   .. py:attribute:: CATALOG_FOREIGN_REDSHIFT
-      :value: "CATALOG_FOREIGN_REDSHIFT"
-
-   .. py:attribute:: CATALOG_FOREIGN_SNOWFLAKE
-      :value: "CATALOG_FOREIGN_SNOWFLAKE"
-
-   .. py:attribute:: CATALOG_FOREIGN_SQLDW
-      :value: "CATALOG_FOREIGN_SQLDW"
-
-   .. py:attribute:: CATALOG_FOREIGN_SQLSERVER
-      :value: "CATALOG_FOREIGN_SQLSERVER"
-
-   .. py:attribute:: CATALOG_INTERNAL
-      :value: "CATALOG_INTERNAL"
-
-   .. py:attribute:: CATALOG_STANDARD
-      :value: "CATALOG_STANDARD"
-
-   .. py:attribute:: CATALOG_SYSTEM
-      :value: "CATALOG_SYSTEM"
-
-   .. py:attribute:: CATALOG_SYSTEM_DELTASHARING
-      :value: "CATALOG_SYSTEM_DELTASHARING"
-
 .. py:class:: CatalogIsolationMode
 
    Whether the current securable is accessible from all workspaces or a specific set of workspaces.
@@ -263,49 +220,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: ConnectionInfoSecurableKind
-
-   Kind of connection securable.
-
-   .. py:attribute:: CONNECTION_BIGQUERY
-      :value: "CONNECTION_BIGQUERY"
-
-   .. py:attribute:: CONNECTION_BUILTIN_HIVE_METASTORE
-      :value: "CONNECTION_BUILTIN_HIVE_METASTORE"
-
-   .. py:attribute:: CONNECTION_DATABRICKS
-      :value: "CONNECTION_DATABRICKS"
-
-   .. py:attribute:: CONNECTION_EXTERNAL_HIVE_METASTORE
-      :value: "CONNECTION_EXTERNAL_HIVE_METASTORE"
-
-   .. py:attribute:: CONNECTION_GLUE
-      :value: "CONNECTION_GLUE"
-
-   .. py:attribute:: CONNECTION_HTTP_BEARER
-      :value: "CONNECTION_HTTP_BEARER"
-
-   .. py:attribute:: CONNECTION_MYSQL
-      :value: "CONNECTION_MYSQL"
-
-   .. py:attribute:: CONNECTION_ONLINE_CATALOG
-      :value: "CONNECTION_ONLINE_CATALOG"
-
-   .. py:attribute:: CONNECTION_POSTGRESQL
-      :value: "CONNECTION_POSTGRESQL"
-
-   .. py:attribute:: CONNECTION_REDSHIFT
-      :value: "CONNECTION_REDSHIFT"
-
-   .. py:attribute:: CONNECTION_SNOWFLAKE
-      :value: "CONNECTION_SNOWFLAKE"
-
-   .. py:attribute:: CONNECTION_SQLDW
-      :value: "CONNECTION_SQLDW"
-
-   .. py:attribute:: CONNECTION_SQLSERVER
-      :value: "CONNECTION_SQLSERVER"
-
 .. py:class:: ConnectionType
 
    The type of connection.
@@ -1296,6 +1210,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CLEAN_ROOM
+      :value: "CLEAN_ROOM"
+
    .. py:attribute:: CONNECTION
       :value: "CONNECTION"
 
diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst
index 85ec98250..bdea23775 100644
--- a/docs/dbdataclasses/cleanrooms.rst
+++ b/docs/dbdataclasses/cleanrooms.rst
@@ -84,6 +84,21 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CleanRoomNotebookReview
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomNotebookReviewNotebookReviewState
+
+   .. py:attribute:: APPROVED
+      :value: "APPROVED"
+
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
+   .. py:attribute:: REJECTED
+      :value: "REJECTED"
+
 .. autoclass:: CleanRoomNotebookTaskRun
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index 6a9a06671..b90ec99f7 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -299,6 +299,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CustomPolicyTag
+   :members:
+   :undoc-members:
+
 .. autoclass:: DataPlaneEventDetails
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index 3996fa511..e85322a66 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -470,6 +470,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: UNPAUSED
       :value: "UNPAUSED"
 
+.. py:class:: PerformanceTarget
+
+   PerformanceTarget defines how performant (lower latency) or cost-efficient the execution of a run on serverless compute should be. The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).
+
+   .. py:attribute:: COST_OPTIMIZED
+      :value: "COST_OPTIMIZED"
+
+   .. py:attribute:: PERFORMANCE_OPTIMIZED
+      :value: "PERFORMANCE_OPTIMIZED"
+
 .. autoclass:: PeriodicTriggerConfiguration
    :members:
    :undoc-members:
@@ -895,6 +905,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was successfully canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit.
    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
 
+   .. py:attribute:: BUDGET_POLICY_LIMIT_EXCEEDED
+      :value: "BUDGET_POLICY_LIMIT_EXCEEDED"
+
    .. py:attribute:: CANCELED
       :value: "CANCELED"
 
diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst
index af4772f77..abaeb5355 100644
--- a/docs/dbdataclasses/serving.rst
+++ b/docs/dbdataclasses/serving.rst
@@ -222,10 +222,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: PUT
       :value: "PUT"
 
-.. autoclass:: ExternalFunctionResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: ExternalModel
    :members:
    :undoc-members:
@@ -276,6 +272,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: HttpRequestResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListEndpointsResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index 572a0d6c4..b6fb0be58 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -4,6 +4,10 @@ Settings
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.settings`` module.
 
 .. py:currentmodule:: databricks.sdk.service.settings
+.. autoclass:: AccountIpAccessEnable
+   :members:
+   :undoc-members:
+
 .. autoclass:: AibiDashboardEmbeddingAccessPolicy
    :members:
    :undoc-members:
@@ -215,6 +219,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteAccountIpAccessEnableResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
    :members:
    :undoc-members:
@@ -652,9 +660,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN
       :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
 
+   .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY
+      :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+
    .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN
       :value: "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
+.. autoclass:: UpdateAccountIpAccessEnableRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest
    :members:
    :undoc-members:
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
new file mode 100644
index 000000000..54b55516b
--- /dev/null
+++ b/docs/workspace/catalog/credentials.rst
@@ -0,0 +1,68 @@
+``w.credentials``: Credentials
+==============================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: CredentialsAPI
+
+    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
+    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+    new workspace. A credential configuration encapsulates this role information, and its ID is used when
+    creating a new workspace.
+
+    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
+
+        Create credential configuration.
+        
+        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+        ID) in the returned credential object, and configure the required access policy.
+        
+        Save the response's `credentials_id` field, which is the ID for your new credential configuration
+        object.
+        
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API]
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param credentials_name: str
+          The human-readable name of the credential configuration object.
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: delete(credentials_id: str)
+
+        Delete credential configuration.
+        
+        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+        delete a credential that is associated with any workspace.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        
+        
+
+    .. py:method:: get(credentials_id: str) -> Credential
+
+        Get credential configuration.
+        
+        Gets a Databricks credential configuration object for an account, both specified by ID.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: list() -> Iterator[Credential]
+
+        Get all credential configurations.
+        
+        Gets all Databricks credential configurations associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`Credential`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst
index 1372ca5a1..471804098 100644
--- a/docs/workspace/catalog/index.rst
+++ b/docs/workspace/catalog/index.rst
@@ -10,6 +10,7 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas,
    artifact_allowlists
    catalogs
    connections
+   credentials
    external_locations
    functions
    grants
diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst
index 667f6c18f..dc86a0e78 100644
--- a/docs/workspace/index.rst
+++ b/docs/workspace/index.rst
@@ -18,7 +18,6 @@ These APIs are available from WorkspaceClient
    marketplace/index
    ml/index
    pipelines/index
-   provisioning/index
    serving/index
    settings/index
    sharing/index
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index 49bebe60d..d54bc088d 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -120,7 +120,7 @@
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
 
-    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
+    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], performance_target: Optional[PerformanceTarget], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
 
 
         Usage:
@@ -216,6 +216,9 @@
           `email_notifications` and `webhook_notifications` for this job.
         :param parameters: List[:class:`JobParameterDefinition`] (optional)
           Job-level parameter definitions
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
@@ -802,7 +805,7 @@
         
         
 
-    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
+    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
 
         Usage:
@@ -889,6 +892,10 @@
         :param only: List[str] (optional)
           A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
           will be run.
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be. For a RunNow request, the run will execute with these settings instead of the
+          ones defined in the job.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
@@ -934,7 +941,7 @@
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         
 
-    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
+    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
 
     .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index d9c806489..687976f5d 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -29,18 +29,18 @@
         :returns: :class:`BuildLogsResponse`
         
 
-    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
+    .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
         
         :param name: str
           The name of the serving endpoint. This field is required and must be unique across a Databricks
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-        :param config: :class:`EndpointCoreConfigInput`
-          The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
           The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
           throughput endpoints are currently supported.
+        :param config: :class:`EndpointCoreConfigInput` (optional)
+          The core config of the serving endpoint.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
@@ -54,7 +54,7 @@
           See :method:wait_get_serving_endpoint_not_updating for more details.
         
 
-    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
+    .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
 
     .. py:method:: delete(name: str)
@@ -135,7 +135,7 @@
         :returns: :class:`ServingEndpointPermissions`
         
 
-    .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> ExternalFunctionResponse
+    .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> Response
 
         Make external services call using the credentials stored in UC Connection.
         **NOTE:** Experimental: This API may change or be removed in a future release without warning.
@@ -152,7 +152,7 @@
           JSON payload for the request.
         :param params: Dict[str,str] (optional)
           Query parameters for the request.
-        :returns: :class:`ExternalFunctionResponse`
+        :returns: :class:`Response`
         
 
     .. py:method:: list() -> Iterator[ServingEndpoint]
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index 1858c66cb..e503da073 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -1,8 +1,10 @@
 import sys
+from io import BytesIO
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
 
 
 def test_open_ai_client(monkeypatch):
@@ -28,3 +30,22 @@ def test_langchain_open_ai_client(monkeypatch):
 
     assert client.openai_api_base == "https://test_host/serving-endpoints"
     assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
+
+
+def test_http_request(w, requests_mock):
+    headers = {"Accept": "text/plain", "Content-Type": "application/json", }
+    mocked_url = "http://localhost/api/2.0/external-function"
+    blob_response = BytesIO(b"The request was successful")
+
+    requests_mock.post(mocked_url,
+                       request_headers=headers,
+                       content=blob_response.getvalue(),
+                       status_code=200,
+                       )
+    response = w.serving_endpoints.http_request(conn="test_conn",
+                                                method=ExternalFunctionRequestHttpMethod.GET,
+                                                path="test_path")
+    assert requests_mock.call_count == 1
+    assert requests_mock.called
+    assert response.status_code == 200  # Verify the response status
+    assert response.text == "The request was successful"  # Ensure the response body matches the mocked data
\ No newline at end of file

From 95277c8625cff51099eca4a8ca129c4865b9776e Mon Sep 17 00:00:00 2001
From: Kirill Safonov <122353021+ksafonov-db@users.noreply.github.com>
Date: Thu, 30 Jan 2025 14:04:29 +0100
Subject: [PATCH 091/136] [Internal] Add unit tests for retriable requests
 (#879)

## What changes are proposed in this pull request?

This PR improves the tests for retriable requests, ensuring we cover
retries triggered both by retriable error responses and by connection
exceptions. There is now one unified test, parameterized by:
1) the data passed to be uploaded
2) the type of failure (retriable response or retriable exception)

## How is this tested?

This PR only changes tests.
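
For context, a minimal sketch of the invariant the unified test asserts
(the helper below is a hypothetical illustration, not SDK code): a
seekable request body must be rewound to its initial offset before every
retry, so each attempt uploads identical bytes.

```python
import io


def send_with_retries(data, attempts: int = 3):
    """Pretend transport that 'uploads' the body `attempts` times."""
    sent = []
    # Remember the starting offset so retries re-read the same bytes.
    start = data.tell() if data.seekable() else None
    for _ in range(attempts):
        if start is not None:
            data.seek(start)  # rewind before each attempt
        sent.append(data.read())
    return sent


body = io.BytesIO(b"0123456789")
body.seek(4)  # start mid-stream, like the offset test case
assert send_with_retries(body) == [b"456789"] * 3
```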
---
 tests/test_base_client.py | 207 +++++++++++++++++++++++---------------
 1 file changed, 127 insertions(+), 80 deletions(-)

diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index a9a9d5cc6..16a8ecfc4 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,10 +1,11 @@
 import io
 import random
 from http.server import BaseHTTPRequestHandler
-from typing import Iterator, List
+from typing import Callable, Iterator, List, Optional, Tuple, Type
 from unittest.mock import Mock
 
 import pytest
+from requests import PreparedRequest, Response, Timeout
 
 from databricks.sdk import errors, useragent
 from databricks.sdk._base_client import (_BaseClient, _RawResponse,
@@ -357,91 +358,137 @@ def tell(self):
     assert client._is_seekable_stream(CustomSeekableStream())
 
 
-@pytest.mark.parametrize(
-    'input_data',
-    [
-        b"0123456789", # bytes -> BytesIO
-        "0123456789", # str -> BytesIO
-        io.BytesIO(b"0123456789"), # BytesIO directly
-        io.StringIO("0123456789"), # StringIO
-    ])
-def test_reset_seekable_stream_on_retry(input_data):
-    received_data = []
-
-    # Retry two times before succeeding.
-    def inner(h: BaseHTTPRequestHandler):
-        if len(received_data) == 2:
-            h.send_response(200)
-            h.end_headers()
-        else:
-            h.send_response(429)
-            h.end_headers()
-
-        content_length = int(h.headers.get('Content-Length', 0))
-        if content_length > 0:
-            received_data.append(h.rfile.read(content_length))
-
-    with http_fixture_server(inner) as host:
-        client = _BaseClient()
-
-        # Retries should reset the stream.
-        client.do('POST', f'{host}/foo', data=input_data)
-
-        assert received_data == [b"0123456789", b"0123456789", b"0123456789"]
-
-
-def test_reset_seekable_stream_to_their_initial_position_on_retry():
-    received_data = []
-
-    # Retry two times before succeeding.
-    def inner(h: BaseHTTPRequestHandler):
-        if len(received_data) == 2:
-            h.send_response(200)
-            h.end_headers()
+class RetryTestCase:
+
+    def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool,
+                 expected_result: bytes):
+        self._data_provider = data_provider
+        self._offset = offset
+        self._expected_result = expected_result
+        self._expected_failure = expected_failure
+
+    def get_data(self):
+        data = self._data_provider()
+        if self._offset is not None:
+            data.seek(self._offset)
+        return data
+
+    @classmethod
+    def create_non_seekable_stream(cls, data: bytes):
+        result = io.BytesIO(data)
+        result.seekable = lambda: False # makes the stream appear non-seekable
+        return result
+
+
+class MockSession:
+
+    def __init__(self, failure_count: int, failure_provider: Callable[[], Response]):
+        self._failure_count = failure_count
+        self._received_requests: List[bytes] = []
+        self._failure_provider = failure_provider
+
+    @classmethod
+    def raise_timeout_exception(cls):
+        raise Timeout("Fake timeout")
+
+    @classmethod
+    def return_retryable_response(cls):
+        # fill response fields so that logging does not fail
+        response = Response()
+        response._content = b''
+        response.status_code = 429
+        response.headers = {'Retry-After': '1'}
+        response.url = 'http://test.com/'
+
+        response.request = PreparedRequest()
+        response.request.url = response.url
+        response.request.method = 'POST'
+        response.request.headers = None
+        response.request.body = b''
+        return response
+
+    # following the signature of Session.request()
+    def request(self,
+                method,
+                url,
+                params=None,
+                data=None,
+                headers=None,
+                cookies=None,
+                files=None,
+                auth=None,
+                timeout=None,
+                allow_redirects=True,
+                proxies=None,
+                hooks=None,
+                stream=None,
+                verify=None,
+                cert=None,
+                json=None):
+        request_body = data.read()
+
+        if isinstance(request_body, str):
+            request_body = request_body.encode('utf-8') # to be able to compare with expected bytes
+
+        self._received_requests.append(request_body)
+        if self._failure_count > 0:
+            self._failure_count -= 1
+            return self._failure_provider()
         else:
-            h.send_response(429)
-            h.end_headers()
-
-        content_length = int(h.headers.get('Content-Length', 0))
-        if content_length > 0:
-            received_data.append(h.rfile.read(content_length))
-
-    input_data = io.BytesIO(b"0123456789")
-    input_data.seek(4)
+            # fill response fields so that logging does not fail
+            response = Response()
+            response._content = b''
+            response.status_code = 200
+            response.reason = 'OK'
+            response.url = url
 
-    with http_fixture_server(inner) as host:
-        client = _BaseClient()
-
-        # Retries should reset the stream.
-        client.do('POST', f'{host}/foo', data=input_data)
-
-        assert received_data == [b"456789", b"456789", b"456789"]
-        assert input_data.tell() == 10 # EOF
+            response.request = PreparedRequest()
+            response.request.url = url
+            response.request.method = method
+            response.request.headers = headers
+            response.request.body = data
+            return response
 
 
-def test_no_retry_or_reset_on_non_seekable_stream():
-    requests = []
-
-    # Always respond with a response that triggers a retry.
-    def inner(h: BaseHTTPRequestHandler):
-        content_length = int(h.headers.get('Content-Length', 0))
-        if content_length > 0:
-            requests.append(h.rfile.read(content_length))
+@pytest.mark.parametrize(
+    'test_case',
+    [
+        # bytes -> BytesIO
+        RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"),
+        # str -> BytesIO
+        RetryTestCase(lambda: "0123456789", None, False, b"0123456789"),
+        # BytesIO directly
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), None, False, b"0123456789"),
+        # BytesIO directly with offset
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), 4, False, b"456789"),
+        # StringIO
+        RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"),
+        # Non-seekable
+        RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True,
+                      b"0123456789")
+    ])
+@pytest.mark.parametrize('failure', [(MockSession.raise_timeout_exception, Timeout),
+                                     (MockSession.return_retryable_response, errors.TooManyRequests)])
+def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]):
+    failure_count = 2
 
-        h.send_response(429)
-        h.send_header('Retry-After', '1')
-        h.end_headers()
+    data = test_case.get_data()
 
-    input_data = io.BytesIO(b"0123456789")
-    input_data.seekable = lambda: False # makes the stream appear non-seekable
+    session = MockSession(failure_count, failure[0])
+    client = _BaseClient()
+    client._session = session
 
-    with http_fixture_server(inner) as host:
-        client = _BaseClient()
+    def do():
+        client.do('POST', 'test.com/foo', data=data)
 
-        # Should raise error immediately without retry.
-        with pytest.raises(DatabricksError):
-            client.do('POST', f'{host}/foo', data=input_data)
+    if test_case._expected_failure:
+        expected_attempts_made = 1
+        exception_class = failure[1]
+        with pytest.raises(exception_class):
+            do()
+    else:
+        expected_attempts_made = failure_count + 1
+        do()
 
-        # Verify that only one request was made (no retries).
-        assert requests == [b"0123456789"]
-        assert input_data.tell() == 10 # EOF
+    assert session._received_requests == [test_case._expected_result for _ in range(expected_attempts_made)]

From aa4f0f3fb01f208f5ebdcd6570bcb1e728c10aec Mon Sep 17 00:00:00 2001
From: Ilia Babanov 
Date: Thu, 30 Jan 2025 15:02:03 +0100
Subject: [PATCH 092/136] [Fix] Exclude localhost from hitting proxies for
 metadata service requests (#877)

## What changes are proposed in this pull request?

Exclude localhost from hitting proxies for metadata service requests

The `requests` package doesn't bypass proxies for localhost by default.

Relevant issue:
https://github.com/databricks/databricks-vscode/issues/916#issuecomment-2619382290

## How is this tested?

Tested manually by running this code against the real metadata service
of the logged-in Databricks VS Code extension:
```python
import requests
import os

os.environ["HTTP_PROXY"] = "http://test.com"

resp = requests.get("http://127.0.0.1:53468/redacted",
                    timeout=10000,
                    headers={
                        "X-Databricks-Metadata-Version": "1",
                        "X-Databricks-Host": "https://redacted.databricks.com/"
                    }, proxies={"no_proxy": "localhost,127.0.0.1"})

print(resp.text)
```
The code fails without the `no_proxy` option.
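
For completeness, the same exclusion can also be expressed at the
environment level, which `requests` honors as well; the SDK fix below
instead passes an explicit `proxies` argument per request, so it does
not depend on the caller's environment. A minimal sketch (the local URL
is hypothetical):

```python
import os

import requests

# requests honors NO_PROXY/no_proxy and bypasses HTTP_PROXY for listed hosts.
os.environ["NO_PROXY"] = "localhost,127.0.0.1"

resp = requests.get("http://127.0.0.1:8080/health", timeout=10)  # hypothetical local endpoint
print(resp.status_code)
```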
---
 databricks/sdk/credentials_provider.py | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 1604fbcb3..9a64a4fc3 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -676,12 +676,18 @@ def __init__(self, cfg: 'Config'):
         self.host = cfg.host
 
     def refresh(self) -> Token:
-        resp = requests.get(self.url,
-                            timeout=self._metadata_service_timeout,
-                            headers={
-                                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
-                                self.METADATA_SERVICE_HOST_HEADER: self.host
-                            })
+        resp = requests.get(
+            self.url,
+            timeout=self._metadata_service_timeout,
+            headers={
+                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
+                self.METADATA_SERVICE_HOST_HEADER: self.host
+            },
+            proxies={
+                # Explicitly exclude localhost from being proxied. This is necessary
+                # for Metadata URLs which typically point to localhost.
+                "no_proxy": "localhost,127.0.0.1"
+            })
         json_resp: dict[str, Union[str, float]] = resp.json()
         access_token = json_resp.get("access_token", None)
         if access_token is None:

From 6f094aafffc13fbe5c236e6a7735b1b239b1ab78 Mon Sep 17 00:00:00 2001
From: Renaud Hartert 
Date: Thu, 30 Jan 2025 17:00:53 +0100
Subject: [PATCH 093/136] [Release] Release v0.42.0 (#880)

### Bug Fixes

* Fix docs generation when two services have the same name
([#872](https://github.com/databricks/databricks-sdk-py/pull/872)).


### Internal Changes

* Add CICD environment to the User Agent
([#866](https://github.com/databricks/databricks-sdk-py/pull/866)).
* Add unit tests for retriable requests
([#879](https://github.com/databricks/databricks-sdk-py/pull/879)).
* Extract "before retry" handler, use it to rewind the stream
([#878](https://github.com/databricks/databricks-sdk-py/pull/878)).
* Update Model Serving `http_request` mixin to correctly use the
underlying API.
([#876](https://github.com/databricks/databricks-sdk-py/pull/876)).

### Backward Incompatible Changes

* Changed `create()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html)
workspace-level service with new required argument order.
* Changed `http_request()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html)
workspace-level service to type `http_request()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html)
workspace-level service.
* Changed `http_request()` method for
[w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html)
workspace-level service to return
`databricks.sdk.service.serving.HttpRequestResponse` dataclass.
* Changed `config` field for
`databricks.sdk.service.serving.CreateServingEndpoint` to no longer be
required.
* Removed `securable_kind` field for
`databricks.sdk.service.catalog.CatalogInfo`.
* Removed `securable_kind` field for
`databricks.sdk.service.catalog.ConnectionInfo`.
* Removed `status_code` and `text` fields for
`databricks.sdk.service.serving.ExternalFunctionResponse`.

### API Changes:

* Added
[a.budget_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/budget_policy.html)
account-level service.
* Added
[a.enable_ip_access_lists](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/settings/enable_ip_access_lists.html)
account-level service.
* Added `review_state`, `reviews` and `runner_collaborators` fields for
`databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.
* Added `statement_id` field for
`databricks.sdk.service.dashboards.QueryAttachment`.
* Added `effective_performance_target` field for
`databricks.sdk.service.jobs.BaseRun`.
* Added `performance_target` field for
`databricks.sdk.service.jobs.CreateJob`.
* Added `performance_target` field for
`databricks.sdk.service.jobs.JobSettings`.
* Added `effective_performance_target` field for
`databricks.sdk.service.jobs.Run`.
* Added `performance_target` field for
`databricks.sdk.service.jobs.RunNow`.
* Added `effective_performance_target` field for
`databricks.sdk.service.jobs.RunTask`.
* Added `run_as_repl` field for
`databricks.sdk.service.jobs.SparkJarTask`.
* Added `user_authorized_scopes` field for
`databricks.sdk.service.oauth2.CreateCustomAppIntegration`.
* Added `user_authorized_scopes` field for
`databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`.
* Added `user_authorized_scopes` field for
`databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
* Added `contents` field for
`databricks.sdk.service.serving.HttpRequestResponse`.
* Added `clean_room` enum value for
`databricks.sdk.service.catalog.SecurableType`.
* Added `budget_policy_limit_exceeded` enum value for
`databricks.sdk.service.jobs.TerminationCodeCode`.
* Added `arclight_azure_exchange_token_with_user_delegation_key` enum
value for `databricks.sdk.service.settings.TokenType`.

OpenAPI SHA: 840c660106f820a1a5dff931d51fa5f65cd9fdd9, Date: 2025-01-28
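
For the serving-endpoints changes above, a hedged migration sketch (the
endpoint name, connection name, and path are hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

# `config` is no longer a required positional argument; pass it by keyword
# if needed. `create` still returns a waiter for the endpoint details.
endpoint = w.serving_endpoints.create(name="my-endpoint")  # hypothetical name

# `http_request` now returns an HTTP-style response object instead of
# `ExternalFunctionResponse`; read the status and body from it directly.
resp = w.serving_endpoints.http_request(conn="my_connection",  # hypothetical UC connection
                                        method=ExternalFunctionRequestHttpMethod.GET,
                                        path="/status")
print(resp.status_code, resp.text)
```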

---------

Signed-off-by: Renaud Hartert 
---
 CHANGELOG.md                           |  44 +++
 databricks/sdk/__init__.py             | 481 +++++++++++++------------
 databricks/sdk/version.py              |   2 +-
 docs/workspace/catalog/credentials.rst | 199 ++++++++--
 4 files changed, 451 insertions(+), 275 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index ba19e3ef5..b2b31f7a5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,49 @@
 # Version changelog
 
+### Bug Fixes
+
+ * Fix docs generation when two services have the same name ([#872](https://github.com/databricks/databricks-sdk-py/pull/872)).
+
+### Internal Changes
+
+ * Add CICD environment to the User Agent ([#866](https://github.com/databricks/databricks-sdk-py/pull/866)).
+ * Add unit tests for retriable requests ([#879](https://github.com/databricks/databricks-sdk-py/pull/879)).
+ * Extract "before retry" handler, use it to rewind the stream ([#878](https://github.com/databricks/databricks-sdk-py/pull/878)).
+ * Update Model Serving `http_request` mixin to correctly use the underlying API.  ([#876](https://github.com/databricks/databricks-sdk-py/pull/876)).
+
+### Backward Incompatible Changes
+
+* Changed `create()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service with new required argument order.
+* Changed `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service to type `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service.
+* Changed `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.HttpRequestResponse` dataclass.
+* Changed `config` field for `databricks.sdk.service.serving.CreateServingEndpoint` to no longer be required.
+* Removed `securable_kind` field for `databricks.sdk.service.catalog.CatalogInfo`.
+* Removed `securable_kind` field for `databricks.sdk.service.catalog.ConnectionInfo`.
+* Removed `status_code` and `text` fields for `databricks.sdk.service.serving.ExternalFunctionResponse`.
+
+### API Changes:
+
+* Added [a.budget_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/budget_policy.html) account-level service.
+* Added [a.enable_ip_access_lists](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/settings/enable_ip_access_lists.html) account-level service.
+* Added `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.
+* Added `statement_id` field for `databricks.sdk.service.dashboards.QueryAttachment`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.BaseRun`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.CreateJob`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.JobSettings`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.Run`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.RunNow`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.RunTask`.
+* Added `run_as_repl` field for `databricks.sdk.service.jobs.SparkJarTask`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.CreateCustomAppIntegration`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
+* Added `contents` field for `databricks.sdk.service.serving.HttpRequestResponse`.
+* Added `clean_room` enum value for `databricks.sdk.service.catalog.SecurableType`.
+* Added `budget_policy_limit_exceeded` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`.
+* Added `arclight_azure_exchange_token_with_user_delegation_key` enum value for `databricks.sdk.service.settings.TokenType`.
+
+OpenAPI SHA: 840c660106f820a1a5dff931d51fa5f65cd9fdd9, Date: 2025-01-28
+
 ## [Release] Release v0.41.0
 
 ### New Features and Improvements
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index c7f9295a0..1892069c2 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -5,6 +5,7 @@
 
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
+import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
@@ -190,102 +191,106 @@ def __init__(self,
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
-        self._access_control = AccessControlAPI(self._api_client)
-        self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
-        self._alerts = AlertsAPI(self._api_client)
-        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
-        self._apps = AppsAPI(self._api_client)
-        self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
-        self._catalogs = CatalogsAPI(self._api_client)
-        self._clean_room_assets = CleanRoomAssetsAPI(self._api_client)
-        self._clean_room_task_runs = CleanRoomTaskRunsAPI(self._api_client)
-        self._clean_rooms = CleanRoomsAPI(self._api_client)
-        self._cluster_policies = ClusterPoliciesAPI(self._api_client)
+        self._access_control = service.iam.AccessControlAPI(self._api_client)
+        self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
+        self._alerts = service.sql.AlertsAPI(self._api_client)
+        self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+        self._apps = service.apps.AppsAPI(self._api_client)
+        self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
+        self._catalogs = service.catalog.CatalogsAPI(self._api_client)
+        self._clean_room_assets = service.cleanrooms.CleanRoomAssetsAPI(self._api_client)
+        self._clean_room_task_runs = service.cleanrooms.CleanRoomTaskRunsAPI(self._api_client)
+        self._clean_rooms = service.cleanrooms.CleanRoomsAPI(self._api_client)
+        self._cluster_policies = service.compute.ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
-        self._command_execution = CommandExecutionAPI(self._api_client)
-        self._connections = ConnectionsAPI(self._api_client)
-        self._consumer_fulfillments = ConsumerFulfillmentsAPI(self._api_client)
-        self._consumer_installations = ConsumerInstallationsAPI(self._api_client)
-        self._consumer_listings = ConsumerListingsAPI(self._api_client)
-        self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
-        self._consumer_providers = ConsumerProvidersAPI(self._api_client)
-        self._credentials = CredentialsAPI(self._api_client)
-        self._credentials_manager = CredentialsManagerAPI(self._api_client)
-        self._current_user = CurrentUserAPI(self._api_client)
-        self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
-        self._dashboards = DashboardsAPI(self._api_client)
-        self._data_sources = DataSourcesAPI(self._api_client)
+        self._command_execution = service.compute.CommandExecutionAPI(self._api_client)
+        self._connections = service.catalog.ConnectionsAPI(self._api_client)
+        self._consumer_fulfillments = service.marketplace.ConsumerFulfillmentsAPI(self._api_client)
+        self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client)
+        self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client)
+        self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI(
+            self._api_client)
+        self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client)
+        self._credentials = service.catalog.CredentialsAPI(self._api_client)
+        self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client)
+        self._current_user = service.iam.CurrentUserAPI(self._api_client)
+        self._dashboard_widgets = service.sql.DashboardWidgetsAPI(self._api_client)
+        self._dashboards = service.sql.DashboardsAPI(self._api_client)
+        self._data_sources = service.sql.DataSourcesAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
-        self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
-        self._experiments = ExperimentsAPI(self._api_client)
-        self._external_locations = ExternalLocationsAPI(self._api_client)
+        self._dbsql_permissions = service.sql.DbsqlPermissionsAPI(self._api_client)
+        self._experiments = service.ml.ExperimentsAPI(self._api_client)
+        self._external_locations = service.catalog.ExternalLocationsAPI(self._api_client)
         self._files = _make_files_client(self._api_client, self._config)
-        self._functions = FunctionsAPI(self._api_client)
-        self._genie = GenieAPI(self._api_client)
-        self._git_credentials = GitCredentialsAPI(self._api_client)
-        self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
-        self._grants = GrantsAPI(self._api_client)
-        self._groups = GroupsAPI(self._api_client)
-        self._instance_pools = InstancePoolsAPI(self._api_client)
-        self._instance_profiles = InstanceProfilesAPI(self._api_client)
-        self._ip_access_lists = IpAccessListsAPI(self._api_client)
+        self._functions = service.catalog.FunctionsAPI(self._api_client)
+        self._genie = service.dashboards.GenieAPI(self._api_client)
+        self._git_credentials = service.workspace.GitCredentialsAPI(self._api_client)
+        self._global_init_scripts = service.compute.GlobalInitScriptsAPI(self._api_client)
+        self._grants = service.catalog.GrantsAPI(self._api_client)
+        self._groups = service.iam.GroupsAPI(self._api_client)
+        self._instance_pools = service.compute.InstancePoolsAPI(self._api_client)
+        self._instance_profiles = service.compute.InstanceProfilesAPI(self._api_client)
+        self._ip_access_lists = service.settings.IpAccessListsAPI(self._api_client)
         self._jobs = JobsExt(self._api_client)
-        self._lakeview = LakeviewAPI(self._api_client)
-        self._libraries = LibrariesAPI(self._api_client)
-        self._metastores = MetastoresAPI(self._api_client)
-        self._model_registry = ModelRegistryAPI(self._api_client)
-        self._model_versions = ModelVersionsAPI(self._api_client)
-        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
-        self._online_tables = OnlineTablesAPI(self._api_client)
-        self._permission_migration = PermissionMigrationAPI(self._api_client)
-        self._permissions = PermissionsAPI(self._api_client)
-        self._pipelines = PipelinesAPI(self._api_client)
-        self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
-        self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
-        self._policy_families = PolicyFamiliesAPI(self._api_client)
-        self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
-        self._provider_exchanges = ProviderExchangesAPI(self._api_client)
-        self._provider_files = ProviderFilesAPI(self._api_client)
-        self._provider_listings = ProviderListingsAPI(self._api_client)
-        self._provider_personalization_requests = ProviderPersonalizationRequestsAPI(self._api_client)
-        self._provider_provider_analytics_dashboards = ProviderProviderAnalyticsDashboardsAPI(
+        self._lakeview = service.dashboards.LakeviewAPI(self._api_client)
+        self._libraries = service.compute.LibrariesAPI(self._api_client)
+        self._metastores = service.catalog.MetastoresAPI(self._api_client)
+        self._model_registry = service.ml.ModelRegistryAPI(self._api_client)
+        self._model_versions = service.catalog.ModelVersionsAPI(self._api_client)
+        self._notification_destinations = service.settings.NotificationDestinationsAPI(self._api_client)
+        self._online_tables = service.catalog.OnlineTablesAPI(self._api_client)
+        self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client)
+        self._permissions = service.iam.PermissionsAPI(self._api_client)
+        self._pipelines = service.pipelines.PipelinesAPI(self._api_client)
+        self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI(
+            self._api_client)
+        self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client)
+        self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client)
+        self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client)
+        self._provider_exchanges = service.marketplace.ProviderExchangesAPI(self._api_client)
+        self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client)
+        self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client)
+        self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI(
             self._api_client)
-        self._provider_providers = ProviderProvidersAPI(self._api_client)
-        self._providers = ProvidersAPI(self._api_client)
-        self._quality_monitors = QualityMonitorsAPI(self._api_client)
-        self._queries = QueriesAPI(self._api_client)
-        self._queries_legacy = QueriesLegacyAPI(self._api_client)
-        self._query_history = QueryHistoryAPI(self._api_client)
-        self._query_visualizations = QueryVisualizationsAPI(self._api_client)
-        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
-        self._recipient_activation = RecipientActivationAPI(self._api_client)
-        self._recipients = RecipientsAPI(self._api_client)
-        self._registered_models = RegisteredModelsAPI(self._api_client)
-        self._repos = ReposAPI(self._api_client)
-        self._resource_quotas = ResourceQuotasAPI(self._api_client)
-        self._schemas = SchemasAPI(self._api_client)
-        self._secrets = SecretsAPI(self._api_client)
-        self._service_principals = ServicePrincipalsAPI(self._api_client)
+        self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI(
+            self._api_client)
+        self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client)
+        self._providers = service.sharing.ProvidersAPI(self._api_client)
+        self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client)
+        self._queries = service.sql.QueriesAPI(self._api_client)
+        self._queries_legacy = service.sql.QueriesLegacyAPI(self._api_client)
+        self._query_history = service.sql.QueryHistoryAPI(self._api_client)
+        self._query_visualizations = service.sql.QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = service.sql.QueryVisualizationsLegacyAPI(self._api_client)
+        self._recipient_activation = service.sharing.RecipientActivationAPI(self._api_client)
+        self._recipients = service.sharing.RecipientsAPI(self._api_client)
+        self._registered_models = service.catalog.RegisteredModelsAPI(self._api_client)
+        self._repos = service.workspace.ReposAPI(self._api_client)
+        self._resource_quotas = service.catalog.ResourceQuotasAPI(self._api_client)
+        self._schemas = service.catalog.SchemasAPI(self._api_client)
+        self._secrets = service.workspace.SecretsAPI(self._api_client)
+        self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
-        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
-        self._settings = SettingsAPI(self._api_client)
-        self._shares = SharesAPI(self._api_client)
-        self._statement_execution = StatementExecutionAPI(self._api_client)
-        self._storage_credentials = StorageCredentialsAPI(self._api_client)
-        self._system_schemas = SystemSchemasAPI(self._api_client)
-        self._table_constraints = TableConstraintsAPI(self._api_client)
-        self._tables = TablesAPI(self._api_client)
-        self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client)
-        self._token_management = TokenManagementAPI(self._api_client)
-        self._tokens = TokensAPI(self._api_client)
-        self._users = UsersAPI(self._api_client)
-        self._vector_search_endpoints = VectorSearchEndpointsAPI(self._api_client)
-        self._vector_search_indexes = VectorSearchIndexesAPI(self._api_client)
-        self._volumes = VolumesAPI(self._api_client)
-        self._warehouses = WarehousesAPI(self._api_client)
+        self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
+            self._api_client, serving_endpoints)
+        self._settings = service.settings.SettingsAPI(self._api_client)
+        self._shares = service.sharing.SharesAPI(self._api_client)
+        self._statement_execution = service.sql.StatementExecutionAPI(self._api_client)
+        self._storage_credentials = service.catalog.StorageCredentialsAPI(self._api_client)
+        self._system_schemas = service.catalog.SystemSchemasAPI(self._api_client)
+        self._table_constraints = service.catalog.TableConstraintsAPI(self._api_client)
+        self._tables = service.catalog.TablesAPI(self._api_client)
+        self._temporary_table_credentials = service.catalog.TemporaryTableCredentialsAPI(self._api_client)
+        self._token_management = service.settings.TokenManagementAPI(self._api_client)
+        self._tokens = service.settings.TokensAPI(self._api_client)
+        self._users = service.iam.UsersAPI(self._api_client)
+        self._vector_search_endpoints = service.vectorsearch.VectorSearchEndpointsAPI(self._api_client)
+        self._vector_search_indexes = service.vectorsearch.VectorSearchIndexesAPI(self._api_client)
+        self._volumes = service.catalog.VolumesAPI(self._api_client)
+        self._warehouses = service.sql.WarehousesAPI(self._api_client)
         self._workspace = WorkspaceExt(self._api_client)
-        self._workspace_bindings = WorkspaceBindingsAPI(self._api_client)
-        self._workspace_conf = WorkspaceConfAPI(self._api_client)
+        self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
+        self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -300,57 +305,57 @@ def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
     @property
-    def access_control(self) -> AccessControlAPI:
+    def access_control(self) -> service.iam.AccessControlAPI:
         """Rule based Access Control for Databricks Resources."""
         return self._access_control
 
     @property
-    def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
+    def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
         return self._account_access_control_proxy
 
     @property
-    def alerts(self) -> AlertsAPI:
+    def alerts(self) -> service.sql.AlertsAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
     @property
-    def alerts_legacy(self) -> AlertsLegacyAPI:
+    def alerts_legacy(self) -> service.sql.AlertsLegacyAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
 
     @property
-    def apps(self) -> AppsAPI:
+    def apps(self) -> service.apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
 
     @property
-    def artifact_allowlists(self) -> ArtifactAllowlistsAPI:
+    def artifact_allowlists(self) -> service.catalog.ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
 
     @property
-    def catalogs(self) -> CatalogsAPI:
+    def catalogs(self) -> service.catalog.CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
     @property
-    def clean_room_assets(self) -> CleanRoomAssetsAPI:
+    def clean_room_assets(self) -> service.cleanrooms.CleanRoomAssetsAPI:
         """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
         return self._clean_room_assets
 
     @property
-    def clean_room_task_runs(self) -> CleanRoomTaskRunsAPI:
+    def clean_room_task_runs(self) -> service.cleanrooms.CleanRoomTaskRunsAPI:
         """Clean room task runs are the executions of notebooks in a clean room."""
         return self._clean_room_task_runs
 
     @property
-    def clean_rooms(self) -> CleanRoomsAPI:
+    def clean_rooms(self) -> service.cleanrooms.CleanRoomsAPI:
         """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
         return self._clean_rooms
 
     @property
-    def cluster_policies(self) -> ClusterPoliciesAPI:
+    def cluster_policies(self) -> service.compute.ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
         return self._cluster_policies
 
@@ -360,67 +365,67 @@ def clusters(self) -> ClustersExt:
         return self._clusters
 
     @property
-    def command_execution(self) -> CommandExecutionAPI:
+    def command_execution(self) -> service.compute.CommandExecutionAPI:
         """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters."""
         return self._command_execution
 
     @property
-    def connections(self) -> ConnectionsAPI:
+    def connections(self) -> service.catalog.ConnectionsAPI:
         """Connections allow for creating a connection to an external data source."""
         return self._connections
 
     @property
-    def consumer_fulfillments(self) -> ConsumerFulfillmentsAPI:
+    def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI:
         """Fulfillments are entities that allow consumers to preview installations."""
         return self._consumer_fulfillments
 
     @property
-    def consumer_installations(self) -> ConsumerInstallationsAPI:
+    def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI:
         """Installations are entities that allow consumers to interact with Databricks Marketplace listings."""
         return self._consumer_installations
 
     @property
-    def consumer_listings(self) -> ConsumerListingsAPI:
+    def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._consumer_listings
 
     @property
-    def consumer_personalization_requests(self) -> ConsumerPersonalizationRequestsAPI:
+    def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI:
         """Personalization Requests allow customers to interact with the individualized Marketplace listing flow."""
         return self._consumer_personalization_requests
 
     @property
-    def consumer_providers(self) -> ConsumerProvidersAPI:
+    def consumer_providers(self) -> service.marketplace.ConsumerProvidersAPI:
         """Providers are the entities that publish listings to the Marketplace."""
         return self._consumer_providers
 
     @property
-    def credentials(self) -> CredentialsAPI:
+    def credentials(self) -> service.catalog.CredentialsAPI:
         """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
         return self._credentials
 
     @property
-    def credentials_manager(self) -> CredentialsManagerAPI:
+    def credentials_manager(self) -> service.settings.CredentialsManagerAPI:
         """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens."""
         return self._credentials_manager
 
     @property
-    def current_user(self) -> CurrentUserAPI:
+    def current_user(self) -> service.iam.CurrentUserAPI:
         """This API allows retrieving information about currently authenticated user or service principal."""
         return self._current_user
 
     @property
-    def dashboard_widgets(self) -> DashboardWidgetsAPI:
+    def dashboard_widgets(self) -> service.sql.DashboardWidgetsAPI:
         """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace."""
         return self._dashboard_widgets
 
     @property
-    def dashboards(self) -> DashboardsAPI:
+    def dashboards(self) -> service.sql.DashboardsAPI:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
     @property
-    def data_sources(self) -> DataSourcesAPI:
+    def data_sources(self) -> service.sql.DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
         return self._data_sources
 
@@ -430,67 +435,67 @@ def dbfs(self) -> DbfsExt:
         return self._dbfs
 
     @property
-    def dbsql_permissions(self) -> DbsqlPermissionsAPI:
+    def dbsql_permissions(self) -> service.sql.DbsqlPermissionsAPI:
         """The SQL Permissions API is similar to the endpoints of the :method:permissions/set."""
         return self._dbsql_permissions
 
     @property
-    def experiments(self) -> ExperimentsAPI:
+    def experiments(self) -> service.ml.ExperimentsAPI:
         """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
         return self._experiments
 
     @property
-    def external_locations(self) -> ExternalLocationsAPI:
+    def external_locations(self) -> service.catalog.ExternalLocationsAPI:
         """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path."""
         return self._external_locations
 
     @property
-    def files(self) -> FilesAPI:
+    def files(self) -> service.files.FilesAPI:
         """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI."""
         return self._files
 
     @property
-    def functions(self) -> FunctionsAPI:
+    def functions(self) -> service.catalog.FunctionsAPI:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
     @property
-    def genie(self) -> GenieAPI:
+    def genie(self) -> service.dashboards.GenieAPI:
         """Genie provides a no-code experience for business users, powered by AI/BI."""
         return self._genie
 
     @property
-    def git_credentials(self) -> GitCredentialsAPI:
+    def git_credentials(self) -> service.workspace.GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
         return self._git_credentials
 
     @property
-    def global_init_scripts(self) -> GlobalInitScriptsAPI:
+    def global_init_scripts(self) -> service.compute.GlobalInitScriptsAPI:
         """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace."""
         return self._global_init_scripts
 
     @property
-    def grants(self) -> GrantsAPI:
+    def grants(self) -> service.catalog.GrantsAPI:
         """In Unity Catalog, data is secure by default."""
         return self._grants
 
     @property
-    def groups(self) -> GroupsAPI:
+    def groups(self) -> service.iam.GroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
         return self._groups
 
     @property
-    def instance_pools(self) -> InstancePoolsAPI:
+    def instance_pools(self) -> service.compute.InstancePoolsAPI:
         """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times."""
         return self._instance_pools
 
     @property
-    def instance_profiles(self) -> InstanceProfilesAPI:
+    def instance_profiles(self) -> service.compute.InstanceProfilesAPI:
         """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with."""
         return self._instance_profiles
 
     @property
-    def ip_access_lists(self) -> IpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.IpAccessListsAPI:
         """IP Access List enables admins to configure IP access lists."""
         return self._ip_access_lists
 
@@ -500,177 +505,178 @@ def jobs(self) -> JobsExt:
         return self._jobs
 
     @property
-    def lakeview(self) -> LakeviewAPI:
+    def lakeview(self) -> service.dashboards.LakeviewAPI:
         """These APIs provide specific management operations for Lakeview dashboards."""
         return self._lakeview
 
     @property
-    def libraries(self) -> LibrariesAPI:
+    def libraries(self) -> service.compute.LibrariesAPI:
         """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
         return self._libraries
 
     @property
-    def metastores(self) -> MetastoresAPI:
+    def metastores(self) -> service.catalog.MetastoresAPI:
         """A metastore is the top-level container of objects in Unity Catalog."""
         return self._metastores
 
     @property
-    def model_registry(self) -> ModelRegistryAPI:
+    def model_registry(self) -> service.ml.ModelRegistryAPI:
         """Note: This API reference documents APIs for the Workspace Model Registry."""
         return self._model_registry
 
     @property
-    def model_versions(self) -> ModelVersionsAPI:
+    def model_versions(self) -> service.catalog.ModelVersionsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
     @property
-    def notification_destinations(self) -> NotificationDestinationsAPI:
+    def notification_destinations(self) -> service.settings.NotificationDestinationsAPI:
         """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
         return self._notification_destinations
 
     @property
-    def online_tables(self) -> OnlineTablesAPI:
+    def online_tables(self) -> service.catalog.OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
         return self._online_tables
 
     @property
-    def permission_migration(self) -> PermissionMigrationAPI:
+    def permission_migration(self) -> service.iam.PermissionMigrationAPI:
         """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
-    def permissions(self) -> PermissionsAPI:
+    def permissions(self) -> service.iam.PermissionsAPI:
         """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints."""
         return self._permissions
 
     @property
-    def pipelines(self) -> PipelinesAPI:
+    def pipelines(self) -> service.pipelines.PipelinesAPI:
         """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
         return self._pipelines
 
     @property
-    def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
+    def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI:
         """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
         return self._policy_compliance_for_clusters
 
     @property
-    def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
+    def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI:
         """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
         return self._policy_compliance_for_jobs
 
     @property
-    def policy_families(self) -> PolicyFamiliesAPI:
+    def policy_families(self) -> service.compute.PolicyFamiliesAPI:
         """View available policy families."""
         return self._policy_families
 
     @property
-    def provider_exchange_filters(self) -> ProviderExchangeFiltersAPI:
+    def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI:
         """Marketplace exchanges filters curate which groups can access an exchange."""
         return self._provider_exchange_filters
 
     @property
-    def provider_exchanges(self) -> ProviderExchangesAPI:
+    def provider_exchanges(self) -> service.marketplace.ProviderExchangesAPI:
         """Marketplace exchanges allow providers to share their listings with a curated set of customers."""
         return self._provider_exchanges
 
     @property
-    def provider_files(self) -> ProviderFilesAPI:
+    def provider_files(self) -> service.marketplace.ProviderFilesAPI:
         """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons."""
         return self._provider_files
 
     @property
-    def provider_listings(self) -> ProviderListingsAPI:
+    def provider_listings(self) -> service.marketplace.ProviderListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._provider_listings
 
     @property
-    def provider_personalization_requests(self) -> ProviderPersonalizationRequestsAPI:
+    def provider_personalization_requests(self) -> service.marketplace.ProviderPersonalizationRequestsAPI:
         """Personalization requests are an alternate to instantly available listings."""
         return self._provider_personalization_requests
 
     @property
-    def provider_provider_analytics_dashboards(self) -> ProviderProviderAnalyticsDashboardsAPI:
+    def provider_provider_analytics_dashboards(
+            self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI:
         """Manage templated analytics solution for providers."""
         return self._provider_provider_analytics_dashboards
 
     @property
-    def provider_providers(self) -> ProviderProvidersAPI:
+    def provider_providers(self) -> service.marketplace.ProviderProvidersAPI:
         """Providers are entities that manage assets in Marketplace."""
         return self._provider_providers
 
     @property
-    def providers(self) -> ProvidersAPI:
+    def providers(self) -> service.sharing.ProvidersAPI:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers
 
     @property
-    def quality_monitors(self) -> QualityMonitorsAPI:
+    def quality_monitors(self) -> service.catalog.QualityMonitorsAPI:
         """A monitor computes and monitors data or model quality metrics for a table over time."""
         return self._quality_monitors
 
     @property
-    def queries(self) -> QueriesAPI:
+    def queries(self) -> service.sql.QueriesAPI:
         """The queries API can be used to perform CRUD operations on queries."""
         return self._queries
 
     @property
-    def queries_legacy(self) -> QueriesLegacyAPI:
+    def queries_legacy(self) -> service.sql.QueriesLegacyAPI:
         """These endpoints are used for CRUD operations on query definitions."""
         return self._queries_legacy
 
     @property
-    def query_history(self) -> QueryHistoryAPI:
+    def query_history(self) -> service.sql.QueryHistoryAPI:
         """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
         return self._query_history
 
     @property
-    def query_visualizations(self) -> QueryVisualizationsAPI:
+    def query_visualizations(self) -> service.sql.QueryVisualizationsAPI:
         """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
         return self._query_visualizations
 
     @property
-    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+    def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI:
         """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
         return self._query_visualizations_legacy
 
     @property
-    def recipient_activation(self) -> RecipientActivationAPI:
+    def recipient_activation(self) -> service.sharing.RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
         return self._recipient_activation
 
     @property
-    def recipients(self) -> RecipientsAPI:
+    def recipients(self) -> service.sharing.RecipientsAPI:
         """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares."""
         return self._recipients
 
     @property
-    def registered_models(self) -> RegisteredModelsAPI:
+    def registered_models(self) -> service.catalog.RegisteredModelsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._registered_models
 
     @property
-    def repos(self) -> ReposAPI:
+    def repos(self) -> service.workspace.ReposAPI:
         """The Repos API allows users to manage their git repos."""
         return self._repos
 
     @property
-    def resource_quotas(self) -> ResourceQuotasAPI:
+    def resource_quotas(self) -> service.catalog.ResourceQuotasAPI:
         """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
         return self._resource_quotas
 
     @property
-    def schemas(self) -> SchemasAPI:
+    def schemas(self) -> service.catalog.SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
         return self._schemas
 
     @property
-    def secrets(self) -> SecretsAPI:
+    def secrets(self) -> service.workspace.SecretsAPI:
         """The Secrets API allows you to manage secrets, secret scopes, and access permissions."""
         return self._secrets
 
     @property
-    def service_principals(self) -> ServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.ServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
@@ -680,82 +686,82 @@ def serving_endpoints(self) -> ServingEndpointsExt:
         return self._serving_endpoints
 
     @property
-    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+    def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI:
         """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
         return self._serving_endpoints_data_plane
 
     @property
-    def settings(self) -> SettingsAPI:
+    def settings(self) -> service.settings.SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
         return self._settings
 
     @property
-    def shares(self) -> SharesAPI:
+    def shares(self) -> service.sharing.SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares
 
     @property
-    def statement_execution(self) -> StatementExecutionAPI:
+    def statement_execution(self) -> service.sql.StatementExecutionAPI:
         """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result."""
         return self._statement_execution
 
     @property
-    def storage_credentials(self) -> StorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.StorageCredentialsAPI:
         """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant."""
         return self._storage_credentials
 
     @property
-    def system_schemas(self) -> SystemSchemasAPI:
+    def system_schemas(self) -> service.catalog.SystemSchemasAPI:
         """A system schema is a schema that lives within the system catalog."""
         return self._system_schemas
 
     @property
-    def table_constraints(self) -> TableConstraintsAPI:
+    def table_constraints(self) -> service.catalog.TableConstraintsAPI:
         """Primary key and foreign key constraints encode relationships between fields in tables."""
         return self._table_constraints
 
     @property
-    def tables(self) -> TablesAPI:
+    def tables(self) -> service.catalog.TablesAPI:
         """A table resides in the third layer of Unity Catalog’s three-level namespace."""
         return self._tables
 
     @property
-    def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI:
+    def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI:
         """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks."""
         return self._temporary_table_credentials
 
     @property
-    def token_management(self) -> TokenManagementAPI:
+    def token_management(self) -> service.settings.TokenManagementAPI:
         """Enables administrators to get all tokens and delete tokens for other users."""
         return self._token_management
 
     @property
-    def tokens(self) -> TokensAPI:
+    def tokens(self) -> service.settings.TokensAPI:
         """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs."""
         return self._tokens
 
     @property
-    def users(self) -> UsersAPI:
+    def users(self) -> service.iam.UsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vector_search_endpoints(self) -> VectorSearchEndpointsAPI:
+    def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI:
         """**Endpoint**: Represents the compute resources to host vector search indexes."""
         return self._vector_search_endpoints
 
     @property
-    def vector_search_indexes(self) -> VectorSearchIndexesAPI:
+    def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI:
         """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries."""
         return self._vector_search_indexes
 
     @property
-    def volumes(self) -> VolumesAPI:
+    def volumes(self) -> service.catalog.VolumesAPI:
         """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files."""
         return self._volumes
 
     @property
-    def warehouses(self) -> WarehousesAPI:
+    def warehouses(self) -> service.sql.WarehousesAPI:
         """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL."""
         return self._warehouses
 
@@ -765,12 +771,12 @@ def workspace(self) -> WorkspaceExt:
         return self._workspace
 
     @property
-    def workspace_bindings(self) -> WorkspaceBindingsAPI:
+    def workspace_bindings(self) -> service.catalog.WorkspaceBindingsAPI:
         """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__."""
         return self._workspace_bindings
 
     @property
-    def workspace_conf(self) -> WorkspaceConfAPI:
+    def workspace_conf(self) -> service.settings.WorkspaceConfAPI:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
@@ -844,35 +850,36 @@ def __init__(self,
                                    product_version=product_version)
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
-        self._access_control = AccountAccessControlAPI(self._api_client)
-        self._billable_usage = BillableUsageAPI(self._api_client)
-        self._budget_policy = BudgetPolicyAPI(self._api_client)
-        self._credentials = CredentialsAPI(self._api_client)
-        self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
-        self._encryption_keys = EncryptionKeysAPI(self._api_client)
-        self._federation_policy = AccountFederationPolicyAPI(self._api_client)
-        self._groups = AccountGroupsAPI(self._api_client)
-        self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
-        self._log_delivery = LogDeliveryAPI(self._api_client)
-        self._metastore_assignments = AccountMetastoreAssignmentsAPI(self._api_client)
-        self._metastores = AccountMetastoresAPI(self._api_client)
-        self._network_connectivity = NetworkConnectivityAPI(self._api_client)
-        self._networks = NetworksAPI(self._api_client)
-        self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
-        self._private_access = PrivateAccessAPI(self._api_client)
-        self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
-        self._service_principal_federation_policy = ServicePrincipalFederationPolicyAPI(self._api_client)
-        self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
-        self._service_principals = AccountServicePrincipalsAPI(self._api_client)
-        self._settings = AccountSettingsAPI(self._api_client)
-        self._storage = StorageAPI(self._api_client)
-        self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
-        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
-        self._users = AccountUsersAPI(self._api_client)
-        self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
-        self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
-        self._workspaces = WorkspacesAPI(self._api_client)
-        self._budgets = BudgetsAPI(self._api_client)
+        self._access_control = service.iam.AccountAccessControlAPI(self._api_client)
+        self._billable_usage = service.billing.BillableUsageAPI(self._api_client)
+        self._budget_policy = service.billing.BudgetPolicyAPI(self._api_client)
+        self._credentials = service.provisioning.CredentialsAPI(self._api_client)
+        self._custom_app_integration = service.oauth2.CustomAppIntegrationAPI(self._api_client)
+        self._encryption_keys = service.provisioning.EncryptionKeysAPI(self._api_client)
+        self._federation_policy = service.oauth2.AccountFederationPolicyAPI(self._api_client)
+        self._groups = service.iam.AccountGroupsAPI(self._api_client)
+        self._ip_access_lists = service.settings.AccountIpAccessListsAPI(self._api_client)
+        self._log_delivery = service.billing.LogDeliveryAPI(self._api_client)
+        self._metastore_assignments = service.catalog.AccountMetastoreAssignmentsAPI(self._api_client)
+        self._metastores = service.catalog.AccountMetastoresAPI(self._api_client)
+        self._network_connectivity = service.settings.NetworkConnectivityAPI(self._api_client)
+        self._networks = service.provisioning.NetworksAPI(self._api_client)
+        self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client)
+        self._private_access = service.provisioning.PrivateAccessAPI(self._api_client)
+        self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI(
+            self._api_client)
+        self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client)
+        self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client)
+        self._settings = service.settings.AccountSettingsAPI(self._api_client)
+        self._storage = service.provisioning.StorageAPI(self._api_client)
+        self._storage_credentials = service.catalog.AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = service.billing.UsageDashboardsAPI(self._api_client)
+        self._users = service.iam.AccountUsersAPI(self._api_client)
+        self._vpc_endpoints = service.provisioning.VpcEndpointsAPI(self._api_client)
+        self._workspace_assignment = service.iam.WorkspaceAssignmentAPI(self._api_client)
+        self._workspaces = service.provisioning.WorkspacesAPI(self._api_client)
+        self._budgets = service.billing.BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -883,147 +890,147 @@ def api_client(self) -> client.ApiClient:
         return self._api_client
 
     @property
-    def access_control(self) -> AccountAccessControlAPI:
+    def access_control(self) -> service.iam.AccountAccessControlAPI:
         """These APIs manage access rules on resources in an account."""
         return self._access_control
 
     @property
-    def billable_usage(self) -> BillableUsageAPI:
+    def billable_usage(self) -> service.billing.BillableUsageAPI:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
     @property
-    def budget_policy(self) -> BudgetPolicyAPI:
+    def budget_policy(self) -> service.billing.BudgetPolicyAPI:
         """A service serves REST API about Budget policies."""
         return self._budget_policy
 
     @property
-    def credentials(self) -> CredentialsAPI:
+    def credentials(self) -> service.provisioning.CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
         return self._credentials
 
     @property
-    def custom_app_integration(self) -> CustomAppIntegrationAPI:
+    def custom_app_integration(self) -> service.oauth2.CustomAppIntegrationAPI:
         """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
-    def encryption_keys(self) -> EncryptionKeysAPI:
+    def encryption_keys(self) -> service.provisioning.EncryptionKeysAPI:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
     @property
-    def federation_policy(self) -> AccountFederationPolicyAPI:
+    def federation_policy(self) -> service.oauth2.AccountFederationPolicyAPI:
         """These APIs manage account federation policies."""
         return self._federation_policy
 
     @property
-    def groups(self) -> AccountGroupsAPI:
+    def groups(self) -> service.iam.AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
         return self._groups
 
     @property
-    def ip_access_lists(self) -> AccountIpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.AccountIpAccessListsAPI:
         """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console."""
         return self._ip_access_lists
 
     @property
-    def log_delivery(self) -> LogDeliveryAPI:
+    def log_delivery(self) -> service.billing.LogDeliveryAPI:
         """These APIs manage log delivery configurations for this account."""
         return self._log_delivery
 
     @property
-    def metastore_assignments(self) -> AccountMetastoreAssignmentsAPI:
+    def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI:
         """These APIs manage metastore assignments to a workspace."""
         return self._metastore_assignments
 
     @property
-    def metastores(self) -> AccountMetastoresAPI:
+    def metastores(self) -> service.catalog.AccountMetastoresAPI:
         """These APIs manage Unity Catalog metastores for an account."""
         return self._metastores
 
     @property
-    def network_connectivity(self) -> NetworkConnectivityAPI:
+    def network_connectivity(self) -> service.settings.NetworkConnectivityAPI:
         """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources."""
         return self._network_connectivity
 
     @property
-    def networks(self) -> NetworksAPI:
+    def networks(self) -> service.provisioning.NetworksAPI:
         """These APIs manage network configurations for customer-managed VPCs (optional)."""
         return self._networks
 
     @property
-    def o_auth_published_apps(self) -> OAuthPublishedAppsAPI:
+    def o_auth_published_apps(self) -> service.oauth2.OAuthPublishedAppsAPI:
         """These APIs enable administrators to view all the available published OAuth applications in Databricks."""
         return self._o_auth_published_apps
 
     @property
-    def private_access(self) -> PrivateAccessAPI:
+    def private_access(self) -> service.provisioning.PrivateAccessAPI:
         """These APIs manage private access settings for this account."""
         return self._private_access
 
     @property
-    def published_app_integration(self) -> PublishedAppIntegrationAPI:
+    def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
-    def service_principal_federation_policy(self) -> ServicePrincipalFederationPolicyAPI:
+    def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI:
         """These APIs manage service principal federation policies."""
         return self._service_principal_federation_policy
 
     @property
-    def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
+    def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
         return self._service_principal_secrets
 
     @property
-    def service_principals(self) -> AccountServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.AccountServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
     @property
-    def settings(self) -> AccountSettingsAPI:
+    def settings(self) -> service.settings.AccountSettingsAPI:
         """Accounts Settings API allows users to manage settings at the account level."""
         return self._settings
 
     @property
-    def storage(self) -> StorageAPI:
+    def storage(self) -> service.provisioning.StorageAPI:
         """These APIs manage storage configurations for this workspace."""
         return self._storage
 
     @property
-    def storage_credentials(self) -> AccountStorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
     @property
-    def usage_dashboards(self) -> UsageDashboardsAPI:
+    def usage_dashboards(self) -> service.billing.UsageDashboardsAPI:
         """These APIs manage usage dashboards for this account."""
         return self._usage_dashboards
 
     @property
-    def users(self) -> AccountUsersAPI:
+    def users(self) -> service.iam.AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vpc_endpoints(self) -> VpcEndpointsAPI:
+    def vpc_endpoints(self) -> service.provisioning.VpcEndpointsAPI:
         """These APIs manage VPC endpoint configurations for this account."""
         return self._vpc_endpoints
 
     @property
-    def workspace_assignment(self) -> WorkspaceAssignmentAPI:
+    def workspace_assignment(self) -> service.iam.WorkspaceAssignmentAPI:
         """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account."""
         return self._workspace_assignment
 
     @property
-    def workspaces(self) -> WorkspacesAPI:
+    def workspaces(self) -> service.provisioning.WorkspacesAPI:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
     @property
-    def budgets(self) -> BudgetsAPI:
+    def budgets(self) -> service.billing.BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
 
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 9f86a39e2..ccd8b38ef 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.41.0'
+__version__ = '0.42.0'
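
For readers skimming the large annotation diff above: the change is type-only, so call sites are unaffected. Below is a minimal sketch (not part of the patch) of what the fully qualified return types mean for a consumer; `WorkspaceClient`, `w.catalogs.list()`, and `CatalogInfo` are existing SDK names, and the loop body is illustrative.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import CatalogInfo

w = WorkspaceClient()

# w.catalogs is now annotated as service.catalog.CatalogsAPI, so type
# checkers resolve list() to an iterator of CatalogInfo at the call site.
catalog: CatalogInfo
for catalog in w.catalogs.list():
    print(catalog.full_name)
```
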
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
index 54b55516b..3927e6351 100644
--- a/docs/workspace/catalog/credentials.rst
+++ b/docs/workspace/catalog/credentials.rst
@@ -4,65 +4,190 @@
 
 .. py:class:: CredentialsAPI
 
-    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
-    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
-    new workspace. A credential configuration encapsulates this role information, and its ID is used when
-    creating a new workspace.
+    A credential represents an authentication and authorization mechanism for accessing services on your cloud
+    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+    groups can access the credential.
+    
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+    privilege. The user who creates the credential can delegate ownership to another user or group to manage
+    permissions on it.
 
-    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
+    .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
 
-        Create credential configuration.
+        Create a credential.
+        
+        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+        which should be either **SERVICE** or **STORAGE**.
+        
+        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+        
+        :param name: str
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Optional. Supplying true to this argument skips validation of the created set of credentials.
+        
+        :returns: :class:`CredentialInfo`
         
-        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
-        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
-        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
-        ID) in the returned credential object, and configure the required access policy.
-        
-        Save the response's `credentials_id` field, which is the ID for your new credential configuration
-        object.
+
+    .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]])
+
+        Delete a credential.
         
-        For information about how to create a new workspace with this API, see [Create a new workspace using
-        the Account API]
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.
         
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        :param name_arg: str
+          Name of the credential.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
         
-        :param credentials_name: str
-          The human-readable name of the credential configuration object.
-        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
         
-        :returns: :class:`Credential`
         
 
-    .. py:method:: delete(credentials_id: str)
+    .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials
 
-        Delete credential configuration.
+        Generate a temporary service credential.
         
-        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
-        delete a credential that is associated with any workspace.
-        
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
+        Returns a set of temporary credentials generated using the specified service credential. The caller
+        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
         
+        :param credential_name: str
+          The name of the service credential used to generate a temporary credential
+        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+          The Azure cloud options to customize the requested temporary credential
+        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+          The GCP cloud options to customize the requested temporary credential
         
+        :returns: :class:`TemporaryCredentials`
         
 
-    .. py:method:: get(credentials_id: str) -> Credential
+    .. py:method:: get_credential(name_arg: str) -> CredentialInfo
 
-        Get credential configuration.
+        Get a credential.
         
-        Gets a Databricks credential configuration object for an account, both specified by ID.
+        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+        owner of the credential, or have any permission on the credential.
         
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
+        :param name_arg: str
+          Name of the credential.
         
-        :returns: :class:`Credential`
+        :returns: :class:`CredentialInfo`
         
 
-    .. py:method:: list() -> Iterator[Credential]
+    .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo]
 
-        Get all credential configurations.
+        List credentials.
+        
+        Gets an array of credentials (as __CredentialInfo__ objects).
+        
+        The array is limited to only the credentials that the caller has permission to access. If the caller
+        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+        ordering of the elements in the array.
         
-        Gets all Databricks credential configurations associated with an account specified by ID.
+        :param max_results: int (optional)
+          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+          to a value greater than 0, the page length is the minimum of this value and a server-configured
+          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+          set to a value less than 0, an invalid parameter error is returned.
+        :param page_token: str (optional)
+          Opaque token to retrieve the next page of results.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Return only credentials for the specified purpose.
         
-        :returns: Iterator over :class:`Credential`
+        :returns: Iterator over :class:`CredentialInfo`
+        
+
+    .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+
+        Update a credential.
+        
+        Updates a service or storage credential on the metastore.
+        
+        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+        If the caller is a metastore admin, only the __owner__ field can be changed.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name of credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Supply true to this argument to skip validation of the updated credential.
+        
+        :returns: :class:`CredentialInfo`
+        
+
+    .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse
+
+        Validate a credential.
+        
+        Validates a credential.
+        
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ needs to be provided. If only one of them is provided, it will be used for validation. If
+        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param credential_name: str (optional)
+          Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param purpose: :class:`CredentialPurpose` (optional)
+          The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.
+        
+        :returns: :class:`ValidateCredentialResponse`
         
\ No newline at end of file
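
Taken together, the regenerated docs above describe the Unity Catalog credentials workflow. The following is a hedged usage sketch, assuming the types import from `databricks.sdk.service.catalog` (consistent with the annotation changes earlier in this patch); the `role_arn` field name and the ARN value are illustrative assumptions, not taken from the docs.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()

# Create a service credential; per the docs, purpose selects SERVICE vs STORAGE.
cred = w.credentials.create_credential(
    name="my-service-cred",
    aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/example"),  # role_arn is an assumed field name
    purpose=CredentialPurpose.SERVICE,
    comment="created from the SDK",
)

# Exchange the long-lived credential for short-lived cloud credentials.
tmp = w.credentials.generate_temporary_service_credential(credential_name=cred.name)

# List only service credentials; the iterator follows page_token internally.
for c in w.credentials.list_credentials(purpose=CredentialPurpose.SERVICE):
    print(c.name)

# Delete; per the docs, force also removes dependent services.
w.credentials.delete_credential(name_arg=cred.name, force=True)
```
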

From b64ef183e4840a0ead31a7a5a9f10c505a91e66e Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Mon, 3 Feb 2025 18:15:54 +0100
Subject: [PATCH 094/136] [Release] Release v0.43.0 (#883)

### API Changes:

* Added
[w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview_embedded.html)
workspace-level service and
[w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_execution.html)
workspace-level service.
* Added
[w.redash_config](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/redash_config.html)
workspace-level service.
* Added `gcp_oauth_token` field for
`databricks.sdk.service.catalog.TemporaryCredentials`.
* Added `options` field for
`databricks.sdk.service.catalog.UpdateCatalog`.
* Added `disabled` field for `databricks.sdk.service.jobs.RunTask`.

OpenAPI SHA: c72c58f97b950fcb924a90ef164bcb10cfcd5ece, Date: 2025-02-03
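
As a hedged illustration of the last two items, a sketch reading the new fields; `w.jobs.get_run` and `w.catalogs.update` are existing SDK calls, while the run id and option key are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# RunTask.disabled is new in this release; report tasks disabled for the run.
run = w.jobs.get_run(run_id=1234)  # placeholder run id
for task in run.tasks or []:
    if task.disabled:
        print(f"task {task.task_key} was disabled in this run")

# UpdateCatalog.options is new; the key/value here are placeholders.
w.catalogs.update(name="main", options={"example_option": "value"})
```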
---
 .codegen/_openapi_sha                         |   2 +-
 CHANGELOG.md                                  |  12 +
 databricks/sdk/__init__.py                    | 506 +++++++++---------
 databricks/sdk/service/catalog.py             |  19 +-
 databricks/sdk/service/cleanrooms.py          |   5 +-
 databricks/sdk/service/dashboards.py          | 410 ++++++++++++++
 databricks/sdk/service/jobs.py                |   7 +
 databricks/sdk/service/settings.py            | 338 +++++++++---
 databricks/sdk/service/sql.py                 | 163 +++++-
 databricks/sdk/version.py                     |   2 +-
 .../settings/csp_enablement_account.rst       |  12 +-
 .../settings/disable_legacy_features.rst      |  12 +-
 .../settings/enable_ip_access_lists.rst       |  12 +-
 .../settings/esm_enablement_account.rst       |  12 +-
 docs/account/settings/personal_compute.rst    |  12 +-
 docs/dbdataclasses/dashboards.rst             |  44 ++
 docs/dbdataclasses/settings.rst               |   6 +
 docs/dbdataclasses/sql.rst                    |   4 +
 docs/workspace/catalog/catalogs.rst           |   4 +-
 docs/workspace/catalog/credentials.rst        | 199 ++-----
 docs/workspace/cleanrooms/clean_rooms.rst     |   5 +-
 docs/workspace/dashboards/index.rst           |   4 +-
 .../dashboards/lakeview_embedded.rst          |  19 +
 docs/workspace/dashboards/query_execution.rst |  46 ++
 docs/workspace/jobs/jobs.rst                  |   4 +-
 ...aibi_dashboard_embedding_access_policy.rst |  12 +-
 ...i_dashboard_embedding_approved_domains.rst |  12 +-
 .../settings/automatic_cluster_update.rst     |  12 +-
 .../settings/compliance_security_profile.rst  |  12 +-
 docs/workspace/settings/default_namespace.rst |  12 +-
 .../settings/disable_legacy_access.rst        |  12 +-
 .../settings/disable_legacy_dbfs.rst          |  12 +-
 .../settings/enhanced_security_monitoring.rst |  12 +-
 .../settings/restrict_workspace_admins.rst    |  12 +-
 docs/workspace/sql/alerts.rst                 |  12 +-
 docs/workspace/sql/index.rst                  |   1 +
 docs/workspace/sql/queries.rst                |  12 +-
 docs/workspace/sql/query_visualizations.rst   |  12 +-
 docs/workspace/sql/redash_config.rst          |  14 +
 39 files changed, 1446 insertions(+), 572 deletions(-)
 create mode 100644 docs/workspace/dashboards/lakeview_embedded.rst
 create mode 100644 docs/workspace/dashboards/query_execution.rst
 create mode 100644 docs/workspace/sql/redash_config.rst

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 722bd2c6c..9a95107e8 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-840c660106f820a1a5dff931d51fa5f65cd9fdd9
\ No newline at end of file
+c72c58f97b950fcb924a90ef164bcb10cfcd5ece
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b2b31f7a5..cd073f71c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,17 @@
 # Version changelog
 
+## [Release] Release v0.43.0
+
+### API Changes:
+
+ * Added [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview_embedded.html) workspace-level service and [w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_execution.html) workspace-level service.
+ * Added [w.redash_config](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/redash_config.html) workspace-level service.
+ * Added `gcp_oauth_token` field for `databricks.sdk.service.catalog.TemporaryCredentials`.
+ * Added `options` field for `databricks.sdk.service.catalog.UpdateCatalog`.
+ * Added `disabled` field for `databricks.sdk.service.jobs.RunTask`.
+
+OpenAPI SHA: c72c58f97b950fcb924a90ef164bcb10cfcd5ece, Date: 2025-02-03
+
 ### Bug Fixes
 
  * Fix docs generation when two services have the same name ([#872](https://github.com/databricks/databricks-sdk-py/pull/872)).
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 1892069c2..9f67adc59 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -5,7 +5,6 @@
 
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
-import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
@@ -43,7 +42,9 @@
                                             InstanceProfilesAPI, LibrariesAPI,
                                             PolicyComplianceForClustersAPI,
                                             PolicyFamiliesAPI)
-from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
+from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
+                                               LakeviewEmbeddedAPI,
+                                               QueryExecutionAPI)
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
 from databricks.sdk.service.iam import (AccessControlAPI,
                                         AccountAccessControlAPI,
@@ -97,7 +98,8 @@
                                         QueryHistoryAPI,
                                         QueryVisualizationsAPI,
                                         QueryVisualizationsLegacyAPI,
-                                        StatementExecutionAPI, WarehousesAPI)
+                                        RedashConfigAPI, StatementExecutionAPI,
+                                        WarehousesAPI)
 from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
                                                  VectorSearchIndexesAPI)
 from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
@@ -191,106 +193,105 @@ def __init__(self,
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
-        self._access_control = service.iam.AccessControlAPI(self._api_client)
-        self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
-        self._alerts = service.sql.AlertsAPI(self._api_client)
-        self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
-        self._apps = service.apps.AppsAPI(self._api_client)
-        self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
-        self._catalogs = service.catalog.CatalogsAPI(self._api_client)
-        self._clean_room_assets = service.cleanrooms.CleanRoomAssetsAPI(self._api_client)
-        self._clean_room_task_runs = service.cleanrooms.CleanRoomTaskRunsAPI(self._api_client)
-        self._clean_rooms = service.cleanrooms.CleanRoomsAPI(self._api_client)
-        self._cluster_policies = service.compute.ClusterPoliciesAPI(self._api_client)
+        self._access_control = AccessControlAPI(self._api_client)
+        self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
+        self._alerts = AlertsAPI(self._api_client)
+        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
+        self._apps = AppsAPI(self._api_client)
+        self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
+        self._catalogs = CatalogsAPI(self._api_client)
+        self._clean_room_assets = CleanRoomAssetsAPI(self._api_client)
+        self._clean_room_task_runs = CleanRoomTaskRunsAPI(self._api_client)
+        self._clean_rooms = CleanRoomsAPI(self._api_client)
+        self._cluster_policies = ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
-        self._command_execution = service.compute.CommandExecutionAPI(self._api_client)
-        self._connections = service.catalog.ConnectionsAPI(self._api_client)
-        self._consumer_fulfillments = service.marketplace.ConsumerFulfillmentsAPI(self._api_client)
-        self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client)
-        self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client)
-        self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI(
-            self._api_client)
-        self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client)
-        self._credentials = service.catalog.CredentialsAPI(self._api_client)
-        self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client)
-        self._current_user = service.iam.CurrentUserAPI(self._api_client)
-        self._dashboard_widgets = service.sql.DashboardWidgetsAPI(self._api_client)
-        self._dashboards = service.sql.DashboardsAPI(self._api_client)
-        self._data_sources = service.sql.DataSourcesAPI(self._api_client)
+        self._command_execution = CommandExecutionAPI(self._api_client)
+        self._connections = ConnectionsAPI(self._api_client)
+        self._consumer_fulfillments = ConsumerFulfillmentsAPI(self._api_client)
+        self._consumer_installations = ConsumerInstallationsAPI(self._api_client)
+        self._consumer_listings = ConsumerListingsAPI(self._api_client)
+        self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
+        self._consumer_providers = ConsumerProvidersAPI(self._api_client)
+        self._credentials = CredentialsAPI(self._api_client)
+        self._credentials_manager = CredentialsManagerAPI(self._api_client)
+        self._current_user = CurrentUserAPI(self._api_client)
+        self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
+        self._dashboards = DashboardsAPI(self._api_client)
+        self._data_sources = DataSourcesAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
-        self._dbsql_permissions = service.sql.DbsqlPermissionsAPI(self._api_client)
-        self._experiments = service.ml.ExperimentsAPI(self._api_client)
-        self._external_locations = service.catalog.ExternalLocationsAPI(self._api_client)
+        self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
+        self._experiments = ExperimentsAPI(self._api_client)
+        self._external_locations = ExternalLocationsAPI(self._api_client)
         self._files = _make_files_client(self._api_client, self._config)
-        self._functions = service.catalog.FunctionsAPI(self._api_client)
-        self._genie = service.dashboards.GenieAPI(self._api_client)
-        self._git_credentials = service.workspace.GitCredentialsAPI(self._api_client)
-        self._global_init_scripts = service.compute.GlobalInitScriptsAPI(self._api_client)
-        self._grants = service.catalog.GrantsAPI(self._api_client)
-        self._groups = service.iam.GroupsAPI(self._api_client)
-        self._instance_pools = service.compute.InstancePoolsAPI(self._api_client)
-        self._instance_profiles = service.compute.InstanceProfilesAPI(self._api_client)
-        self._ip_access_lists = service.settings.IpAccessListsAPI(self._api_client)
+        self._functions = FunctionsAPI(self._api_client)
+        self._genie = GenieAPI(self._api_client)
+        self._git_credentials = GitCredentialsAPI(self._api_client)
+        self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
+        self._grants = GrantsAPI(self._api_client)
+        self._groups = GroupsAPI(self._api_client)
+        self._instance_pools = InstancePoolsAPI(self._api_client)
+        self._instance_profiles = InstanceProfilesAPI(self._api_client)
+        self._ip_access_lists = IpAccessListsAPI(self._api_client)
         self._jobs = JobsExt(self._api_client)
-        self._lakeview = service.dashboards.LakeviewAPI(self._api_client)
-        self._libraries = service.compute.LibrariesAPI(self._api_client)
-        self._metastores = service.catalog.MetastoresAPI(self._api_client)
-        self._model_registry = service.ml.ModelRegistryAPI(self._api_client)
-        self._model_versions = service.catalog.ModelVersionsAPI(self._api_client)
-        self._notification_destinations = service.settings.NotificationDestinationsAPI(self._api_client)
-        self._online_tables = service.catalog.OnlineTablesAPI(self._api_client)
-        self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client)
-        self._permissions = service.iam.PermissionsAPI(self._api_client)
-        self._pipelines = service.pipelines.PipelinesAPI(self._api_client)
-        self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI(
-            self._api_client)
-        self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client)
-        self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client)
-        self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client)
-        self._provider_exchanges = service.marketplace.ProviderExchangesAPI(self._api_client)
-        self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client)
-        self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client)
-        self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI(
-            self._api_client)
-        self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI(
+        self._lakeview = LakeviewAPI(self._api_client)
+        self._lakeview_embedded = LakeviewEmbeddedAPI(self._api_client)
+        self._libraries = LibrariesAPI(self._api_client)
+        self._metastores = MetastoresAPI(self._api_client)
+        self._model_registry = ModelRegistryAPI(self._api_client)
+        self._model_versions = ModelVersionsAPI(self._api_client)
+        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
+        self._online_tables = OnlineTablesAPI(self._api_client)
+        self._permission_migration = PermissionMigrationAPI(self._api_client)
+        self._permissions = PermissionsAPI(self._api_client)
+        self._pipelines = PipelinesAPI(self._api_client)
+        self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
+        self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
+        self._policy_families = PolicyFamiliesAPI(self._api_client)
+        self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
+        self._provider_exchanges = ProviderExchangesAPI(self._api_client)
+        self._provider_files = ProviderFilesAPI(self._api_client)
+        self._provider_listings = ProviderListingsAPI(self._api_client)
+        self._provider_personalization_requests = ProviderPersonalizationRequestsAPI(self._api_client)
+        self._provider_provider_analytics_dashboards = ProviderProviderAnalyticsDashboardsAPI(
             self._api_client)
-        self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client)
-        self._providers = service.sharing.ProvidersAPI(self._api_client)
-        self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client)
-        self._queries = service.sql.QueriesAPI(self._api_client)
-        self._queries_legacy = service.sql.QueriesLegacyAPI(self._api_client)
-        self._query_history = service.sql.QueryHistoryAPI(self._api_client)
-        self._query_visualizations = service.sql.QueryVisualizationsAPI(self._api_client)
-        self._query_visualizations_legacy = service.sql.QueryVisualizationsLegacyAPI(self._api_client)
-        self._recipient_activation = service.sharing.RecipientActivationAPI(self._api_client)
-        self._recipients = service.sharing.RecipientsAPI(self._api_client)
-        self._registered_models = service.catalog.RegisteredModelsAPI(self._api_client)
-        self._repos = service.workspace.ReposAPI(self._api_client)
-        self._resource_quotas = service.catalog.ResourceQuotasAPI(self._api_client)
-        self._schemas = service.catalog.SchemasAPI(self._api_client)
-        self._secrets = service.workspace.SecretsAPI(self._api_client)
-        self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
+        self._provider_providers = ProviderProvidersAPI(self._api_client)
+        self._providers = ProvidersAPI(self._api_client)
+        self._quality_monitors = QualityMonitorsAPI(self._api_client)
+        self._queries = QueriesAPI(self._api_client)
+        self._queries_legacy = QueriesLegacyAPI(self._api_client)
+        self._query_execution = QueryExecutionAPI(self._api_client)
+        self._query_history = QueryHistoryAPI(self._api_client)
+        self._query_visualizations = QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
+        self._recipient_activation = RecipientActivationAPI(self._api_client)
+        self._recipients = RecipientsAPI(self._api_client)
+        self._redash_config = RedashConfigAPI(self._api_client)
+        self._registered_models = RegisteredModelsAPI(self._api_client)
+        self._repos = ReposAPI(self._api_client)
+        self._resource_quotas = ResourceQuotasAPI(self._api_client)
+        self._schemas = SchemasAPI(self._api_client)
+        self._secrets = SecretsAPI(self._api_client)
+        self._service_principals = ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
-        self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
-            self._api_client, serving_endpoints)
-        self._settings = service.settings.SettingsAPI(self._api_client)
-        self._shares = service.sharing.SharesAPI(self._api_client)
-        self._statement_execution = service.sql.StatementExecutionAPI(self._api_client)
-        self._storage_credentials = service.catalog.StorageCredentialsAPI(self._api_client)
-        self._system_schemas = service.catalog.SystemSchemasAPI(self._api_client)
-        self._table_constraints = service.catalog.TableConstraintsAPI(self._api_client)
-        self._tables = service.catalog.TablesAPI(self._api_client)
-        self._temporary_table_credentials = service.catalog.TemporaryTableCredentialsAPI(self._api_client)
-        self._token_management = service.settings.TokenManagementAPI(self._api_client)
-        self._tokens = service.settings.TokensAPI(self._api_client)
-        self._users = service.iam.UsersAPI(self._api_client)
-        self._vector_search_endpoints = service.vectorsearch.VectorSearchEndpointsAPI(self._api_client)
-        self._vector_search_indexes = service.vectorsearch.VectorSearchIndexesAPI(self._api_client)
-        self._volumes = service.catalog.VolumesAPI(self._api_client)
-        self._warehouses = service.sql.WarehousesAPI(self._api_client)
+        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
+        self._settings = SettingsAPI(self._api_client)
+        self._shares = SharesAPI(self._api_client)
+        self._statement_execution = StatementExecutionAPI(self._api_client)
+        self._storage_credentials = StorageCredentialsAPI(self._api_client)
+        self._system_schemas = SystemSchemasAPI(self._api_client)
+        self._table_constraints = TableConstraintsAPI(self._api_client)
+        self._tables = TablesAPI(self._api_client)
+        self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client)
+        self._token_management = TokenManagementAPI(self._api_client)
+        self._tokens = TokensAPI(self._api_client)
+        self._users = UsersAPI(self._api_client)
+        self._vector_search_endpoints = VectorSearchEndpointsAPI(self._api_client)
+        self._vector_search_indexes = VectorSearchIndexesAPI(self._api_client)
+        self._volumes = VolumesAPI(self._api_client)
+        self._warehouses = WarehousesAPI(self._api_client)
         self._workspace = WorkspaceExt(self._api_client)
-        self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
-        self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
+        self._workspace_bindings = WorkspaceBindingsAPI(self._api_client)
+        self._workspace_conf = WorkspaceConfAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -305,57 +306,57 @@ def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
     @property
-    def access_control(self) -> service.iam.AccessControlAPI:
+    def access_control(self) -> AccessControlAPI:
         """Rule based Access Control for Databricks Resources."""
         return self._access_control
 
     @property
-    def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI:
+    def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
         return self._account_access_control_proxy
 
     @property
-    def alerts(self) -> service.sql.AlertsAPI:
+    def alerts(self) -> AlertsAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
     @property
-    def alerts_legacy(self) -> service.sql.AlertsLegacyAPI:
+    def alerts_legacy(self) -> AlertsLegacyAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
 
     @property
-    def apps(self) -> service.apps.AppsAPI:
+    def apps(self) -> AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
 
     @property
-    def artifact_allowlists(self) -> service.catalog.ArtifactAllowlistsAPI:
+    def artifact_allowlists(self) -> ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
 
     @property
-    def catalogs(self) -> service.catalog.CatalogsAPI:
+    def catalogs(self) -> CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
     @property
-    def clean_room_assets(self) -> service.cleanrooms.CleanRoomAssetsAPI:
+    def clean_room_assets(self) -> CleanRoomAssetsAPI:
         """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
         return self._clean_room_assets
 
     @property
-    def clean_room_task_runs(self) -> service.cleanrooms.CleanRoomTaskRunsAPI:
+    def clean_room_task_runs(self) -> CleanRoomTaskRunsAPI:
         """Clean room task runs are the executions of notebooks in a clean room."""
         return self._clean_room_task_runs
 
     @property
-    def clean_rooms(self) -> service.cleanrooms.CleanRoomsAPI:
+    def clean_rooms(self) -> CleanRoomsAPI:
         """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
         return self._clean_rooms
 
     @property
-    def cluster_policies(self) -> service.compute.ClusterPoliciesAPI:
+    def cluster_policies(self) -> ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
         return self._cluster_policies
 
@@ -365,67 +366,67 @@ def clusters(self) -> ClustersExt:
         return self._clusters
 
     @property
-    def command_execution(self) -> service.compute.CommandExecutionAPI:
+    def command_execution(self) -> CommandExecutionAPI:
         """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters."""
         return self._command_execution
 
     @property
-    def connections(self) -> service.catalog.ConnectionsAPI:
+    def connections(self) -> ConnectionsAPI:
         """Connections allow for creating a connection to an external data source."""
         return self._connections
 
     @property
-    def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI:
+    def consumer_fulfillments(self) -> ConsumerFulfillmentsAPI:
         """Fulfillments are entities that allow consumers to preview installations."""
         return self._consumer_fulfillments
 
     @property
-    def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI:
+    def consumer_installations(self) -> ConsumerInstallationsAPI:
         """Installations are entities that allow consumers to interact with Databricks Marketplace listings."""
         return self._consumer_installations
 
     @property
-    def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI:
+    def consumer_listings(self) -> ConsumerListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._consumer_listings
 
     @property
-    def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI:
+    def consumer_personalization_requests(self) -> ConsumerPersonalizationRequestsAPI:
         """Personalization Requests allow customers to interact with the individualized Marketplace listing flow."""
         return self._consumer_personalization_requests
 
     @property
-    def consumer_providers(self) -> service.marketplace.ConsumerProvidersAPI:
+    def consumer_providers(self) -> ConsumerProvidersAPI:
         """Providers are the entities that publish listings to the Marketplace."""
         return self._consumer_providers
 
     @property
-    def credentials(self) -> service.catalog.CredentialsAPI:
+    def credentials(self) -> CredentialsAPI:
         """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
         return self._credentials
 
     @property
-    def credentials_manager(self) -> service.settings.CredentialsManagerAPI:
+    def credentials_manager(self) -> CredentialsManagerAPI:
         """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens."""
         return self._credentials_manager
 
     @property
-    def current_user(self) -> service.iam.CurrentUserAPI:
+    def current_user(self) -> CurrentUserAPI:
         """This API allows retrieving information about currently authenticated user or service principal."""
         return self._current_user
 
     @property
-    def dashboard_widgets(self) -> service.sql.DashboardWidgetsAPI:
+    def dashboard_widgets(self) -> DashboardWidgetsAPI:
         """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace."""
         return self._dashboard_widgets
 
     @property
-    def dashboards(self) -> service.sql.DashboardsAPI:
+    def dashboards(self) -> DashboardsAPI:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
     @property
-    def data_sources(self) -> service.sql.DataSourcesAPI:
+    def data_sources(self) -> DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
         return self._data_sources
 
@@ -435,67 +436,67 @@ def dbfs(self) -> DbfsExt:
         return self._dbfs
 
     @property
-    def dbsql_permissions(self) -> service.sql.DbsqlPermissionsAPI:
+    def dbsql_permissions(self) -> DbsqlPermissionsAPI:
         """The SQL Permissions API is similar to the endpoints of the :method:permissions/set."""
         return self._dbsql_permissions
 
     @property
-    def experiments(self) -> service.ml.ExperimentsAPI:
+    def experiments(self) -> ExperimentsAPI:
         """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
         return self._experiments
 
     @property
-    def external_locations(self) -> service.catalog.ExternalLocationsAPI:
+    def external_locations(self) -> ExternalLocationsAPI:
         """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path."""
         return self._external_locations
 
     @property
-    def files(self) -> service.files.FilesAPI:
+    def files(self) -> FilesAPI:
         """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI."""
         return self._files
 
     @property
-    def functions(self) -> service.catalog.FunctionsAPI:
+    def functions(self) -> FunctionsAPI:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
     @property
-    def genie(self) -> service.dashboards.GenieAPI:
+    def genie(self) -> GenieAPI:
         """Genie provides a no-code experience for business users, powered by AI/BI."""
         return self._genie
 
     @property
-    def git_credentials(self) -> service.workspace.GitCredentialsAPI:
+    def git_credentials(self) -> GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
         return self._git_credentials
 
     @property
-    def global_init_scripts(self) -> service.compute.GlobalInitScriptsAPI:
+    def global_init_scripts(self) -> GlobalInitScriptsAPI:
         """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace."""
         return self._global_init_scripts
 
     @property
-    def grants(self) -> service.catalog.GrantsAPI:
+    def grants(self) -> GrantsAPI:
         """In Unity Catalog, data is secure by default."""
         return self._grants
 
     @property
-    def groups(self) -> service.iam.GroupsAPI:
+    def groups(self) -> GroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
         return self._groups
 
     @property
-    def instance_pools(self) -> service.compute.InstancePoolsAPI:
+    def instance_pools(self) -> InstancePoolsAPI:
         """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times."""
         return self._instance_pools
 
     @property
-    def instance_profiles(self) -> service.compute.InstanceProfilesAPI:
+    def instance_profiles(self) -> InstanceProfilesAPI:
         """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with."""
         return self._instance_profiles
 
     @property
-    def ip_access_lists(self) -> service.settings.IpAccessListsAPI:
+    def ip_access_lists(self) -> IpAccessListsAPI:
         """IP Access List enables admins to configure IP access lists."""
         return self._ip_access_lists
 
@@ -505,178 +506,192 @@ def jobs(self) -> JobsExt:
         return self._jobs
 
     @property
-    def lakeview(self) -> service.dashboards.LakeviewAPI:
+    def lakeview(self) -> LakeviewAPI:
         """These APIs provide specific management operations for Lakeview dashboards."""
         return self._lakeview
 
     @property
-    def libraries(self) -> service.compute.LibrariesAPI:
+    def lakeview_embedded(self) -> LakeviewEmbeddedAPI:
+        """Token-based Lakeview APIs for embedding dashboards in external applications."""
+        return self._lakeview_embedded
+
+    @property
+    def libraries(self) -> LibrariesAPI:
         """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
         return self._libraries
 
     @property
-    def metastores(self) -> service.catalog.MetastoresAPI:
+    def metastores(self) -> MetastoresAPI:
         """A metastore is the top-level container of objects in Unity Catalog."""
         return self._metastores
 
     @property
-    def model_registry(self) -> service.ml.ModelRegistryAPI:
+    def model_registry(self) -> ModelRegistryAPI:
         """Note: This API reference documents APIs for the Workspace Model Registry."""
         return self._model_registry
 
     @property
-    def model_versions(self) -> service.catalog.ModelVersionsAPI:
+    def model_versions(self) -> ModelVersionsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
     @property
-    def notification_destinations(self) -> service.settings.NotificationDestinationsAPI:
+    def notification_destinations(self) -> NotificationDestinationsAPI:
         """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
         return self._notification_destinations
 
     @property
-    def online_tables(self) -> service.catalog.OnlineTablesAPI:
+    def online_tables(self) -> OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
         return self._online_tables
 
     @property
-    def permission_migration(self) -> service.iam.PermissionMigrationAPI:
+    def permission_migration(self) -> PermissionMigrationAPI:
         """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
-    def permissions(self) -> service.iam.PermissionsAPI:
+    def permissions(self) -> PermissionsAPI:
         """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints."""
         return self._permissions
 
     @property
-    def pipelines(self) -> service.pipelines.PipelinesAPI:
+    def pipelines(self) -> PipelinesAPI:
         """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
         return self._pipelines
 
     @property
-    def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI:
+    def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
         """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
         return self._policy_compliance_for_clusters
 
     @property
-    def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI:
+    def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
         """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
         return self._policy_compliance_for_jobs
 
     @property
-    def policy_families(self) -> service.compute.PolicyFamiliesAPI:
+    def policy_families(self) -> PolicyFamiliesAPI:
         """View available policy families."""
         return self._policy_families
 
     @property
-    def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI:
+    def provider_exchange_filters(self) -> ProviderExchangeFiltersAPI:
         """Marketplace exchanges filters curate which groups can access an exchange."""
         return self._provider_exchange_filters
 
     @property
-    def provider_exchanges(self) -> service.marketplace.ProviderExchangesAPI:
+    def provider_exchanges(self) -> ProviderExchangesAPI:
         """Marketplace exchanges allow providers to share their listings with a curated set of customers."""
         return self._provider_exchanges
 
     @property
-    def provider_files(self) -> service.marketplace.ProviderFilesAPI:
+    def provider_files(self) -> ProviderFilesAPI:
         """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons."""
         return self._provider_files
 
     @property
-    def provider_listings(self) -> service.marketplace.ProviderListingsAPI:
+    def provider_listings(self) -> ProviderListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._provider_listings
 
     @property
-    def provider_personalization_requests(self) -> service.marketplace.ProviderPersonalizationRequestsAPI:
+    def provider_personalization_requests(self) -> ProviderPersonalizationRequestsAPI:
         """Personalization requests are an alternate to instantly available listings."""
         return self._provider_personalization_requests
 
     @property
-    def provider_provider_analytics_dashboards(
-            self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI:
+    def provider_provider_analytics_dashboards(self) -> ProviderProviderAnalyticsDashboardsAPI:
         """Manage templated analytics solution for providers."""
         return self._provider_provider_analytics_dashboards
 
     @property
-    def provider_providers(self) -> service.marketplace.ProviderProvidersAPI:
+    def provider_providers(self) -> ProviderProvidersAPI:
         """Providers are entities that manage assets in Marketplace."""
         return self._provider_providers
 
     @property
-    def providers(self) -> service.sharing.ProvidersAPI:
+    def providers(self) -> ProvidersAPI:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers
 
     @property
-    def quality_monitors(self) -> service.catalog.QualityMonitorsAPI:
+    def quality_monitors(self) -> QualityMonitorsAPI:
         """A monitor computes and monitors data or model quality metrics for a table over time."""
         return self._quality_monitors
 
     @property
-    def queries(self) -> service.sql.QueriesAPI:
+    def queries(self) -> QueriesAPI:
         """The queries API can be used to perform CRUD operations on queries."""
         return self._queries
 
     @property
-    def queries_legacy(self) -> service.sql.QueriesLegacyAPI:
+    def queries_legacy(self) -> QueriesLegacyAPI:
         """These endpoints are used for CRUD operations on query definitions."""
         return self._queries_legacy
 
     @property
-    def query_history(self) -> service.sql.QueryHistoryAPI:
+    def query_execution(self) -> QueryExecutionAPI:
+        """Query execution APIs for AI / BI Dashboards."""
+        return self._query_execution
+
+    @property
+    def query_history(self) -> QueryHistoryAPI:
         """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
         return self._query_history
 
     @property
-    def query_visualizations(self) -> service.sql.QueryVisualizationsAPI:
+    def query_visualizations(self) -> QueryVisualizationsAPI:
         """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
         return self._query_visualizations
 
     @property
-    def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI:
+    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
         """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
         return self._query_visualizations_legacy
 
     @property
-    def recipient_activation(self) -> service.sharing.RecipientActivationAPI:
+    def recipient_activation(self) -> RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
         return self._recipient_activation
 
     @property
-    def recipients(self) -> service.sharing.RecipientsAPI:
+    def recipients(self) -> RecipientsAPI:
         """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares."""
         return self._recipients
 
     @property
-    def registered_models(self) -> service.catalog.RegisteredModelsAPI:
+    def redash_config(self) -> RedashConfigAPI:
+        """Redash V2 service for workspace configurations (internal)."""
+        return self._redash_config
+
+    @property
+    def registered_models(self) -> RegisteredModelsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._registered_models
 
     @property
-    def repos(self) -> service.workspace.ReposAPI:
+    def repos(self) -> ReposAPI:
         """The Repos API allows users to manage their git repos."""
         return self._repos
 
     @property
-    def resource_quotas(self) -> service.catalog.ResourceQuotasAPI:
+    def resource_quotas(self) -> ResourceQuotasAPI:
         """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
         return self._resource_quotas
 
     @property
-    def schemas(self) -> service.catalog.SchemasAPI:
+    def schemas(self) -> SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
         return self._schemas
 
     @property
-    def secrets(self) -> service.workspace.SecretsAPI:
+    def secrets(self) -> SecretsAPI:
         """The Secrets API allows you to manage secrets, secret scopes, and access permissions."""
         return self._secrets
 
     @property
-    def service_principals(self) -> service.iam.ServicePrincipalsAPI:
+    def service_principals(self) -> ServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
@@ -686,82 +701,82 @@ def serving_endpoints(self) -> ServingEndpointsExt:
         return self._serving_endpoints
 
     @property
-    def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI:
+    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
         """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
         return self._serving_endpoints_data_plane
 
     @property
-    def settings(self) -> service.settings.SettingsAPI:
+    def settings(self) -> SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
         return self._settings
 
     @property
-    def shares(self) -> service.sharing.SharesAPI:
+    def shares(self) -> SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares
 
     @property
-    def statement_execution(self) -> service.sql.StatementExecutionAPI:
+    def statement_execution(self) -> StatementExecutionAPI:
         """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result."""
         return self._statement_execution
 
     @property
-    def storage_credentials(self) -> service.catalog.StorageCredentialsAPI:
+    def storage_credentials(self) -> StorageCredentialsAPI:
         """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant."""
         return self._storage_credentials
 
     @property
-    def system_schemas(self) -> service.catalog.SystemSchemasAPI:
+    def system_schemas(self) -> SystemSchemasAPI:
         """A system schema is a schema that lives within the system catalog."""
         return self._system_schemas
 
     @property
-    def table_constraints(self) -> service.catalog.TableConstraintsAPI:
+    def table_constraints(self) -> TableConstraintsAPI:
         """Primary key and foreign key constraints encode relationships between fields in tables."""
         return self._table_constraints
 
     @property
-    def tables(self) -> service.catalog.TablesAPI:
+    def tables(self) -> TablesAPI:
         """A table resides in the third layer of Unity Catalog’s three-level namespace."""
         return self._tables
 
     @property
-    def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI:
+    def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI:
         """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks."""
         return self._temporary_table_credentials
 
     @property
-    def token_management(self) -> service.settings.TokenManagementAPI:
+    def token_management(self) -> TokenManagementAPI:
         """Enables administrators to get all tokens and delete tokens for other users."""
         return self._token_management
 
     @property
-    def tokens(self) -> service.settings.TokensAPI:
+    def tokens(self) -> TokensAPI:
         """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs."""
         return self._tokens
 
     @property
-    def users(self) -> service.iam.UsersAPI:
+    def users(self) -> UsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI:
+    def vector_search_endpoints(self) -> VectorSearchEndpointsAPI:
         """**Endpoint**: Represents the compute resources to host vector search indexes."""
         return self._vector_search_endpoints
 
     @property
-    def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI:
+    def vector_search_indexes(self) -> VectorSearchIndexesAPI:
         """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries."""
         return self._vector_search_indexes
 
     @property
-    def volumes(self) -> service.catalog.VolumesAPI:
+    def volumes(self) -> VolumesAPI:
         """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files."""
         return self._volumes
 
     @property
-    def warehouses(self) -> service.sql.WarehousesAPI:
+    def warehouses(self) -> WarehousesAPI:
         """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL."""
         return self._warehouses
 
@@ -771,12 +786,12 @@ def workspace(self) -> WorkspaceExt:
         return self._workspace
 
     @property
-    def workspace_bindings(self) -> service.catalog.WorkspaceBindingsAPI:
+    def workspace_bindings(self) -> WorkspaceBindingsAPI:
         """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__."""
         return self._workspace_bindings
 
     @property
-    def workspace_conf(self) -> service.settings.WorkspaceConfAPI:
+    def workspace_conf(self) -> WorkspaceConfAPI:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
@@ -850,36 +865,35 @@ def __init__(self,
                                    product_version=product_version)
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
-        self._access_control = service.iam.AccountAccessControlAPI(self._api_client)
-        self._billable_usage = service.billing.BillableUsageAPI(self._api_client)
-        self._budget_policy = service.billing.BudgetPolicyAPI(self._api_client)
-        self._credentials = service.provisioning.CredentialsAPI(self._api_client)
-        self._custom_app_integration = service.oauth2.CustomAppIntegrationAPI(self._api_client)
-        self._encryption_keys = service.provisioning.EncryptionKeysAPI(self._api_client)
-        self._federation_policy = service.oauth2.AccountFederationPolicyAPI(self._api_client)
-        self._groups = service.iam.AccountGroupsAPI(self._api_client)
-        self._ip_access_lists = service.settings.AccountIpAccessListsAPI(self._api_client)
-        self._log_delivery = service.billing.LogDeliveryAPI(self._api_client)
-        self._metastore_assignments = service.catalog.AccountMetastoreAssignmentsAPI(self._api_client)
-        self._metastores = service.catalog.AccountMetastoresAPI(self._api_client)
-        self._network_connectivity = service.settings.NetworkConnectivityAPI(self._api_client)
-        self._networks = service.provisioning.NetworksAPI(self._api_client)
-        self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client)
-        self._private_access = service.provisioning.PrivateAccessAPI(self._api_client)
-        self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client)
-        self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI(
-            self._api_client)
-        self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client)
-        self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client)
-        self._settings = service.settings.AccountSettingsAPI(self._api_client)
-        self._storage = service.provisioning.StorageAPI(self._api_client)
-        self._storage_credentials = service.catalog.AccountStorageCredentialsAPI(self._api_client)
-        self._usage_dashboards = service.billing.UsageDashboardsAPI(self._api_client)
-        self._users = service.iam.AccountUsersAPI(self._api_client)
-        self._vpc_endpoints = service.provisioning.VpcEndpointsAPI(self._api_client)
-        self._workspace_assignment = service.iam.WorkspaceAssignmentAPI(self._api_client)
-        self._workspaces = service.provisioning.WorkspacesAPI(self._api_client)
-        self._budgets = service.billing.BudgetsAPI(self._api_client)
+        self._access_control = AccountAccessControlAPI(self._api_client)
+        self._billable_usage = BillableUsageAPI(self._api_client)
+        self._budget_policy = BudgetPolicyAPI(self._api_client)
+        self._credentials = CredentialsAPI(self._api_client)
+        self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
+        self._encryption_keys = EncryptionKeysAPI(self._api_client)
+        self._federation_policy = AccountFederationPolicyAPI(self._api_client)
+        self._groups = AccountGroupsAPI(self._api_client)
+        self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
+        self._log_delivery = LogDeliveryAPI(self._api_client)
+        self._metastore_assignments = AccountMetastoreAssignmentsAPI(self._api_client)
+        self._metastores = AccountMetastoresAPI(self._api_client)
+        self._network_connectivity = NetworkConnectivityAPI(self._api_client)
+        self._networks = NetworksAPI(self._api_client)
+        self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
+        self._private_access = PrivateAccessAPI(self._api_client)
+        self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = ServicePrincipalFederationPolicyAPI(self._api_client)
+        self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
+        self._service_principals = AccountServicePrincipalsAPI(self._api_client)
+        self._settings = AccountSettingsAPI(self._api_client)
+        self._storage = StorageAPI(self._api_client)
+        self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
+        self._users = AccountUsersAPI(self._api_client)
+        self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
+        self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
+        self._workspaces = WorkspacesAPI(self._api_client)
+        self._budgets = BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -890,147 +904,147 @@ def api_client(self) -> client.ApiClient:
         return self._api_client
 
     @property
-    def access_control(self) -> service.iam.AccountAccessControlAPI:
+    def access_control(self) -> AccountAccessControlAPI:
         """These APIs manage access rules on resources in an account."""
         return self._access_control
 
     @property
-    def billable_usage(self) -> service.billing.BillableUsageAPI:
+    def billable_usage(self) -> BillableUsageAPI:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
     @property
-    def budget_policy(self) -> service.billing.BudgetPolicyAPI:
+    def budget_policy(self) -> BudgetPolicyAPI:
         """A service serves REST API about Budget policies."""
         return self._budget_policy
 
     @property
-    def credentials(self) -> service.provisioning.CredentialsAPI:
+    def credentials(self) -> CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
         return self._credentials
 
     @property
-    def custom_app_integration(self) -> service.oauth2.CustomAppIntegrationAPI:
+    def custom_app_integration(self) -> CustomAppIntegrationAPI:
         """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
-    def encryption_keys(self) -> service.provisioning.EncryptionKeysAPI:
+    def encryption_keys(self) -> EncryptionKeysAPI:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
     @property
-    def federation_policy(self) -> service.oauth2.AccountFederationPolicyAPI:
+    def federation_policy(self) -> AccountFederationPolicyAPI:
         """These APIs manage account federation policies."""
         return self._federation_policy
 
     @property
-    def groups(self) -> service.iam.AccountGroupsAPI:
+    def groups(self) -> AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
         return self._groups
 
     @property
-    def ip_access_lists(self) -> service.settings.AccountIpAccessListsAPI:
+    def ip_access_lists(self) -> AccountIpAccessListsAPI:
         """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console."""
         return self._ip_access_lists
 
     @property
-    def log_delivery(self) -> service.billing.LogDeliveryAPI:
+    def log_delivery(self) -> LogDeliveryAPI:
         """These APIs manage log delivery configurations for this account."""
         return self._log_delivery
 
     @property
-    def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI:
+    def metastore_assignments(self) -> AccountMetastoreAssignmentsAPI:
         """These APIs manage metastore assignments to a workspace."""
         return self._metastore_assignments
 
     @property
-    def metastores(self) -> service.catalog.AccountMetastoresAPI:
+    def metastores(self) -> AccountMetastoresAPI:
         """These APIs manage Unity Catalog metastores for an account."""
         return self._metastores
 
     @property
-    def network_connectivity(self) -> service.settings.NetworkConnectivityAPI:
+    def network_connectivity(self) -> NetworkConnectivityAPI:
         """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources."""
         return self._network_connectivity
 
     @property
-    def networks(self) -> service.provisioning.NetworksAPI:
+    def networks(self) -> NetworksAPI:
         """These APIs manage network configurations for customer-managed VPCs (optional)."""
         return self._networks
 
     @property
-    def o_auth_published_apps(self) -> service.oauth2.OAuthPublishedAppsAPI:
+    def o_auth_published_apps(self) -> OAuthPublishedAppsAPI:
         """These APIs enable administrators to view all the available published OAuth applications in Databricks."""
         return self._o_auth_published_apps
 
     @property
-    def private_access(self) -> service.provisioning.PrivateAccessAPI:
+    def private_access(self) -> PrivateAccessAPI:
         """These APIs manage private access settings for this account."""
         return self._private_access
 
     @property
-    def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI:
+    def published_app_integration(self) -> PublishedAppIntegrationAPI:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
-    def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI:
+    def service_principal_federation_policy(self) -> ServicePrincipalFederationPolicyAPI:
         """These APIs manage service principal federation policies."""
         return self._service_principal_federation_policy
 
     @property
-    def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI:
+    def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
         return self._service_principal_secrets
 
     @property
-    def service_principals(self) -> service.iam.AccountServicePrincipalsAPI:
+    def service_principals(self) -> AccountServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
     @property
-    def settings(self) -> service.settings.AccountSettingsAPI:
+    def settings(self) -> AccountSettingsAPI:
         """Accounts Settings API allows users to manage settings at the account level."""
         return self._settings
 
     @property
-    def storage(self) -> service.provisioning.StorageAPI:
+    def storage(self) -> StorageAPI:
         """These APIs manage storage configurations for this workspace."""
         return self._storage
 
     @property
-    def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI:
+    def storage_credentials(self) -> AccountStorageCredentialsAPI:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
     @property
-    def usage_dashboards(self) -> service.billing.UsageDashboardsAPI:
+    def usage_dashboards(self) -> UsageDashboardsAPI:
         """These APIs manage usage dashboards for this account."""
         return self._usage_dashboards
 
     @property
-    def users(self) -> service.iam.AccountUsersAPI:
+    def users(self) -> AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vpc_endpoints(self) -> service.provisioning.VpcEndpointsAPI:
+    def vpc_endpoints(self) -> VpcEndpointsAPI:
         """These APIs manage VPC endpoint configurations for this account."""
         return self._vpc_endpoints
 
     @property
-    def workspace_assignment(self) -> service.iam.WorkspaceAssignmentAPI:
+    def workspace_assignment(self) -> WorkspaceAssignmentAPI:
         """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account."""
         return self._workspace_assignment
 
     @property
-    def workspaces(self) -> service.provisioning.WorkspacesAPI:
+    def workspaces(self) -> WorkspacesAPI:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
     @property
-    def budgets(self) -> service.billing.BudgetsAPI:
+    def budgets(self) -> BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
 
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index 63efcd627..a8a087762 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -6913,12 +6913,17 @@ class TemporaryCredentials:
     """Server time when the credential will expire, in epoch milliseconds. The API client is advised to
     cache the credential given this expiration time."""
 
+    gcp_oauth_token: Optional[GcpOauthToken] = None
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
     def as_dict(self) -> dict:
         """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
         if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
         if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -6927,6 +6932,7 @@ def as_shallow_dict(self) -> dict:
         if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
         if self.azure_aad: body['azure_aad'] = self.azure_aad
         if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
         return body
 
     @classmethod
@@ -6934,7 +6940,8 @@ def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials:
         """Deserializes the TemporaryCredentials from a dictionary."""
         return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
                    azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
-                   expiration_time=d.get('expiration_time', None))
+                   expiration_time=d.get('expiration_time', None),
+                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken))
 
 
 @dataclass
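
A short round-trip sketch for the new `gcp_oauth_token` field; the `as_dict`/`from_dict` wiring matches the hunk above, while `GcpOauthToken`'s `oauth_token` attribute is an assumption, since that class body is not shown in this diff:

```python
# Minimal sketch, assuming GcpOauthToken carries an `oauth_token` string
# (its fields are not shown in this hunk). The gcp_oauth_token wiring
# through as_dict()/from_dict() is exactly what the change above adds.
from databricks.sdk.service.catalog import GcpOauthToken, TemporaryCredentials

creds = TemporaryCredentials(
    expiration_time=1738540800000,  # epoch millis, per the field docstring
    gcp_oauth_token=GcpOauthToken(oauth_token="ya29.example"),  # assumed field name
)
body = creds.as_dict()  # now includes a 'gcp_oauth_token' key
restored = TemporaryCredentials.from_dict(body)
assert restored.gcp_oauth_token is not None
```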
@@ -7043,6 +7050,9 @@ class UpdateCatalog:
     new_name: Optional[str] = None
     """New name for the catalog."""
 
+    options: Optional[Dict[str, str]] = None
+    """A map of key-value properties attached to the securable."""
+
     owner: Optional[str] = None
     """Username of current owner of catalog."""
 
@@ -7058,6 +7068,7 @@ def as_dict(self) -> dict:
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
         if self.owner is not None: body['owner'] = self.owner
         if self.properties: body['properties'] = self.properties
         return body
@@ -7071,6 +7082,7 @@ def as_shallow_dict(self) -> dict:
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
         if self.owner is not None: body['owner'] = self.owner
         if self.properties: body['properties'] = self.properties
         return body
@@ -7084,6 +7096,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog:
                    isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
+                   options=d.get('options', None),
                    owner=d.get('owner', None),
                    properties=d.get('properties', None))
 
@@ -8986,6 +8999,7 @@ def update(self,
                enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
                isolation_mode: Optional[CatalogIsolationMode] = None,
                new_name: Optional[str] = None,
+               options: Optional[Dict[str, str]] = None,
                owner: Optional[str] = None,
                properties: Optional[Dict[str, str]] = None) -> CatalogInfo:
         """Update a catalog.
@@ -9003,6 +9017,8 @@ def update(self,
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the catalog.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
         :param owner: str (optional)
           Username of current owner of catalog.
         :param properties: Dict[str,str] (optional)
@@ -9016,6 +9032,7 @@ def update(self,
             body['enable_predictive_optimization'] = enable_predictive_optimization.value
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
+        if options is not None: body['options'] = options
         if owner is not None: body['owner'] = owner
         if properties is not None: body['properties'] = properties
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
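Hypothetical usage of the new `options` parameter; the catalog name and option keys below are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Attach key-value options to an existing catalog.
updated = w.catalogs.update(name='main', options={'example_option': 'example_value'})
print(updated.name)
```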
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
index 813ca5f37..45ad2f290 100755
--- a/databricks/sdk/service/cleanrooms.py
+++ b/databricks/sdk/service/cleanrooms.py
@@ -1228,8 +1228,9 @@ def create(self, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
         
         Create a new clean room with the specified collaborators. This method is asynchronous; the returned
         name field inside the clean_room field can be used to poll the clean room status, using the
-        :method:cleanrooms/get method. When this method returns, the cluster will be in a PROVISIONING state.
-        The cluster will be usable once it enters an ACTIVE state.
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+        once it enters an ACTIVE state.
         
         The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
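A sketch of the asynchronous flow described above, assuming `WorkspaceClient` exposes the clean rooms service as `w.clean_rooms` and that the status enum carries an `ACTIVE` value:

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom

w = WorkspaceClient()
# create() returns while the clean room is still PROVISIONING.
created = w.clean_rooms.create(clean_room=CleanRoom(name='demo-clean-room'))
# Poll with get() until it becomes ACTIVE, per the docstring above.
status = w.clean_rooms.get(created.name).status
while status is not None and status.value != 'ACTIVE':
    time.sleep(10)
    status = w.clean_rooms.get(created.name).status
```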
         
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index fab54e84b..58a89ea55 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -20,6 +20,66 @@
 # all definitions in this file are in alphabetical order
 
 
+@dataclass
+class CancelQueryExecutionResponse:
+    status: Optional[List[CancelQueryExecutionResponseStatus]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.status: body['status'] = [v.as_dict() for v in self.status]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status: body['status'] = self.status
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponse:
+        """Deserializes the CancelQueryExecutionResponse from a dictionary."""
+        return cls(status=_repeated_dict(d, 'status', CancelQueryExecutionResponseStatus))
+
+
+@dataclass
+class CancelQueryExecutionResponseStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    pending: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    success: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.pending: body['pending'] = self.pending.as_dict()
+        if self.success: body['success'] = self.success.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.pending: body['pending'] = self.pending
+        if self.success: body['success'] = self.success
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponseStatus:
+        """Deserializes the CancelQueryExecutionResponseStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None),
+                   pending=_from_dict(d, 'pending', Empty),
+                   success=_from_dict(d, 'success', Empty))
+
+
 @dataclass
 class CronSchedule:
     quartz_cron_expression: str
@@ -207,6 +267,87 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse:
         return cls()
 
 
+@dataclass
+class Empty:
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Empty into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Empty:
+        """Deserializes the Empty from a dictionary."""
+        return cls()
+
+
+@dataclass
+class ExecutePublishedDashboardQueryRequest:
+    """Execute query request for published Dashboards. Since published dashboards have the option of
+    running as the publisher, the datasets, warehouse_id are excluded from the request and instead
+    read from the source (lakeview-config) via the additional parameters (dashboardName and
+    dashboardRevisionId)"""
+
+    dashboard_name: str
+    """Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains
+    the list of datasets, warehouse_id, and embedded_credentials"""
+
+    dashboard_revision_id: str
+
+    override_warehouse_id: Optional[str] = None
+    """A dashboard schedule can override the warehouse used as compute for processing the published
+    dashboard queries"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExecutePublishedDashboardQueryRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
+        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecutePublishedDashboardQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
+        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExecutePublishedDashboardQueryRequest:
+        """Deserializes the ExecutePublishedDashboardQueryRequest from a dictionary."""
+        return cls(dashboard_name=d.get('dashboard_name', None),
+                   dashboard_revision_id=d.get('dashboard_revision_id', None),
+                   override_warehouse_id=d.get('override_warehouse_id', None))
+
+
+@dataclass
+class ExecuteQueryResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExecuteQueryResponse:
+        """Deserializes the ExecuteQueryResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class GenieAttachment:
     """Genie AI Response"""
@@ -513,6 +654,25 @@ def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
                    message_id=d.get('message_id', None))
 
 
+@dataclass
+class GetPublishedDashboardEmbeddedResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetPublishedDashboardEmbeddedResponse:
+        """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary."""
+        return cls()
+
+
 class LifecycleState(Enum):
 
     ACTIVE = 'ACTIVE'
@@ -747,6 +907,74 @@ def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
                    update_parameter_syntax=d.get('update_parameter_syntax', None))
 
 
+@dataclass
+class PendingStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    def as_dict(self) -> dict:
+        """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PendingStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PendingStatus:
+        """Deserializes the PendingStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None))
+
+
+@dataclass
+class PollQueryStatusResponse:
+    data: Optional[List[PollQueryStatusResponseData]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponse:
+        """Deserializes the PollQueryStatusResponse from a dictionary."""
+        return cls(data=_repeated_dict(d, 'data', PollQueryStatusResponseData))
+
+
+@dataclass
+class PollQueryStatusResponseData:
+    status: QueryResponseStatus
+
+    def as_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.status: body['status'] = self.status.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status: body['status'] = self.status
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponseData:
+        """Deserializes the PollQueryStatusResponseData from a dictionary."""
+        return cls(status=_from_dict(d, 'status', QueryResponseStatus))
+
+
 @dataclass
 class PublishRequest:
     dashboard_id: Optional[str] = None
@@ -895,6 +1123,55 @@ def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
                    title=d.get('title', None))
 
 
+@dataclass
+class QueryResponseStatus:
+    canceled: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    closed: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    pending: Optional[PendingStatus] = None
+
+    statement_id: Optional[str] = None
+    """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
+    identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
+    logging purpose to record the statement_id of all QueryResponseStatus."""
+
+    success: Optional[SuccessStatus] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.canceled: body['canceled'] = self.canceled.as_dict()
+        if self.closed: body['closed'] = self.closed.as_dict()
+        if self.pending: body['pending'] = self.pending.as_dict()
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.success: body['success'] = self.success.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.canceled: body['canceled'] = self.canceled
+        if self.closed: body['closed'] = self.closed
+        if self.pending: body['pending'] = self.pending
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.success: body['success'] = self.success
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryResponseStatus:
+        """Deserializes the QueryResponseStatus from a dictionary."""
+        return cls(canceled=_from_dict(d, 'canceled', Empty),
+                   closed=_from_dict(d, 'closed', Empty),
+                   pending=_from_dict(d, 'pending', PendingStatus),
+                   statement_id=d.get('statement_id', None),
+                   success=_from_dict(d, 'success', SuccessStatus))
+
+
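For illustration, a sketch of dispatching on these oneof-style status fields; the response object is assumed to come from the `poll_published_query_status` method added later in this diff:

```python
from databricks.sdk.service.dashboards import PollQueryStatusResponse

def summarize(resp: PollQueryStatusResponse) -> None:
    # Exactly one of pending/success/canceled/closed is expected to be set.
    for item in resp.data or []:
        st = item.status
        if st.success is not None:
            print(f'done token={st.success.data_token} truncated={st.success.truncated}')
        elif st.pending is not None:
            print(f'still running token={st.pending.data_token}')
        elif st.canceled is not None:
            print('canceled')
        elif st.closed is not None:
            print('closed')
```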
 @dataclass
 class QuerySchema:
     columns: Optional[List[QuerySchemaColumn]] = None
@@ -1218,6 +1495,35 @@ def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser:
         return cls(user_id=d.get('user_id', None))
 
 
+@dataclass
+class SuccessStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    truncated: Optional[bool] = None
+    """Whether the query result is truncated (either by byte limit or row limit)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> SuccessStatus:
+        """Deserializes the SuccessStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None), truncated=d.get('truncated', None))
+
+
 @dataclass
 class TextAttachment:
     content: Optional[str] = None
@@ -1907,3 +2213,107 @@ def update_schedule(self,
                            body=body,
                            headers=headers)
         return Schedule.from_dict(res)
+
+
+class LakeviewEmbeddedAPI:
+    """Token-based Lakeview APIs for embedding dashboards in external applications."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get_published_dashboard_embedded(self, dashboard_id: str):
+        """Read a published dashboard in an embedded ui.
+        
+        Get the current published dashboard within an embedded context.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('GET',
+                     f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded',
+                     headers=headers)
+
+
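Hypothetical usage of the embedded read endpoint; the `lakeview_embedded` accessor name follows the SDK's naming convention but is an assumption here, and the dashboard UUID is a placeholder:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Raises on failure; returns nothing on success, per the method above.
w.lakeview_embedded.get_published_dashboard_embedded('01ef0000000000000000000000000000')
```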
+class QueryExecutionAPI:
+    """Query execution APIs for AI / BI Dashboards"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def cancel_published_query_execution(self,
+                                         dashboard_name: str,
+                                         dashboard_revision_id: str,
+                                         *,
+                                         tokens: Optional[List[str]] = None) -> CancelQueryExecutionResponse:
+        """Cancel the results for the a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`CancelQueryExecutionResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
+        if tokens is not None: query['tokens'] = [v for v in tokens]
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        return CancelQueryExecutionResponse.from_dict(res)
+
+    def execute_published_dashboard_query(self,
+                                          dashboard_name: str,
+                                          dashboard_revision_id: str,
+                                          *,
+                                          override_warehouse_id: Optional[str] = None):
+        """Execute a query for a published dashboard.
+        
+        :param dashboard_name: str
+          Dashboard name and revision_id are required to retrieve the PublishedDatasetDataModel, which
+          contains the list of datasets, warehouse_id, and embedded_credentials
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries
+        
+        
+        """
+        body = {}
+        if dashboard_name is not None: body['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: body['dashboard_revision_id'] = dashboard_revision_id
+        if override_warehouse_id is not None: body['override_warehouse_id'] = override_warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        self._api.do('POST', '/api/2.0/lakeview-query/query/published', body=body, headers=headers)
+
+    def poll_published_query_status(self,
+                                    dashboard_name: str,
+                                    dashboard_revision_id: str,
+                                    *,
+                                    tokens: Optional[List[str]] = None) -> PollQueryStatusResponse:
+        """Poll the results for the a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`PollQueryStatusResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
+        if tokens is not None: query['tokens'] = [v for v in tokens]
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        return PollQueryStatusResponse.from_dict(res)
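A sketch of the full execute / poll / cancel flow; the `query_execution` accessor name follows the SDK's naming convention but is an assumption, and the dashboard identifiers are placeholders:

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
name, rev = 'dashboard-name-placeholder', 'revision-id-placeholder'

# Kick off execution of the published dashboard's queries.
w.query_execution.execute_published_dashboard_query(name, rev)

# Poll until every query reports success, or give up and cancel.
for _ in range(30):
    resp = w.query_execution.poll_published_query_status(name, rev)
    if all(d.status.success is not None for d in (resp.data or [])):
        break
    time.sleep(5)
else:
    w.query_execution.cancel_published_query_execution(name, rev)
```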
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 44445d020..6cc2e4213 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -4915,6 +4915,10 @@ class RunTask:
     description: Optional[str] = None
     """An optional description for this task."""
 
+    disabled: Optional[bool] = None
+    """Denotes whether or not the task was disabled by the user. Disabled tasks do not execute and are
+    immediately skipped as soon as they are unblocked."""
+
     effective_performance_target: Optional[PerformanceTarget] = None
     """effective_performance_target is the actual performance target used by the run during execution.
     effective_performance_target can differ from performance_target depending on if the job was
@@ -5069,6 +5073,7 @@ def as_dict(self) -> dict:
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
+        if self.disabled is not None: body['disabled'] = self.disabled
         if self.effective_performance_target is not None:
             body['effective_performance_target'] = self.effective_performance_target.value
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
@@ -5116,6 +5121,7 @@ def as_shallow_dict(self) -> dict:
         if self.dbt_task: body['dbt_task'] = self.dbt_task
         if self.depends_on: body['depends_on'] = self.depends_on
         if self.description is not None: body['description'] = self.description
+        if self.disabled is not None: body['disabled'] = self.disabled
         if self.effective_performance_target is not None:
             body['effective_performance_target'] = self.effective_performance_target
         if self.email_notifications: body['email_notifications'] = self.email_notifications
@@ -5164,6 +5170,7 @@ def from_dict(cls, d: Dict[str, any]) -> RunTask:
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
+                   disabled=d.get('disabled', None),
                    effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
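The new flag surfaces on run tasks; a minimal sketch that lists the disabled tasks of a run (the run id is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
run = w.jobs.get_run(1234567890)  # placeholder run id
disabled_tasks = [t.task_key for t in (run.tasks or []) if t.disabled]
print(disabled_tasks)
```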
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index 488ab72b9..42c8c882d 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -563,7 +563,9 @@ class ComplianceStandard(Enum):
     FEDRAMP_IL5 = 'FEDRAMP_IL5'
     FEDRAMP_MODERATE = 'FEDRAMP_MODERATE'
     HIPAA = 'HIPAA'
+    HITRUST = 'HITRUST'
     IRAP_PROTECTED = 'IRAP_PROTECTED'
+    ISMAP = 'ISMAP'
     ITAR_EAR = 'ITAR_EAR'
     NONE = 'NONE'
     PCI_DSS = 'PCI_DSS'
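The new members behave like any other `ComplianceStandard` value, e.g.:

```python
from databricks.sdk.service.settings import ComplianceStandard

# The new members round-trip by value like any other enum member.
assert ComplianceStandard('HITRUST') is ComplianceStandard.HITRUST
assert ComplianceStandard.ISMAP.value == 'ISMAP'
```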
@@ -3642,9 +3644,15 @@ class UpdateAccountIpAccessEnableRequest:
     setting: AccountIpAccessEnable
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body."""
@@ -3680,9 +3688,15 @@ class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
     setting: AibiDashboardEmbeddingAccessPolicySetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3718,9 +3732,15 @@ class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
     setting: AibiDashboardEmbeddingApprovedDomainsSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3756,9 +3776,15 @@ class UpdateAutomaticClusterUpdateSettingRequest:
     setting: AutomaticClusterUpdateSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3794,9 +3820,15 @@ class UpdateComplianceSecurityProfileSettingRequest:
     setting: ComplianceSecurityProfileSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3832,9 +3864,15 @@ class UpdateCspEnablementAccountSettingRequest:
     setting: CspEnablementAccountSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3877,9 +3915,15 @@ class UpdateDefaultNamespaceSettingRequest:
     applies when using Unity Catalog-enabled compute."""
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -3915,9 +3959,15 @@ class UpdateDisableLegacyAccessRequest:
     setting: DisableLegacyAccess
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
@@ -3953,9 +4003,15 @@ class UpdateDisableLegacyDbfsRequest:
     setting: DisableLegacyDbfs
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
@@ -3991,9 +4047,15 @@ class UpdateDisableLegacyFeaturesRequest:
     setting: DisableLegacyFeatures
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
@@ -4029,9 +4091,15 @@ class UpdateEnhancedSecurityMonitoringSettingRequest:
     setting: EnhancedSecurityMonitoringSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -4067,9 +4135,15 @@ class UpdateEsmEnablementAccountSettingRequest:
     setting: EsmEnablementAccountSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -4191,9 +4265,15 @@ class UpdatePersonalComputeSettingRequest:
     setting: PersonalComputeSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -4248,9 +4328,15 @@ class UpdateRestrictWorkspaceAdminsSettingRequest:
     setting: RestrictWorkspaceAdminsSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -4602,9 +4688,15 @@ def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolic
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         """
@@ -4692,9 +4784,15 @@ def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDom
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         """
@@ -4756,9 +4854,15 @@ def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AutomaticClusterUpdateSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AutomaticClusterUpdateSetting`
         """
@@ -4822,9 +4926,15 @@ def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`ComplianceSecurityProfileSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`ComplianceSecurityProfileSetting`
         """
@@ -4924,9 +5034,15 @@ def update(self, allow_missing: bool, setting: CspEnablementAccountSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`CspEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`CspEnablementAccountSetting`
         """
@@ -5034,9 +5150,15 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
           restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
           applies when using Unity Catalog-enabled compute.
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DefaultNamespaceSetting`
         """
@@ -5124,9 +5246,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyAccess`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyAccess`
         """
@@ -5209,9 +5337,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyDbfs`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyDbfs`
         """
@@ -5300,9 +5434,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyFeatures`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyFeatures`
         """
@@ -5389,9 +5529,15 @@ def update(self, allow_missing: bool, setting: AccountIpAccessEnable,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AccountIpAccessEnable`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AccountIpAccessEnable`
         """
@@ -5458,9 +5604,15 @@ def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EnhancedSecurityMonitoringSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         """
@@ -5521,9 +5673,15 @@ def update(self, allow_missing: bool, setting: EsmEnablementAccountSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EsmEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EsmEnablementAccountSetting`
         """
@@ -6153,9 +6311,15 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`PersonalComputeSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`PersonalComputeSetting`
         """
@@ -6253,9 +6417,15 @@ def update(self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`RestrictWorkspaceAdminsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         """
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 2c20a7aef..cfa94aaa7 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -631,6 +631,79 @@ class ChannelName(Enum):
     CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
 
 
+@dataclass
+class ClientConfig:
+    allow_custom_js_visualizations: Optional[bool] = None
+
+    allow_downloads: Optional[bool] = None
+
+    allow_external_shares: Optional[bool] = None
+
+    allow_subscriptions: Optional[bool] = None
+
+    date_format: Optional[str] = None
+
+    date_time_format: Optional[str] = None
+
+    disable_publish: Optional[bool] = None
+
+    enable_legacy_autodetect_types: Optional[bool] = None
+
+    feature_show_permissions_control: Optional[bool] = None
+
+    hide_plotly_mode_bar: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_custom_js_visualizations is not None:
+            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
+        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
+        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
+        if self.date_format is not None: body['date_format'] = self.date_format
+        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
+        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+        if self.enable_legacy_autodetect_types is not None:
+            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+        if self.feature_show_permissions_control is not None:
+            body['feature_show_permissions_control'] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClientConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_custom_js_visualizations is not None:
+            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
+        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
+        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
+        if self.date_format is not None: body['date_format'] = self.date_format
+        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
+        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+        if self.enable_legacy_autodetect_types is not None:
+            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+        if self.feature_show_permissions_control is not None:
+            body['feature_show_permissions_control'] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClientConfig:
+        """Deserializes the ClientConfig from a dictionary."""
+        return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None),
+                   allow_downloads=d.get('allow_downloads', None),
+                   allow_external_shares=d.get('allow_external_shares', None),
+                   allow_subscriptions=d.get('allow_subscriptions', None),
+                   date_format=d.get('date_format', None),
+                   date_time_format=d.get('date_time_format', None),
+                   disable_publish=d.get('disable_publish', None),
+                   enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None),
+                   feature_show_permissions_control=d.get('feature_show_permissions_control', None),
+                   hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None))
+
+
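
The new `ClientConfig` follows the SDK's usual `as_dict`/`from_dict` round-trip pattern; a quick sketch with arbitrary example values:

```python
# Round-trip sketch for ClientConfig: only non-None fields serialize.
from databricks.sdk.service.sql import ClientConfig

cfg = ClientConfig(allow_downloads=True, date_format='YYYY-MM-DD')
body = cfg.as_dict()
assert body == {'allow_downloads': True, 'date_format': 'YYYY-MM-DD'}
assert ClientConfig.from_dict(body) == cfg
```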
 @dataclass
 class ColumnInfo:
     name: Optional[str] = None
@@ -5540,9 +5613,15 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
 @dataclass
 class UpdateAlertRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     alert: Optional[UpdateAlertRequestAlert] = None
 
@@ -5646,9 +5725,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
 @dataclass
 class UpdateQueryRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     id: Optional[str] = None
 
@@ -5782,9 +5867,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
 @dataclass
 class UpdateVisualizationRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     id: Optional[str] = None
 
@@ -6464,9 +6555,15 @@ def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertReques
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param alert: :class:`UpdateAlertRequestAlert` (optional)
         
         :returns: :class:`Alert`
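
As a hedged example of the mask semantics described above, updating only an alert's display name might look like the following; the id and name are placeholders:

```python
# Hypothetical sketch: update one field of an alert via update_mask.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import UpdateAlertRequestAlert

w = WorkspaceClient()
w.alerts.update(
    id='<alert-id>',
    update_mask='display_name',  # explicit field list, not `*`
    alert=UpdateAlertRequestAlert(display_name='cpu-above-90'),
)
```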
@@ -7173,9 +7270,15 @@ def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryReques
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
@@ -7547,9 +7650,15 @@ def update(self,
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
@@ -7686,6 +7795,24 @@ def update(self,
         return LegacyVisualization.from_dict(res)
 
 
+class RedashConfigAPI:
+    """Redash V2 service for workspace configurations (internal)"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get_config(self) -> ClientConfig:
+        """Read workspace configuration for Redash-v2.
+        
+        :returns: :class:`ClientConfig`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/redash-v2/config', headers=headers)
+        return ClientConfig.from_dict(res)
+
+
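
A short usage sketch for the new service; that it is exposed as `w.redash_config` on the workspace client is an assumption based on the SDK's naming of generated APIs:

```python
# Assumed accessor name `redash_config`; read-only config fetch.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
cfg = w.redash_config.get_config()
print(cfg.allow_downloads, cfg.date_format)
```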
 class StatementExecutionAPI:
     """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
     fetch the result.
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index ccd8b38ef..1e79165d5 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.42.0'
+__version__ = '0.43.0'
diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst
index b6fec691c..885aae89f 100644
--- a/docs/account/settings/csp_enablement_account.rst
+++ b/docs/account/settings/csp_enablement_account.rst
@@ -37,9 +37,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`CspEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`CspEnablementAccountSetting`
         
\ No newline at end of file
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
index d7f1db9d3..b10d7e2dc 100644
--- a/docs/account/settings/disable_legacy_features.rst
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -52,9 +52,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyFeatures`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyFeatures`
         
\ No newline at end of file
diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst
index 3d32a762b..9485b7332 100644
--- a/docs/account/settings/enable_ip_access_lists.rst
+++ b/docs/account/settings/enable_ip_access_lists.rst
@@ -49,9 +49,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AccountIpAccessEnable`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AccountIpAccessEnable`
         
\ No newline at end of file
diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst
index 59376793b..e9359d907 100644
--- a/docs/account/settings/esm_enablement_account.rst
+++ b/docs/account/settings/esm_enablement_account.rst
@@ -34,9 +34,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EsmEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EsmEnablementAccountSetting`
         
\ No newline at end of file
diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst
index 00ccf3012..54e958a28 100644
--- a/docs/account/settings/personal_compute.rst
+++ b/docs/account/settings/personal_compute.rst
@@ -54,9 +54,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`PersonalComputeSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`PersonalComputeSetting`
         
\ No newline at end of file
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 6d0e847ba..42b0fb462 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -4,6 +4,14 @@ Dashboards
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.dashboards`` module.
 
 .. py:currentmodule:: databricks.sdk.service.dashboards
+.. autoclass:: CancelQueryExecutionResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: CancelQueryExecutionResponseStatus
+   :members:
+   :undoc-members:
+
 .. autoclass:: CronSchedule
    :members:
    :undoc-members:
@@ -78,6 +86,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Empty
+   :members:
+   :undoc-members:
+
+.. autoclass:: ExecutePublishedDashboardQueryRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: ExecuteQueryResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: GenieAttachment
    :members:
    :undoc-members:
@@ -106,6 +126,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetPublishedDashboardEmbeddedResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LifecycleState
 
    .. py:attribute:: ACTIVE
@@ -290,6 +314,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PendingStatus
+   :members:
+   :undoc-members:
+
+.. autoclass:: PollQueryStatusResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: PollQueryStatusResponseData
+   :members:
+   :undoc-members:
+
 .. autoclass:: PublishRequest
    :members:
    :undoc-members:
@@ -302,6 +338,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueryResponseStatus
+   :members:
+   :undoc-members:
+
 .. autoclass:: QuerySchema
    :members:
    :undoc-members:
@@ -342,6 +382,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: SuccessStatus
+   :members:
+   :undoc-members:
+
 .. autoclass:: TextAttachment
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index b6fb0be58..2325c4023 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -139,9 +139,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: HIPAA
       :value: "HIPAA"
 
+   .. py:attribute:: HITRUST
+      :value: "HITRUST"
+
    .. py:attribute:: IRAP_PROTECTED
       :value: "IRAP_PROTECTED"
 
+   .. py:attribute:: ISMAP
+      :value: "ISMAP"
+
    .. py:attribute:: ITAR_EAR
       :value: "ITAR_EAR"
 
diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst
index 1a252f7c6..c63fe7cd2 100644
--- a/docs/dbdataclasses/sql.rst
+++ b/docs/dbdataclasses/sql.rst
@@ -117,6 +117,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CHANNEL_NAME_PREVIOUS
       :value: "CHANNEL_NAME_PREVIOUS"
 
+.. autoclass:: ClientConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: ColumnInfo
    :members:
    :undoc-members:
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 200168ee6..1d6b6dc2a 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -143,7 +143,7 @@
         :returns: Iterator over :class:`CatalogInfo`
         
 
-    .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
+    .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
 
 
         Usage:
@@ -178,6 +178,8 @@
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the catalog.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
         :param owner: str (optional)
           Username of current owner of catalog.
         :param properties: Dict[str,str] (optional)
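
A hedged sketch of the new `options` parameter on `catalogs.update`; the catalog name and option values are placeholders:

```python
# Hypothetical sketch: attach options while updating a catalog.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.catalogs.update(
    name='main',
    comment='catalog with options',
    options={'example_key': 'example_value'},  # placeholder option
)
```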
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
index 3927e6351..54b55516b 100644
--- a/docs/workspace/catalog/credentials.rst
+++ b/docs/workspace/catalog/credentials.rst
@@ -4,190 +4,65 @@
 
 .. py:class:: CredentialsAPI
 
-    A credential represents an authentication and authorization mechanism for accessing services on your cloud
-    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
-    groups can access the credential.
-    
-    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
-    privilege. The user who creates the credential can delegate ownership to another user or group to manage
-    permissions on it.
+    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
+    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+    new workspace. A credential configuration encapsulates this role information, and its ID is used when
+    creating a new workspace.
 
-    .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
 
-        Create a credential.
-        
-        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
-        which should be either **SERVICE** or **STORAGE**.
-        
-        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
-        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
-        
-        :param name: str
-          The credential name. The name must be unique among storage and service credentials within the
-          metastore.
-        :param aws_iam_role: :class:`AwsIamRole` (optional)
-          The AWS IAM role configuration
-        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-          The Azure managed identity configuration.
-        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
-        :param comment: str (optional)
-          Comment associated with the credential.
-        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
-          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
-        :param purpose: :class:`CredentialPurpose` (optional)
-          Indicates the purpose of the credential.
-        :param read_only: bool (optional)
-          Whether the credential is usable only for read operations. Only applicable when purpose is
-          **STORAGE**.
-        :param skip_validation: bool (optional)
-          Optional. Supplying true to this argument skips validation of the created set of credentials.
-        
-        :returns: :class:`CredentialInfo`
+        Create credential configuration.
         
-
-    .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]])
-
-        Delete a credential.
-        
-        Deletes a service or storage credential from the metastore. The caller must be an owner of the
-        credential.
+        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+        ID) in the returned credential object, and configure the required access policy.
         
-        :param name_arg: str
-          Name of the credential.
-        :param force: bool (optional)
-          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
-          external locations and external tables (when purpose is **STORAGE**).
+        Save the response's `credentials_id` field, which is the ID for your new credential configuration
+        object.
         
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API]
         
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
         
-
-    .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials
-
-        Generate a temporary service credential.
-        
-        Returns a set of temporary credentials generated using the specified service credential. The caller
-        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
-        
-        :param credential_name: str
-          The name of the service credential used to generate a temporary credential
-        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
-          The Azure cloud options to customize the requested temporary credential
-        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
-          The GCP cloud options to customize the requested temporary credential
+        :param credentials_name: str
+          The human-readable name of the credential configuration object.
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
         
-        :returns: :class:`TemporaryCredentials`
+        :returns: :class:`Credential`
         
 
-    .. py:method:: get_credential(name_arg: str) -> CredentialInfo
+    .. py:method:: delete(credentials_id: str)
 
-        Get a credential.
+        Delete credential configuration.
+        
+        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+        delete a credential that is associated with any workspace.
         
-        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
-        owner of the credential, or have any permission on the credential.
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
         
-        :param name_arg: str
-          Name of the credential.
         
-        :returns: :class:`CredentialInfo`
         
 
-    .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo]
+    .. py:method:: get(credentials_id: str) -> Credential
 
-        List credentials.
-        
-        Gets an array of credentials (as __CredentialInfo__ objects).
+        Get credential configuration.
         
-        The array is limited to only the credentials that the caller has permission to access. If the caller
-        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
-        ordering of the elements in the array.
+        Gets a Databricks credential configuration object for an account, both specified by ID.
         
-        :param max_results: int (optional)
-          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
-          to a value greater than 0, the page length is the minimum of this value and a server-configured
-          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
-          set to a value less than 0, an invalid parameter error is returned.
-        :param page_token: str (optional)
-          Opaque token to retrieve the next page of results.
-        :param purpose: :class:`CredentialPurpose` (optional)
-          Return only credentials for the specified purpose.
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
         
-        :returns: Iterator over :class:`CredentialInfo`
+        :returns: :class:`Credential`
         
 
-    .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+    .. py:method:: list() -> Iterator[Credential]
 
-        Update a credential.
-        
-        Updates a service or storage credential on the metastore.
-        
-        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
-        If the caller is a metastore admin, only the __owner__ field can be changed.
-        
-        :param name_arg: str
-          Name of the credential.
-        :param aws_iam_role: :class:`AwsIamRole` (optional)
-          The AWS IAM role configuration
-        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-          The Azure managed identity configuration.
-        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
-        :param comment: str (optional)
-          Comment associated with the credential.
-        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
-          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
-        :param force: bool (optional)
-          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
-          external locations and external tables (when purpose is **STORAGE**).
-        :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-        :param new_name: str (optional)
-          New name of credential.
-        :param owner: str (optional)
-          Username of current owner of credential.
-        :param read_only: bool (optional)
-          Whether the credential is usable only for read operations. Only applicable when purpose is
-          **STORAGE**.
-        :param skip_validation: bool (optional)
-          Supply true to this argument to skip validation of the updated credential.
-        
-        :returns: :class:`CredentialInfo`
+        Get all credential configurations.
         
-
-    .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse
-
-        Validate a credential.
-        
-        Validates a credential.
-        
-        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
-        credential must be provided.
-        
-        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
-        __url__ need to be provided. If only one of them is provided, it will be used for validation. And if
-        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
-        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
-        credential must be provided.
-        
-        The caller must be a metastore admin or the credential owner or have the required permission on the
-        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
-        
-        :param aws_iam_role: :class:`AwsIamRole` (optional)
-          The AWS IAM role configuration
-        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-          The Azure managed identity configuration.
-        :param credential_name: str (optional)
-          Required. The name of an existing credential or long-lived cloud credential to validate.
-        :param external_location_name: str (optional)
-          The name of an existing external location to validate. Only applicable for storage credentials
-          (purpose is **STORAGE**.)
-        :param purpose: :class:`CredentialPurpose` (optional)
-          The purpose of the credential. This should only be used when the credential is specified.
-        :param read_only: bool (optional)
-          Whether the credential is only usable for read operations. Only applicable for storage credentials
-          (purpose is **STORAGE**.)
-        :param url: str (optional)
-          The external location url to validate. Only applicable when purpose is **STORAGE**.
-        
-        :returns: :class:`ValidateCredentialResponse`
+        Gets all Databricks credential configurations associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`Credential`
         
\ No newline at end of file
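
For orientation, a minimal sketch of the account-level credential configuration flow documented above; the role ARN and names are placeholder assumptions:

```python
# Hypothetical sketch: create a credential configuration and keep its
# ID for later workspace creation.
from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import (
    CreateCredentialAwsCredentials, CreateCredentialStsRole)

a = AccountClient()
cred = a.credentials.create(
    credentials_name='my-credentials',
    aws_credentials=CreateCredentialAwsCredentials(
        sts_role=CreateCredentialStsRole(
            role_arn='arn:aws:iam::123456789012:role/example-role')),
)
print(cred.credentials_id)  # save for workspace creation
```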
diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst
index 0d1468399..8ef5d8827 100644
--- a/docs/workspace/cleanrooms/clean_rooms.rst
+++ b/docs/workspace/cleanrooms/clean_rooms.rst
@@ -14,8 +14,9 @@
         
         Create a new clean room with the specified collaborators. This method is asynchronous; the returned
         name field inside the clean_room field can be used to poll the clean room status, using the
-        :method:cleanrooms/get method. When this method returns, the cluster will be in a PROVISIONING state.
-        The cluster will be usable once it enters an ACTIVE state.
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+        once it enters an ACTIVE state.
         
         The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
         
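
A hedged polling sketch for the PROVISIONING-to-ACTIVE transition described above; the collaborator configuration is elided and the status access pattern is an assumption:

```python
# Hypothetical sketch: poll a new clean room until it leaves
# PROVISIONING (collaborator setup elided for brevity).
import time
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom

w = WorkspaceClient()
created = w.clean_rooms.create(clean_room=CleanRoom(name='example-clean-room'))
while w.clean_rooms.get(name=created.name).status.value == 'PROVISIONING':
    time.sleep(10)
```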
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index 6d1565bb6..940efa5dd 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -8,4 +8,6 @@ Manage Lakeview dashboards
    :maxdepth: 1
 
    genie
-   lakeview
\ No newline at end of file
+   lakeview
+   lakeview_embedded
+   query_execution
\ No newline at end of file
diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst
new file mode 100644
index 000000000..4c06031f5
--- /dev/null
+++ b/docs/workspace/dashboards/lakeview_embedded.rst
@@ -0,0 +1,19 @@
+``w.lakeview_embedded``: Lakeview Embedded
+==========================================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: LakeviewEmbeddedAPI
+
+    Token-based Lakeview APIs for embedding dashboards in external applications.
+
+    .. py:method:: get_published_dashboard_embedded(dashboard_id: str)
+
+        Read a published dashboard in an embedded UI.
+        
+        Get the current published dashboard within an embedded context.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        
\ No newline at end of file
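
A minimal usage sketch for the new API; the dashboard UUID is a placeholder:

```python
# Hypothetical sketch: fetch a published dashboard in embedded context.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.lakeview_embedded.get_published_dashboard_embedded(
    dashboard_id='00000000-0000-0000-0000-000000000000')
```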
diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst
new file mode 100644
index 000000000..5672183d9
--- /dev/null
+++ b/docs/workspace/dashboards/query_execution.rst
@@ -0,0 +1,46 @@
+``w.query_execution``: Query Execution
+======================================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: QueryExecutionAPI
+
+    Query execution APIs for AI/BI Dashboards
+
+    .. py:method:: cancel_published_query_execution(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> CancelQueryExecutionResponse
+
+        Cancel the results for a query on a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`CancelQueryExecutionResponse`
+        
+
+    .. py:method:: execute_published_dashboard_query(dashboard_name: str, dashboard_revision_id: str [, override_warehouse_id: Optional[str]])
+
+        Execute a query for a published dashboard.
+        
+        :param dashboard_name: str
+          Dashboard name and revision_id are required to retrieve the PublishedDatasetDataModel, which
+          contains the list of datasets, warehouse_id, and embedded_credentials.
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries.
+        
+        
+        
+
+    .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse
+
+        Poll the results for a query on a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`PollQueryStatusResponse`
+        
\ No newline at end of file
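
A hedged sketch of the execute-then-poll flow these methods describe; the dashboard name and revision id are placeholders:

```python
# Hypothetical sketch: kick off published-dashboard queries, then poll.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.query_execution.execute_published_dashboard_query(
    dashboard_name='example-dashboard',
    dashboard_revision_id='example-revision')
status = w.query_execution.poll_published_query_status(
    dashboard_name='example-dashboard',
    dashboard_revision_id='example-revision')
print(status)
```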
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index d54bc088d..f4857476a 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -1,5 +1,5 @@
-``w.jobs``: Jobs (latest)
-=========================
+``w.jobs``: Jobs (2.2)
+======================
 .. currentmodule:: databricks.sdk.service.jobs
 
 .. py:class:: JobsExt
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
index 00d12fa36..66c621997 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -50,9 +50,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
index d793e9a7c..0c9294130 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -51,9 +51,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst
index 2219e1130..350e0e713 100644
--- a/docs/workspace/settings/automatic_cluster_update.rst
+++ b/docs/workspace/settings/automatic_cluster_update.rst
@@ -36,9 +36,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AutomaticClusterUpdateSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AutomaticClusterUpdateSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
index f503830bc..855451b82 100644
--- a/docs/workspace/settings/compliance_security_profile.rst
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -38,9 +38,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`ComplianceSecurityProfileSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`ComplianceSecurityProfileSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst
index 061a0e34e..960949930 100644
--- a/docs/workspace/settings/default_namespace.rst
+++ b/docs/workspace/settings/default_namespace.rst
@@ -72,9 +72,15 @@
           restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
           applies when using Unity Catalog-enabled compute.
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DefaultNamespaceSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
index c8baba3a7..a015e777f 100644
--- a/docs/workspace/settings/disable_legacy_access.rst
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -53,9 +53,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyAccess`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyAccess`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
index ad11fa606..502111fe4 100644
--- a/docs/workspace/settings/disable_legacy_dbfs.rst
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -49,9 +49,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyDbfs`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyDbfs`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst
index fe7668973..c9dfb547d 100644
--- a/docs/workspace/settings/enhanced_security_monitoring.rst
+++ b/docs/workspace/settings/enhanced_security_monitoring.rst
@@ -40,9 +40,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EnhancedSecurityMonitoringSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst
index 47660fda4..b025112cc 100644
--- a/docs/workspace/settings/restrict_workspace_admins.rst
+++ b/docs/workspace/settings/restrict_workspace_admins.rst
@@ -62,9 +62,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`RestrictWorkspaceAdminsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst
index c552d5f80..c8d9c31ab 100644
--- a/docs/workspace/sql/alerts.rst
+++ b/docs/workspace/sql/alerts.rst
@@ -182,9 +182,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param alert: :class:`UpdateAlertRequestAlert` (optional)
         
         :returns: :class:`Alert`
diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst
index 728730209..bddb6a827 100644
--- a/docs/workspace/sql/index.rst
+++ b/docs/workspace/sql/index.rst
@@ -18,5 +18,6 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer
    query_history
    query_visualizations
    query_visualizations_legacy
+   redash_config
    statement_execution
    warehouses
\ No newline at end of file
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index 1f01c2f1d..959552850 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -151,9 +151,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
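The same masking rules apply to query updates; a hedged sketch, with `<query-id>` as a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import UpdateQueryRequestQuery

w = WorkspaceClient()

# Only the fields named in update_mask are touched; commas (no spaces)
# separate multiple fields, and dots reach into sub-fields.
query = w.queries.update(
    id="<query-id>",
    update_mask="display_name,description",
    query=UpdateQueryRequestQuery(
        display_name="Nightly revenue",
        description="Refreshed by the nightly job",
    ),
)
```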
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst
index 95095fb20..ac3d6c565 100644
--- a/docs/workspace/sql/query_visualizations.rst
+++ b/docs/workspace/sql/query_visualizations.rst
@@ -37,9 +37,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst
new file mode 100644
index 000000000..9b4382dd5
--- /dev/null
+++ b/docs/workspace/sql/redash_config.rst
@@ -0,0 +1,14 @@
+``w.redash_config``: Redash Config
+==================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. py:class:: RedashConfigAPI
+
+    Redash V2 service for workspace configurations (internal)
+
+    .. py:method:: get_config() -> ClientConfig
+
+        Read workspace configuration for Redash-v2.
+        
+        :returns: :class:`ClientConfig`
+        
\ No newline at end of file
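A one-line usage sketch for the new (internal) service, assuming default authentication:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
cfg = w.redash_config.get_config()  # returns sql.ClientConfig
```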

From 3ab8c3bffc5ef086c8fb8d5fd2923c0020c51f36 Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Tue, 4 Feb 2025 17:40:39 +0100
Subject: [PATCH 095/136] [Internal] Fix
 `tests/integration/test_dbutils.py::test_secrets` (#884)

## What changes are proposed in this pull request?
`tests/integration/test_dbutils.py::test_secrets` currently assumes that
the principal running the test has access to all key vaults in our test
workspace. This isn't necessarily the case. To alleviate this, rather
than listing all secrets across all scopes, we just check that our scope
is included in the list of secret scopes, and only list the secrets from
our scope.

## How is this tested?

Integration tests will automatically run on this PR.
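For reference, the scope-scoped pattern the diff below adopts looks roughly like this (`random_scope` is the scope created by the test fixture):

```python
from databricks.sdk.runtime import dbutils

# Check that our scope exists instead of walking every scope in the workspace.
all_scopes = dbutils.secrets.listScopes()
assert random_scope in [scope.getName() for scope in all_scopes]

# Only list (and fetch) secrets from our own scope.
secrets = {m.key: dbutils.secrets.get(random_scope, m.key)
           for m in dbutils.secrets.list(random_scope)}
```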
---
 tests/integration/test_dbutils.py | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py
index e6e2a8668..e486f2282 100644
--- a/tests/integration/test_dbutils.py
+++ b/tests/integration/test_dbutils.py
@@ -192,20 +192,21 @@ def test_secrets(w, random):
 
     from databricks.sdk.runtime import dbutils
 
+    all_scopes = dbutils.secrets.listScopes()
+    assert random_scope in [scope.getName() for scope in all_scopes]
+
     all_secrets = {}
-    for secret_scope in dbutils.secrets.listScopes():
-        scope = secret_scope.name
-        for secret_metadata in dbutils.secrets.list(scope):
-            key = secret_metadata.key
-            try:
-                all_secrets[f'{scope}.{key}'] = dbutils.secrets.get(scope, key)
-            except DatabricksError as e:
-                if e.error_code == 'BAD_REQUEST':
-                    pytest.skip('dbconnect is not enabled on this workspace')
-                raise e
+    for secret_metadata in dbutils.secrets.list(random_scope):
+        key = secret_metadata.key
+        try:
+            all_secrets[key] = dbutils.secrets.get(random_scope, key)
+        except DatabricksError as e:
+            if e.error_code == 'BAD_REQUEST':
+                pytest.skip('dbconnect is not enabled on this workspace')
+            raise e
 
     logger.info(f'After loading secret: {random_value}')
     logging.getLogger('databricks.sdk').info(f'After loading secret: {random_value}')
 
-    assert all_secrets[f'{random_scope}.{key_for_string}'] == random_value
-    assert all_secrets[f'{random_scope}.{key_for_bytes}'] == random_value
+    assert all_secrets[key_for_string] == random_value
+    assert all_secrets[key_for_bytes] == random_value

From 3c391a03d14bffa9a715bd949eed58adafded9e3 Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Tue, 11 Feb 2025 16:06:08 +0100
Subject: [PATCH 096/136] [Release] Release v0.44.0 (#885)

### Internal Changes

* Fix `tests/integration/test_dbutils.py::test_secrets`
([#884](https://github.com/databricks/databricks-sdk-py/pull/884)).


### API Changes:

* Added `get_message_query_result_by_attachment()` method for
[w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html)
workspace-level service.
* Added `id` field for `databricks.sdk.service.apps.App`.
* Added `limit_config` field for
`databricks.sdk.service.billing.UpdateBudgetPolicyRequest`.
* Added `volumes` field for
`databricks.sdk.service.compute.ClusterLogConf`.
* Removed `review_state`, `reviews` and `runner_collaborators` fields
for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.

OpenAPI SHA: 99f644e72261ef5ecf8d74db20f4b7a1e09723cc, Date: 2025-02-11
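
Two of the additions, sketched under assumptions: the Genie call is assumed to take the same space/conversation/message identifiers as `get_message_query_result()` plus an attachment id, and the volumes log destination is assumed to be a Unity Catalog Volumes path (all `<...>` values are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import compute

w = WorkspaceClient()

# Query result for a specific attachment of a Genie message (assumed signature).
result = w.genie.get_message_query_result_by_attachment(
    space_id="<space-id>",
    conversation_id="<conversation-id>",
    message_id="<message-id>",
    attachment_id="<attachment-id>",
)

# Cluster log delivery to a Unity Catalog volume via the new `volumes` field.
log_conf = compute.ClusterLogConf(
    volumes=compute.VolumesStorageInfo(destination="/Volumes/main/default/cluster_logs"))
```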
---
 .codegen/_openapi_sha                         |    2 +-
 CHANGELOG.md                                  |   17 +
 databricks/sdk/__init__.py                    |  493 +++----
 databricks/sdk/service/apps.py                |    6 +
 databricks/sdk/service/billing.py             |   32 +-
 databricks/sdk/service/catalog.py             |    8 +
 databricks/sdk/service/cleanrooms.py          |   72 +-
 databricks/sdk/service/compute.py             |   92 +-
 databricks/sdk/service/dashboards.py          |   28 +
 databricks/sdk/service/serving.py             |    2 +-
 databricks/sdk/service/sharing.py             |   21 +-
 databricks/sdk/version.py                     |    2 +-
 docs/account/billing/billable_usage.rst       |   43 +-
 docs/account/billing/budget_policy.rst        |  123 +-
 docs/account/billing/budgets.rst              |   95 +-
 docs/account/billing/log_delivery.rst         |  213 ++-
 docs/account/billing/usage_dashboards.rst     |   47 +-
 .../account/catalog/metastore_assignments.rst |  101 +-
 docs/account/catalog/metastores.rst           |   79 +-
 docs/account/catalog/storage_credentials.rst  |  117 +-
 docs/account/iam/access_control.rst           |   73 +-
 docs/account/iam/groups.rst                   |  209 ++-
 docs/account/iam/service_principals.rst       |  215 ++-
 docs/account/iam/users.rst                    |  297 ++--
 docs/account/iam/workspace_assignment.rst     |   89 +-
 .../account/oauth2/custom_app_integration.rst |  131 +-
 docs/account/oauth2/federation_policy.rst     |  155 ++-
 docs/account/oauth2/o_auth_published_apps.rst |   23 +-
 .../oauth2/published_app_integration.rst      |   91 +-
 .../service_principal_federation_policy.rst   |  175 ++-
 .../oauth2/service_principal_secrets.rst      |   87 +-
 docs/account/provisioning/credentials.rst     |   89 +-
 docs/account/provisioning/encryption_keys.rst |  151 +-
 docs/account/provisioning/networks.rst        |  105 +-
 docs/account/provisioning/private_access.rst  |  255 ++--
 docs/account/provisioning/storage.rst         |   85 +-
 docs/account/provisioning/vpc_endpoints.rst   |  121 +-
 docs/account/provisioning/workspaces.rst      |  554 ++++----
 .../settings/csp_enablement_account.rst       |   71 +-
 .../settings/disable_legacy_features.rst      |   93 +-
 .../settings/enable_ip_access_lists.rst       |   87 +-
 .../settings/esm_enablement_account.rst       |   65 +-
 docs/account/settings/ip_access_lists.rst     |  239 ++--
 .../account/settings/network_connectivity.rst |  179 ++-
 docs/account/settings/personal_compute.rst    |   97 +-
 docs/account/settings/settings.rst            |   36 +-
 docs/dbdataclasses/billing.rst                |    4 +
 docs/dbdataclasses/cleanrooms.rst             |   15 -
 docs/dbdataclasses/dashboards.rst             |    3 +
 docs/workspace/apps/apps.rst                  |  272 ++--
 .../workspace/catalog/artifact_allowlists.rst |   43 +-
 docs/workspace/catalog/catalogs.rst           |  195 ++-
 docs/workspace/catalog/connections.rst        |  141 +-
 docs/workspace/catalog/credentials.rst        |  248 +++-
 docs/workspace/catalog/external_locations.rst |  225 ++-
 docs/workspace/catalog/functions.rst          |  179 ++-
 docs/workspace/catalog/grants.rst             |   89 +-
 docs/workspace/catalog/metastores.rst         |  255 ++--
 docs/workspace/catalog/model_versions.rst     |  203 ++-
 docs/workspace/catalog/online_tables.rst      |   56 +-
 docs/workspace/catalog/quality_monitors.rst   |  431 +++---
 docs/workspace/catalog/registered_models.rst  |  331 +++--
 docs/workspace/catalog/resource_quotas.rst    |   59 +-
 docs/workspace/catalog/schemas.rst            |  169 ++-
 .../workspace/catalog/storage_credentials.rst |  265 ++--
 docs/workspace/catalog/system_schemas.rst     |   77 +-
 docs/workspace/catalog/table_constraints.rst  |   93 +-
 docs/workspace/catalog/tables.rst             |  255 ++--
 .../catalog/temporary_table_credentials.rst   |   49 +-
 docs/workspace/catalog/volumes.rst            |  237 ++--
 docs/workspace/catalog/workspace_bindings.rst |  135 +-
 .../cleanrooms/clean_room_assets.rst          |  133 +-
 .../cleanrooms/clean_room_task_runs.rst       |   27 +-
 docs/workspace/cleanrooms/clean_rooms.rst     |  127 +-
 docs/workspace/compute/cluster_policies.rst   |  307 +++--
 docs/workspace/compute/clusters.rst           | 1216 ++++++++---------
 docs/workspace/compute/command_execution.rst  |  142 +-
 .../workspace/compute/global_init_scripts.rst |  151 +-
 docs/workspace/compute/instance_pools.rst     |  311 +++--
 docs/workspace/compute/instance_profiles.rst  |  163 ++-
 docs/workspace/compute/libraries.rst          |  103 +-
 .../policy_compliance_for_clusters.rst        |  103 +-
 docs/workspace/compute/policy_families.rst    |   57 +-
 docs/workspace/dashboards/genie.rst           |  153 ++-
 docs/workspace/dashboards/lakeview.rst        |  363 +++--
 .../dashboards/lakeview_embedded.rst          |   15 +-
 docs/workspace/dashboards/query_execution.rst |   53 +-
 docs/workspace/files/dbfs.rst                 |  220 +--
 docs/workspace/files/files.rst                |  231 ++--
 docs/workspace/iam/access_control.rst         |   23 +-
 .../iam/account_access_control_proxy.rst      |   73 +-
 docs/workspace/iam/current_user.rst           |    9 +-
 docs/workspace/iam/groups.rst                 |  209 ++-
 docs/workspace/iam/permission_migration.rst   |   23 +-
 docs/workspace/iam/permissions.rst            |  199 ++-
 docs/workspace/iam/service_principals.rst     |  215 ++-
 docs/workspace/iam/users.rst                  |  347 +++--
 docs/workspace/jobs/jobs.rst                  | 1152 ++++++++--------
 .../jobs/policy_compliance_for_jobs.rst       |   93 +-
 .../marketplace/consumer_fulfillments.rst     |   41 +-
 .../marketplace/consumer_installations.rst    |  101 +-
 .../marketplace/consumer_listings.rst         |  111 +-
 .../consumer_personalization_requests.rst     |   61 +-
 .../marketplace/consumer_providers.rst        |   45 +-
 .../marketplace/provider_exchange_filters.rst |   61 +-
 .../marketplace/provider_exchanges.rst        |  139 +-
 docs/workspace/marketplace/provider_files.rst |   65 +-
 .../marketplace/provider_listings.rst         |   75 +-
 .../provider_personalization_requests.rst     |   41 +-
 ...provider_provider_analytics_dashboards.rst |   53 +-
 .../marketplace/provider_providers.rst        |   73 +-
 docs/workspace/ml/experiments.rst             |  923 +++++++------
 docs/workspace/ml/model_registry.rst          | 1141 ++++++++--------
 docs/workspace/pipelines/pipelines.rst        |  596 ++++----
 docs/workspace/serving/serving_endpoints.rst  |  500 +++----
 .../serving/serving_endpoints_data_plane.rst  |   95 +-
 ...aibi_dashboard_embedding_access_policy.rst |   89 +-
 ...i_dashboard_embedding_approved_domains.rst |   91 +-
 .../settings/automatic_cluster_update.rst     |   69 +-
 .../settings/compliance_security_profile.rst  |   73 +-
 .../settings/credentials_manager.rst          |   27 +-
 docs/workspace/settings/default_namespace.rst |  133 +-
 .../settings/disable_legacy_access.rst        |   95 +-
 .../settings/disable_legacy_dbfs.rst          |   87 +-
 .../settings/enhanced_security_monitoring.rst |   77 +-
 docs/workspace/settings/ip_access_lists.rst   |  243 ++--
 .../settings/notification_destinations.rst    |   95 +-
 .../settings/restrict_workspace_admins.rst    |  113 +-
 docs/workspace/settings/settings.rst          |   68 +-
 docs/workspace/settings/token_management.rst  |  127 +-
 docs/workspace/settings/tokens.rst            |   59 +-
 docs/workspace/settings/workspace_conf.rst    |   25 +-
 docs/workspace/sharing/providers.rst          |  189 ++-
 .../sharing/recipient_activation.rst          |   43 +-
 docs/workspace/sharing/recipients.rst         |  275 ++--
 docs/workspace/sharing/shares.rst             |  253 ++--
 docs/workspace/sql/alerts.rst                 |  105 +-
 docs/workspace/sql/alerts_legacy.rst          |  173 ++-
 docs/workspace/sql/dashboard_widgets.rst      |   73 +-
 docs/workspace/sql/dashboards.rst             |  149 +-
 docs/workspace/sql/data_sources.rst           |   43 +-
 docs/workspace/sql/dbsql_permissions.rst      |  125 +-
 docs/workspace/sql/queries.rst                |  121 +-
 docs/workspace/sql/queries_legacy.rst         |  303 ++--
 docs/workspace/sql/query_history.rst          |   43 +-
 docs/workspace/sql/query_visualizations.rst   |   65 +-
 .../sql/query_visualizations_legacy.rst       |  131 +-
 docs/workspace/sql/redash_config.rst          |    5 +-
 docs/workspace/sql/statement_execution.rst    |  457 ++++---
 docs/workspace/sql/warehouses.rst             |  486 +++----
 .../vectorsearch/vector_search_endpoints.rst  |   60 +-
 .../vectorsearch/vector_search_indexes.rst    |  271 ++--
 docs/workspace/workspace/git_credentials.rst  |  141 +-
 docs/workspace/workspace/repos.rst            |  219 ++-
 docs/workspace/workspace/secrets.rst          |  375 +++--
 docs/workspace/workspace/workspace.rst        |  353 +++--
 tests/integration/test_clusters.py            |    4 +-
 157 files changed, 12860 insertions(+), 12806 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 9a95107e8..562b72fcc 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-c72c58f97b950fcb924a90ef164bcb10cfcd5ece
\ No newline at end of file
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cd073f71c..c0e5d78ee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,22 @@
 # Version changelog
 
+## [Release] Release v0.44.0
+
+### Internal Changes
+
+ * Fix `tests/integration/test_dbutils.py::test_secrets` ([#884](https://github.com/databricks/databricks-sdk-py/pull/884)).
+
+
+### API Changes:
+
+ * Added `get_message_query_result_by_attachment()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service.
+ * Added `id` field for `databricks.sdk.service.apps.App`.
+ * Added `limit_config` field for `databricks.sdk.service.billing.UpdateBudgetPolicyRequest`.
+ * Added `volumes` field for `databricks.sdk.service.compute.ClusterLogConf`.
+ * Removed `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.
+
+OpenAPI SHA: 99f644e72261ef5ecf8d74db20f4b7a1e09723cc, Date: 2025-02-11
+
 ## [Release] Release v0.43.0
 
 ### API Changes:
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 9f67adc59..79b1c3353 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -5,6 +5,7 @@
 
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
+import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
@@ -193,105 +194,109 @@ def __init__(self,
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
-        self._access_control = AccessControlAPI(self._api_client)
-        self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
-        self._alerts = AlertsAPI(self._api_client)
-        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
-        self._apps = AppsAPI(self._api_client)
-        self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
-        self._catalogs = CatalogsAPI(self._api_client)
-        self._clean_room_assets = CleanRoomAssetsAPI(self._api_client)
-        self._clean_room_task_runs = CleanRoomTaskRunsAPI(self._api_client)
-        self._clean_rooms = CleanRoomsAPI(self._api_client)
-        self._cluster_policies = ClusterPoliciesAPI(self._api_client)
+        self._access_control = service.iam.AccessControlAPI(self._api_client)
+        self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
+        self._alerts = service.sql.AlertsAPI(self._api_client)
+        self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+        self._apps = service.apps.AppsAPI(self._api_client)
+        self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
+        self._catalogs = service.catalog.CatalogsAPI(self._api_client)
+        self._clean_room_assets = service.cleanrooms.CleanRoomAssetsAPI(self._api_client)
+        self._clean_room_task_runs = service.cleanrooms.CleanRoomTaskRunsAPI(self._api_client)
+        self._clean_rooms = service.cleanrooms.CleanRoomsAPI(self._api_client)
+        self._cluster_policies = service.compute.ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
-        self._command_execution = CommandExecutionAPI(self._api_client)
-        self._connections = ConnectionsAPI(self._api_client)
-        self._consumer_fulfillments = ConsumerFulfillmentsAPI(self._api_client)
-        self._consumer_installations = ConsumerInstallationsAPI(self._api_client)
-        self._consumer_listings = ConsumerListingsAPI(self._api_client)
-        self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
-        self._consumer_providers = ConsumerProvidersAPI(self._api_client)
-        self._credentials = CredentialsAPI(self._api_client)
-        self._credentials_manager = CredentialsManagerAPI(self._api_client)
-        self._current_user = CurrentUserAPI(self._api_client)
-        self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
-        self._dashboards = DashboardsAPI(self._api_client)
-        self._data_sources = DataSourcesAPI(self._api_client)
+        self._command_execution = service.compute.CommandExecutionAPI(self._api_client)
+        self._connections = service.catalog.ConnectionsAPI(self._api_client)
+        self._consumer_fulfillments = service.marketplace.ConsumerFulfillmentsAPI(self._api_client)
+        self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client)
+        self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client)
+        self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI(
+            self._api_client)
+        self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client)
+        self._credentials = service.catalog.CredentialsAPI(self._api_client)
+        self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client)
+        self._current_user = service.iam.CurrentUserAPI(self._api_client)
+        self._dashboard_widgets = service.sql.DashboardWidgetsAPI(self._api_client)
+        self._dashboards = service.sql.DashboardsAPI(self._api_client)
+        self._data_sources = service.sql.DataSourcesAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
-        self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
-        self._experiments = ExperimentsAPI(self._api_client)
-        self._external_locations = ExternalLocationsAPI(self._api_client)
+        self._dbsql_permissions = service.sql.DbsqlPermissionsAPI(self._api_client)
+        self._experiments = service.ml.ExperimentsAPI(self._api_client)
+        self._external_locations = service.catalog.ExternalLocationsAPI(self._api_client)
         self._files = _make_files_client(self._api_client, self._config)
-        self._functions = FunctionsAPI(self._api_client)
-        self._genie = GenieAPI(self._api_client)
-        self._git_credentials = GitCredentialsAPI(self._api_client)
-        self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
-        self._grants = GrantsAPI(self._api_client)
-        self._groups = GroupsAPI(self._api_client)
-        self._instance_pools = InstancePoolsAPI(self._api_client)
-        self._instance_profiles = InstanceProfilesAPI(self._api_client)
-        self._ip_access_lists = IpAccessListsAPI(self._api_client)
+        self._functions = service.catalog.FunctionsAPI(self._api_client)
+        self._genie = service.dashboards.GenieAPI(self._api_client)
+        self._git_credentials = service.workspace.GitCredentialsAPI(self._api_client)
+        self._global_init_scripts = service.compute.GlobalInitScriptsAPI(self._api_client)
+        self._grants = service.catalog.GrantsAPI(self._api_client)
+        self._groups = service.iam.GroupsAPI(self._api_client)
+        self._instance_pools = service.compute.InstancePoolsAPI(self._api_client)
+        self._instance_profiles = service.compute.InstanceProfilesAPI(self._api_client)
+        self._ip_access_lists = service.settings.IpAccessListsAPI(self._api_client)
         self._jobs = JobsExt(self._api_client)
-        self._lakeview = LakeviewAPI(self._api_client)
-        self._lakeview_embedded = LakeviewEmbeddedAPI(self._api_client)
-        self._libraries = LibrariesAPI(self._api_client)
-        self._metastores = MetastoresAPI(self._api_client)
-        self._model_registry = ModelRegistryAPI(self._api_client)
-        self._model_versions = ModelVersionsAPI(self._api_client)
-        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
-        self._online_tables = OnlineTablesAPI(self._api_client)
-        self._permission_migration = PermissionMigrationAPI(self._api_client)
-        self._permissions = PermissionsAPI(self._api_client)
-        self._pipelines = PipelinesAPI(self._api_client)
-        self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
-        self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
-        self._policy_families = PolicyFamiliesAPI(self._api_client)
-        self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
-        self._provider_exchanges = ProviderExchangesAPI(self._api_client)
-        self._provider_files = ProviderFilesAPI(self._api_client)
-        self._provider_listings = ProviderListingsAPI(self._api_client)
-        self._provider_personalization_requests = ProviderPersonalizationRequestsAPI(self._api_client)
-        self._provider_provider_analytics_dashboards = ProviderProviderAnalyticsDashboardsAPI(
+        self._lakeview = service.dashboards.LakeviewAPI(self._api_client)
+        self._lakeview_embedded = service.dashboards.LakeviewEmbeddedAPI(self._api_client)
+        self._libraries = service.compute.LibrariesAPI(self._api_client)
+        self._metastores = service.catalog.MetastoresAPI(self._api_client)
+        self._model_registry = service.ml.ModelRegistryAPI(self._api_client)
+        self._model_versions = service.catalog.ModelVersionsAPI(self._api_client)
+        self._notification_destinations = service.settings.NotificationDestinationsAPI(self._api_client)
+        self._online_tables = service.catalog.OnlineTablesAPI(self._api_client)
+        self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client)
+        self._permissions = service.iam.PermissionsAPI(self._api_client)
+        self._pipelines = service.pipelines.PipelinesAPI(self._api_client)
+        self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI(
+            self._api_client)
+        self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client)
+        self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client)
+        self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client)
+        self._provider_exchanges = service.marketplace.ProviderExchangesAPI(self._api_client)
+        self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client)
+        self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client)
+        self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI(
             self._api_client)
-        self._provider_providers = ProviderProvidersAPI(self._api_client)
-        self._providers = ProvidersAPI(self._api_client)
-        self._quality_monitors = QualityMonitorsAPI(self._api_client)
-        self._queries = QueriesAPI(self._api_client)
-        self._queries_legacy = QueriesLegacyAPI(self._api_client)
-        self._query_execution = QueryExecutionAPI(self._api_client)
-        self._query_history = QueryHistoryAPI(self._api_client)
-        self._query_visualizations = QueryVisualizationsAPI(self._api_client)
-        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
-        self._recipient_activation = RecipientActivationAPI(self._api_client)
-        self._recipients = RecipientsAPI(self._api_client)
-        self._redash_config = RedashConfigAPI(self._api_client)
-        self._registered_models = RegisteredModelsAPI(self._api_client)
-        self._repos = ReposAPI(self._api_client)
-        self._resource_quotas = ResourceQuotasAPI(self._api_client)
-        self._schemas = SchemasAPI(self._api_client)
-        self._secrets = SecretsAPI(self._api_client)
-        self._service_principals = ServicePrincipalsAPI(self._api_client)
+        self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI(
+            self._api_client)
+        self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client)
+        self._providers = service.sharing.ProvidersAPI(self._api_client)
+        self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client)
+        self._queries = service.sql.QueriesAPI(self._api_client)
+        self._queries_legacy = service.sql.QueriesLegacyAPI(self._api_client)
+        self._query_execution = service.dashboards.QueryExecutionAPI(self._api_client)
+        self._query_history = service.sql.QueryHistoryAPI(self._api_client)
+        self._query_visualizations = service.sql.QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = service.sql.QueryVisualizationsLegacyAPI(self._api_client)
+        self._recipient_activation = service.sharing.RecipientActivationAPI(self._api_client)
+        self._recipients = service.sharing.RecipientsAPI(self._api_client)
+        self._redash_config = service.sql.RedashConfigAPI(self._api_client)
+        self._registered_models = service.catalog.RegisteredModelsAPI(self._api_client)
+        self._repos = service.workspace.ReposAPI(self._api_client)
+        self._resource_quotas = service.catalog.ResourceQuotasAPI(self._api_client)
+        self._schemas = service.catalog.SchemasAPI(self._api_client)
+        self._secrets = service.workspace.SecretsAPI(self._api_client)
+        self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
-        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
-        self._settings = SettingsAPI(self._api_client)
-        self._shares = SharesAPI(self._api_client)
-        self._statement_execution = StatementExecutionAPI(self._api_client)
-        self._storage_credentials = StorageCredentialsAPI(self._api_client)
-        self._system_schemas = SystemSchemasAPI(self._api_client)
-        self._table_constraints = TableConstraintsAPI(self._api_client)
-        self._tables = TablesAPI(self._api_client)
-        self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client)
-        self._token_management = TokenManagementAPI(self._api_client)
-        self._tokens = TokensAPI(self._api_client)
-        self._users = UsersAPI(self._api_client)
-        self._vector_search_endpoints = VectorSearchEndpointsAPI(self._api_client)
-        self._vector_search_indexes = VectorSearchIndexesAPI(self._api_client)
-        self._volumes = VolumesAPI(self._api_client)
-        self._warehouses = WarehousesAPI(self._api_client)
+        self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
+            self._api_client, serving_endpoints)
+        self._settings = service.settings.SettingsAPI(self._api_client)
+        self._shares = service.sharing.SharesAPI(self._api_client)
+        self._statement_execution = service.sql.StatementExecutionAPI(self._api_client)
+        self._storage_credentials = service.catalog.StorageCredentialsAPI(self._api_client)
+        self._system_schemas = service.catalog.SystemSchemasAPI(self._api_client)
+        self._table_constraints = service.catalog.TableConstraintsAPI(self._api_client)
+        self._tables = service.catalog.TablesAPI(self._api_client)
+        self._temporary_table_credentials = service.catalog.TemporaryTableCredentialsAPI(self._api_client)
+        self._token_management = service.settings.TokenManagementAPI(self._api_client)
+        self._tokens = service.settings.TokensAPI(self._api_client)
+        self._users = service.iam.UsersAPI(self._api_client)
+        self._vector_search_endpoints = service.vectorsearch.VectorSearchEndpointsAPI(self._api_client)
+        self._vector_search_indexes = service.vectorsearch.VectorSearchIndexesAPI(self._api_client)
+        self._volumes = service.catalog.VolumesAPI(self._api_client)
+        self._warehouses = service.sql.WarehousesAPI(self._api_client)
         self._workspace = WorkspaceExt(self._api_client)
-        self._workspace_bindings = WorkspaceBindingsAPI(self._api_client)
-        self._workspace_conf = WorkspaceConfAPI(self._api_client)
+        self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
+        self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -306,57 +311,57 @@ def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
     @property
-    def access_control(self) -> AccessControlAPI:
+    def access_control(self) -> service.iam.AccessControlAPI:
         """Rule based Access Control for Databricks Resources."""
         return self._access_control
 
     @property
-    def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
+    def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
         return self._account_access_control_proxy
 
     @property
-    def alerts(self) -> AlertsAPI:
+    def alerts(self) -> service.sql.AlertsAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
     @property
-    def alerts_legacy(self) -> AlertsLegacyAPI:
+    def alerts_legacy(self) -> service.sql.AlertsLegacyAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
 
     @property
-    def apps(self) -> AppsAPI:
+    def apps(self) -> service.apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
 
     @property
-    def artifact_allowlists(self) -> ArtifactAllowlistsAPI:
+    def artifact_allowlists(self) -> service.catalog.ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
 
     @property
-    def catalogs(self) -> CatalogsAPI:
+    def catalogs(self) -> service.catalog.CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
     @property
-    def clean_room_assets(self) -> CleanRoomAssetsAPI:
+    def clean_room_assets(self) -> service.cleanrooms.CleanRoomAssetsAPI:
         """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
         return self._clean_room_assets
 
     @property
-    def clean_room_task_runs(self) -> CleanRoomTaskRunsAPI:
+    def clean_room_task_runs(self) -> service.cleanrooms.CleanRoomTaskRunsAPI:
         """Clean room task runs are the executions of notebooks in a clean room."""
         return self._clean_room_task_runs
 
     @property
-    def clean_rooms(self) -> CleanRoomsAPI:
+    def clean_rooms(self) -> service.cleanrooms.CleanRoomsAPI:
         """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
         return self._clean_rooms
 
     @property
-    def cluster_policies(self) -> ClusterPoliciesAPI:
+    def cluster_policies(self) -> service.compute.ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
         return self._cluster_policies
 
@@ -366,67 +371,67 @@ def clusters(self) -> ClustersExt:
         return self._clusters
 
     @property
-    def command_execution(self) -> CommandExecutionAPI:
+    def command_execution(self) -> service.compute.CommandExecutionAPI:
         """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters."""
         return self._command_execution
 
     @property
-    def connections(self) -> ConnectionsAPI:
+    def connections(self) -> service.catalog.ConnectionsAPI:
         """Connections allow for creating a connection to an external data source."""
         return self._connections
 
     @property
-    def consumer_fulfillments(self) -> ConsumerFulfillmentsAPI:
+    def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI:
         """Fulfillments are entities that allow consumers to preview installations."""
         return self._consumer_fulfillments
 
     @property
-    def consumer_installations(self) -> ConsumerInstallationsAPI:
+    def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI:
         """Installations are entities that allow consumers to interact with Databricks Marketplace listings."""
         return self._consumer_installations
 
     @property
-    def consumer_listings(self) -> ConsumerListingsAPI:
+    def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._consumer_listings
 
     @property
-    def consumer_personalization_requests(self) -> ConsumerPersonalizationRequestsAPI:
+    def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI:
         """Personalization Requests allow customers to interact with the individualized Marketplace listing flow."""
         return self._consumer_personalization_requests
 
     @property
-    def consumer_providers(self) -> ConsumerProvidersAPI:
+    def consumer_providers(self) -> service.marketplace.ConsumerProvidersAPI:
         """Providers are the entities that publish listings to the Marketplace."""
         return self._consumer_providers
 
     @property
-    def credentials(self) -> CredentialsAPI:
+    def credentials(self) -> service.catalog.CredentialsAPI:
         """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
         return self._credentials
 
     @property
-    def credentials_manager(self) -> CredentialsManagerAPI:
+    def credentials_manager(self) -> service.settings.CredentialsManagerAPI:
         """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens."""
         return self._credentials_manager
 
     @property
-    def current_user(self) -> CurrentUserAPI:
+    def current_user(self) -> service.iam.CurrentUserAPI:
         """This API allows retrieving information about currently authenticated user or service principal."""
         return self._current_user
 
     @property
-    def dashboard_widgets(self) -> DashboardWidgetsAPI:
+    def dashboard_widgets(self) -> service.sql.DashboardWidgetsAPI:
         """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace."""
         return self._dashboard_widgets
 
     @property
-    def dashboards(self) -> DashboardsAPI:
+    def dashboards(self) -> service.sql.DashboardsAPI:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
     @property
-    def data_sources(self) -> DataSourcesAPI:
+    def data_sources(self) -> service.sql.DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
         return self._data_sources
 
@@ -436,67 +441,67 @@ def dbfs(self) -> DbfsExt:
         return self._dbfs
 
     @property
-    def dbsql_permissions(self) -> DbsqlPermissionsAPI:
+    def dbsql_permissions(self) -> service.sql.DbsqlPermissionsAPI:
         """The SQL Permissions API is similar to the endpoints of the :method:permissions/set."""
         return self._dbsql_permissions
 
     @property
-    def experiments(self) -> ExperimentsAPI:
+    def experiments(self) -> service.ml.ExperimentsAPI:
         """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
         return self._experiments
 
     @property
-    def external_locations(self) -> ExternalLocationsAPI:
+    def external_locations(self) -> service.catalog.ExternalLocationsAPI:
         """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path."""
         return self._external_locations
 
     @property
-    def files(self) -> FilesAPI:
+    def files(self) -> service.files.FilesAPI:
         """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI."""
         return self._files
 
     @property
-    def functions(self) -> FunctionsAPI:
+    def functions(self) -> service.catalog.FunctionsAPI:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
     @property
-    def genie(self) -> GenieAPI:
+    def genie(self) -> service.dashboards.GenieAPI:
         """Genie provides a no-code experience for business users, powered by AI/BI."""
         return self._genie
 
     @property
-    def git_credentials(self) -> GitCredentialsAPI:
+    def git_credentials(self) -> service.workspace.GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
         return self._git_credentials
 
     @property
-    def global_init_scripts(self) -> GlobalInitScriptsAPI:
+    def global_init_scripts(self) -> service.compute.GlobalInitScriptsAPI:
         """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace."""
         return self._global_init_scripts
 
     @property
-    def grants(self) -> GrantsAPI:
+    def grants(self) -> service.catalog.GrantsAPI:
         """In Unity Catalog, data is secure by default."""
         return self._grants
 
     @property
-    def groups(self) -> GroupsAPI:
+    def groups(self) -> service.iam.GroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
         return self._groups
 
     @property
-    def instance_pools(self) -> InstancePoolsAPI:
+    def instance_pools(self) -> service.compute.InstancePoolsAPI:
         """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times."""
         return self._instance_pools
 
     @property
-    def instance_profiles(self) -> InstanceProfilesAPI:
+    def instance_profiles(self) -> service.compute.InstanceProfilesAPI:
         """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with."""
         return self._instance_profiles
 
     @property
-    def ip_access_lists(self) -> IpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.IpAccessListsAPI:
         """IP Access List enables admins to configure IP access lists."""
         return self._ip_access_lists
 
@@ -506,192 +511,193 @@ def jobs(self) -> JobsExt:
         return self._jobs
 
     @property
-    def lakeview(self) -> LakeviewAPI:
+    def lakeview(self) -> service.dashboards.LakeviewAPI:
         """These APIs provide specific management operations for Lakeview dashboards."""
         return self._lakeview
 
     @property
-    def lakeview_embedded(self) -> LakeviewEmbeddedAPI:
+    def lakeview_embedded(self) -> service.dashboards.LakeviewEmbeddedAPI:
         """Token-based Lakeview APIs for embedding dashboards in external applications."""
         return self._lakeview_embedded
 
     @property
-    def libraries(self) -> LibrariesAPI:
+    def libraries(self) -> service.compute.LibrariesAPI:
         """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
         return self._libraries
 
     @property
-    def metastores(self) -> MetastoresAPI:
+    def metastores(self) -> service.catalog.MetastoresAPI:
         """A metastore is the top-level container of objects in Unity Catalog."""
         return self._metastores
 
     @property
-    def model_registry(self) -> ModelRegistryAPI:
+    def model_registry(self) -> service.ml.ModelRegistryAPI:
         """Note: This API reference documents APIs for the Workspace Model Registry."""
         return self._model_registry
 
     @property
-    def model_versions(self) -> ModelVersionsAPI:
+    def model_versions(self) -> service.catalog.ModelVersionsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
     @property
-    def notification_destinations(self) -> NotificationDestinationsAPI:
+    def notification_destinations(self) -> service.settings.NotificationDestinationsAPI:
         """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
         return self._notification_destinations
 
     @property
-    def online_tables(self) -> OnlineTablesAPI:
+    def online_tables(self) -> service.catalog.OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
         return self._online_tables
 
     @property
-    def permission_migration(self) -> PermissionMigrationAPI:
+    def permission_migration(self) -> service.iam.PermissionMigrationAPI:
         """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
-    def permissions(self) -> PermissionsAPI:
+    def permissions(self) -> service.iam.PermissionsAPI:
         """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints."""
         return self._permissions
 
     @property
-    def pipelines(self) -> PipelinesAPI:
+    def pipelines(self) -> service.pipelines.PipelinesAPI:
         """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
         return self._pipelines
 
     @property
-    def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
+    def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI:
         """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
         return self._policy_compliance_for_clusters
 
     @property
-    def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
+    def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI:
         """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
         return self._policy_compliance_for_jobs
 
     @property
-    def policy_families(self) -> PolicyFamiliesAPI:
+    def policy_families(self) -> service.compute.PolicyFamiliesAPI:
         """View available policy families."""
         return self._policy_families
 
     @property
-    def provider_exchange_filters(self) -> ProviderExchangeFiltersAPI:
+    def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI:
         """Marketplace exchanges filters curate which groups can access an exchange."""
         return self._provider_exchange_filters
 
     @property
-    def provider_exchanges(self) -> ProviderExchangesAPI:
+    def provider_exchanges(self) -> service.marketplace.ProviderExchangesAPI:
         """Marketplace exchanges allow providers to share their listings with a curated set of customers."""
         return self._provider_exchanges
 
     @property
-    def provider_files(self) -> ProviderFilesAPI:
+    def provider_files(self) -> service.marketplace.ProviderFilesAPI:
         """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons."""
         return self._provider_files
 
     @property
-    def provider_listings(self) -> ProviderListingsAPI:
+    def provider_listings(self) -> service.marketplace.ProviderListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._provider_listings
 
     @property
-    def provider_personalization_requests(self) -> ProviderPersonalizationRequestsAPI:
+    def provider_personalization_requests(self) -> service.marketplace.ProviderPersonalizationRequestsAPI:
         """Personalization requests are an alternate to instantly available listings."""
         return self._provider_personalization_requests
 
     @property
-    def provider_provider_analytics_dashboards(self) -> ProviderProviderAnalyticsDashboardsAPI:
+    def provider_provider_analytics_dashboards(
+            self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI:
         """Manage templated analytics solution for providers."""
         return self._provider_provider_analytics_dashboards
 
     @property
-    def provider_providers(self) -> ProviderProvidersAPI:
+    def provider_providers(self) -> service.marketplace.ProviderProvidersAPI:
         """Providers are entities that manage assets in Marketplace."""
         return self._provider_providers
 
     @property
-    def providers(self) -> ProvidersAPI:
+    def providers(self) -> service.sharing.ProvidersAPI:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers
 
     @property
-    def quality_monitors(self) -> QualityMonitorsAPI:
+    def quality_monitors(self) -> service.catalog.QualityMonitorsAPI:
         """A monitor computes and monitors data or model quality metrics for a table over time."""
         return self._quality_monitors
 
     @property
-    def queries(self) -> QueriesAPI:
+    def queries(self) -> service.sql.QueriesAPI:
         """The queries API can be used to perform CRUD operations on queries."""
         return self._queries
 
     @property
-    def queries_legacy(self) -> QueriesLegacyAPI:
+    def queries_legacy(self) -> service.sql.QueriesLegacyAPI:
         """These endpoints are used for CRUD operations on query definitions."""
         return self._queries_legacy
 
     @property
-    def query_execution(self) -> QueryExecutionAPI:
+    def query_execution(self) -> service.dashboards.QueryExecutionAPI:
         """Query execution APIs for AI / BI Dashboards."""
         return self._query_execution
 
     @property
-    def query_history(self) -> QueryHistoryAPI:
+    def query_history(self) -> service.sql.QueryHistoryAPI:
         """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
         return self._query_history
 
     @property
-    def query_visualizations(self) -> QueryVisualizationsAPI:
+    def query_visualizations(self) -> service.sql.QueryVisualizationsAPI:
         """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
         return self._query_visualizations
 
     @property
-    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+    def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI:
         """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
         return self._query_visualizations_legacy
 
     @property
-    def recipient_activation(self) -> RecipientActivationAPI:
+    def recipient_activation(self) -> service.sharing.RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
         return self._recipient_activation
 
     @property
-    def recipients(self) -> RecipientsAPI:
+    def recipients(self) -> service.sharing.RecipientsAPI:
         """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares."""
         return self._recipients
 
     @property
-    def redash_config(self) -> RedashConfigAPI:
+    def redash_config(self) -> service.sql.RedashConfigAPI:
         """Redash V2 service for workspace configurations (internal)."""
         return self._redash_config
 
     @property
-    def registered_models(self) -> RegisteredModelsAPI:
+    def registered_models(self) -> service.catalog.RegisteredModelsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._registered_models
 
     @property
-    def repos(self) -> ReposAPI:
+    def repos(self) -> service.workspace.ReposAPI:
         """The Repos API allows users to manage their git repos."""
         return self._repos
 
     @property
-    def resource_quotas(self) -> ResourceQuotasAPI:
+    def resource_quotas(self) -> service.catalog.ResourceQuotasAPI:
         """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
         return self._resource_quotas
 
     @property
-    def schemas(self) -> SchemasAPI:
+    def schemas(self) -> service.catalog.SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
         return self._schemas
 
     @property
-    def secrets(self) -> SecretsAPI:
+    def secrets(self) -> service.workspace.SecretsAPI:
         """The Secrets API allows you to manage secrets, secret scopes, and access permissions."""
         return self._secrets
 
     @property
-    def service_principals(self) -> ServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.ServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
@@ -701,82 +707,82 @@ def serving_endpoints(self) -> ServingEndpointsExt:
         return self._serving_endpoints
 
     @property
-    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+    def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI:
         """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
         return self._serving_endpoints_data_plane
 
     @property
-    def settings(self) -> SettingsAPI:
+    def settings(self) -> service.settings.SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
         return self._settings
 
     @property
-    def shares(self) -> SharesAPI:
+    def shares(self) -> service.sharing.SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares
 
     @property
-    def statement_execution(self) -> StatementExecutionAPI:
+    def statement_execution(self) -> service.sql.StatementExecutionAPI:
         """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result."""
         return self._statement_execution
 
     @property
-    def storage_credentials(self) -> StorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.StorageCredentialsAPI:
         """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant."""
         return self._storage_credentials
 
     @property
-    def system_schemas(self) -> SystemSchemasAPI:
+    def system_schemas(self) -> service.catalog.SystemSchemasAPI:
         """A system schema is a schema that lives within the system catalog."""
         return self._system_schemas
 
     @property
-    def table_constraints(self) -> TableConstraintsAPI:
+    def table_constraints(self) -> service.catalog.TableConstraintsAPI:
         """Primary key and foreign key constraints encode relationships between fields in tables."""
         return self._table_constraints
 
     @property
-    def tables(self) -> TablesAPI:
+    def tables(self) -> service.catalog.TablesAPI:
         """A table resides in the third layer of Unity Catalog’s three-level namespace."""
         return self._tables
 
     @property
-    def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI:
+    def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI:
         """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks."""
         return self._temporary_table_credentials
 
     @property
-    def token_management(self) -> TokenManagementAPI:
+    def token_management(self) -> service.settings.TokenManagementAPI:
         """Enables administrators to get all tokens and delete tokens for other users."""
         return self._token_management
 
     @property
-    def tokens(self) -> TokensAPI:
+    def tokens(self) -> service.settings.TokensAPI:
         """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs."""
         return self._tokens
 
     @property
-    def users(self) -> UsersAPI:
+    def users(self) -> service.iam.UsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vector_search_endpoints(self) -> VectorSearchEndpointsAPI:
+    def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI:
         """**Endpoint**: Represents the compute resources to host vector search indexes."""
         return self._vector_search_endpoints
 
     @property
-    def vector_search_indexes(self) -> VectorSearchIndexesAPI:
+    def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI:
         """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries."""
         return self._vector_search_indexes
 
     @property
-    def volumes(self) -> VolumesAPI:
+    def volumes(self) -> service.catalog.VolumesAPI:
         """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files."""
         return self._volumes
 
     @property
-    def warehouses(self) -> WarehousesAPI:
+    def warehouses(self) -> service.sql.WarehousesAPI:
         """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL."""
         return self._warehouses
 
@@ -786,12 +792,12 @@ def workspace(self) -> WorkspaceExt:
         return self._workspace
 
     @property
-    def workspace_bindings(self) -> WorkspaceBindingsAPI:
+    def workspace_bindings(self) -> service.catalog.WorkspaceBindingsAPI:
         """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__."""
         return self._workspace_bindings
 
     @property
-    def workspace_conf(self) -> WorkspaceConfAPI:
+    def workspace_conf(self) -> service.settings.WorkspaceConfAPI:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
@@ -865,35 +871,36 @@ def __init__(self,
                                    product_version=product_version)
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
-        self._access_control = AccountAccessControlAPI(self._api_client)
-        self._billable_usage = BillableUsageAPI(self._api_client)
-        self._budget_policy = BudgetPolicyAPI(self._api_client)
-        self._credentials = CredentialsAPI(self._api_client)
-        self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
-        self._encryption_keys = EncryptionKeysAPI(self._api_client)
-        self._federation_policy = AccountFederationPolicyAPI(self._api_client)
-        self._groups = AccountGroupsAPI(self._api_client)
-        self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
-        self._log_delivery = LogDeliveryAPI(self._api_client)
-        self._metastore_assignments = AccountMetastoreAssignmentsAPI(self._api_client)
-        self._metastores = AccountMetastoresAPI(self._api_client)
-        self._network_connectivity = NetworkConnectivityAPI(self._api_client)
-        self._networks = NetworksAPI(self._api_client)
-        self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
-        self._private_access = PrivateAccessAPI(self._api_client)
-        self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
-        self._service_principal_federation_policy = ServicePrincipalFederationPolicyAPI(self._api_client)
-        self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
-        self._service_principals = AccountServicePrincipalsAPI(self._api_client)
-        self._settings = AccountSettingsAPI(self._api_client)
-        self._storage = StorageAPI(self._api_client)
-        self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
-        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
-        self._users = AccountUsersAPI(self._api_client)
-        self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
-        self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
-        self._workspaces = WorkspacesAPI(self._api_client)
-        self._budgets = BudgetsAPI(self._api_client)
+        self._access_control = service.iam.AccountAccessControlAPI(self._api_client)
+        self._billable_usage = service.billing.BillableUsageAPI(self._api_client)
+        self._budget_policy = service.billing.BudgetPolicyAPI(self._api_client)
+        self._credentials = service.provisioning.CredentialsAPI(self._api_client)
+        self._custom_app_integration = service.oauth2.CustomAppIntegrationAPI(self._api_client)
+        self._encryption_keys = service.provisioning.EncryptionKeysAPI(self._api_client)
+        self._federation_policy = service.oauth2.AccountFederationPolicyAPI(self._api_client)
+        self._groups = service.iam.AccountGroupsAPI(self._api_client)
+        self._ip_access_lists = service.settings.AccountIpAccessListsAPI(self._api_client)
+        self._log_delivery = service.billing.LogDeliveryAPI(self._api_client)
+        self._metastore_assignments = service.catalog.AccountMetastoreAssignmentsAPI(self._api_client)
+        self._metastores = service.catalog.AccountMetastoresAPI(self._api_client)
+        self._network_connectivity = service.settings.NetworkConnectivityAPI(self._api_client)
+        self._networks = service.provisioning.NetworksAPI(self._api_client)
+        self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client)
+        self._private_access = service.provisioning.PrivateAccessAPI(self._api_client)
+        self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI(
+            self._api_client)
+        self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client)
+        self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client)
+        self._settings = service.settings.AccountSettingsAPI(self._api_client)
+        self._storage = service.provisioning.StorageAPI(self._api_client)
+        self._storage_credentials = service.catalog.AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = service.billing.UsageDashboardsAPI(self._api_client)
+        self._users = service.iam.AccountUsersAPI(self._api_client)
+        self._vpc_endpoints = service.provisioning.VpcEndpointsAPI(self._api_client)
+        self._workspace_assignment = service.iam.WorkspaceAssignmentAPI(self._api_client)
+        self._workspaces = service.provisioning.WorkspacesAPI(self._api_client)
+        self._budgets = service.billing.BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -904,147 +911,147 @@ def api_client(self) -> client.ApiClient:
         return self._api_client
 
     @property
-    def access_control(self) -> AccountAccessControlAPI:
+    def access_control(self) -> service.iam.AccountAccessControlAPI:
         """These APIs manage access rules on resources in an account."""
         return self._access_control
 
     @property
-    def billable_usage(self) -> BillableUsageAPI:
+    def billable_usage(self) -> service.billing.BillableUsageAPI:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
     @property
-    def budget_policy(self) -> BudgetPolicyAPI:
+    def budget_policy(self) -> service.billing.BudgetPolicyAPI:
         """A service serves REST API about Budget policies."""
         return self._budget_policy
 
     @property
-    def credentials(self) -> CredentialsAPI:
+    def credentials(self) -> service.provisioning.CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
         return self._credentials
 
     @property
-    def custom_app_integration(self) -> CustomAppIntegrationAPI:
+    def custom_app_integration(self) -> service.oauth2.CustomAppIntegrationAPI:
         """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
-    def encryption_keys(self) -> EncryptionKeysAPI:
+    def encryption_keys(self) -> service.provisioning.EncryptionKeysAPI:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
     @property
-    def federation_policy(self) -> AccountFederationPolicyAPI:
+    def federation_policy(self) -> service.oauth2.AccountFederationPolicyAPI:
         """These APIs manage account federation policies."""
         return self._federation_policy
 
     @property
-    def groups(self) -> AccountGroupsAPI:
+    def groups(self) -> service.iam.AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
         return self._groups
 
     @property
-    def ip_access_lists(self) -> AccountIpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.AccountIpAccessListsAPI:
         """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console."""
         return self._ip_access_lists
 
     @property
-    def log_delivery(self) -> LogDeliveryAPI:
+    def log_delivery(self) -> service.billing.LogDeliveryAPI:
         """These APIs manage log delivery configurations for this account."""
         return self._log_delivery
 
     @property
-    def metastore_assignments(self) -> AccountMetastoreAssignmentsAPI:
+    def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI:
         """These APIs manage metastore assignments to a workspace."""
         return self._metastore_assignments
 
     @property
-    def metastores(self) -> AccountMetastoresAPI:
+    def metastores(self) -> service.catalog.AccountMetastoresAPI:
         """These APIs manage Unity Catalog metastores for an account."""
         return self._metastores
 
     @property
-    def network_connectivity(self) -> NetworkConnectivityAPI:
+    def network_connectivity(self) -> service.settings.NetworkConnectivityAPI:
         """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources."""
         return self._network_connectivity
 
     @property
-    def networks(self) -> NetworksAPI:
+    def networks(self) -> service.provisioning.NetworksAPI:
         """These APIs manage network configurations for customer-managed VPCs (optional)."""
         return self._networks
 
     @property
-    def o_auth_published_apps(self) -> OAuthPublishedAppsAPI:
+    def o_auth_published_apps(self) -> service.oauth2.OAuthPublishedAppsAPI:
         """These APIs enable administrators to view all the available published OAuth applications in Databricks."""
         return self._o_auth_published_apps
 
     @property
-    def private_access(self) -> PrivateAccessAPI:
+    def private_access(self) -> service.provisioning.PrivateAccessAPI:
         """These APIs manage private access settings for this account."""
         return self._private_access
 
     @property
-    def published_app_integration(self) -> PublishedAppIntegrationAPI:
+    def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
-    def service_principal_federation_policy(self) -> ServicePrincipalFederationPolicyAPI:
+    def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI:
         """These APIs manage service principal federation policies."""
         return self._service_principal_federation_policy
 
     @property
-    def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
+    def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
         return self._service_principal_secrets
 
     @property
-    def service_principals(self) -> AccountServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.AccountServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
     @property
-    def settings(self) -> AccountSettingsAPI:
+    def settings(self) -> service.settings.AccountSettingsAPI:
         """Accounts Settings API allows users to manage settings at the account level."""
         return self._settings
 
     @property
-    def storage(self) -> StorageAPI:
+    def storage(self) -> service.provisioning.StorageAPI:
         """These APIs manage storage configurations for this workspace."""
         return self._storage
 
     @property
-    def storage_credentials(self) -> AccountStorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
     @property
-    def usage_dashboards(self) -> UsageDashboardsAPI:
+    def usage_dashboards(self) -> service.billing.UsageDashboardsAPI:
         """These APIs manage usage dashboards for this account."""
         return self._usage_dashboards
 
     @property
-    def users(self) -> AccountUsersAPI:
+    def users(self) -> service.iam.AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vpc_endpoints(self) -> VpcEndpointsAPI:
+    def vpc_endpoints(self) -> service.provisioning.VpcEndpointsAPI:
         """These APIs manage VPC endpoint configurations for this account."""
         return self._vpc_endpoints
 
     @property
-    def workspace_assignment(self) -> WorkspaceAssignmentAPI:
+    def workspace_assignment(self) -> service.iam.WorkspaceAssignmentAPI:
         """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account."""
         return self._workspace_assignment
 
     @property
-    def workspaces(self) -> WorkspacesAPI:
+    def workspaces(self) -> service.provisioning.WorkspacesAPI:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
     @property
-    def budgets(self) -> BudgetsAPI:
+    def budgets(self) -> service.billing.BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
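
The refactor above is cosmetic at the call site: every property still returns the same API object, only its declared type now spells out the owning `service` submodule. A minimal sketch of what this means for callers and type checkers (assuming host, account id, and credentials are available from the environment):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import BudgetsAPI
from databricks.sdk.service.iam import AccountUsersAPI

a = AccountClient()  # reads host/account_id/credentials from the environment

# The annotations now name the owning submodule explicitly,
# but the returned objects are unchanged:
assert isinstance(a.budgets, BudgetsAPI)
assert isinstance(a.users, AccountUsersAPI)
```
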
 
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 843c94e94..d15a6bef2 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -45,6 +45,9 @@ class App:
     description: Optional[str] = None
     """The description of the app."""
 
+    id: Optional[str] = None
+    """The unique identifier of the app."""
+
     pending_deployment: Optional[AppDeployment] = None
     """The pending deployment of the app. A deployment is considered pending when it is being prepared
     for deployment to the app compute."""
@@ -78,6 +81,7 @@ def as_dict(self) -> dict:
         if self.default_source_code_path is not None:
             body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
         if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
@@ -102,6 +106,7 @@ def as_shallow_dict(self) -> dict:
         if self.default_source_code_path is not None:
             body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment
         if self.resources: body['resources'] = self.resources
@@ -125,6 +130,7 @@ def from_dict(cls, d: Dict[str, any]) -> App:
                    creator=d.get('creator', None),
                    default_source_code_path=d.get('default_source_code_path', None),
                    description=d.get('description', None),
+                   id=d.get('id', None),
                    name=d.get('name', None),
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                    resources=_repeated_dict(d, 'resources', AppResource),
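
For illustration, a tiny round-trip exercising the new `id` field through all three serialization paths added above (values are made up):

```python
from databricks.sdk.service.apps import App

app = App.from_dict({'name': 'my-app', 'id': '1234'})  # hypothetical values
assert app.id == '1234'
# The id survives both serialization paths added above:
assert App.from_dict(app.as_dict()).id == '1234'
assert app.as_shallow_dict()['id'] == '1234'
```
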
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index d58765f23..dd2579921 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -894,6 +894,27 @@ def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse:
         return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
+@dataclass
+class LimitConfig:
+    """The limit configuration of the policy. Limit configuration provide a budget policy level cost
+    control by enforcing the limit."""
+
+    def as_dict(self) -> dict:
+        """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LimitConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LimitConfig:
+        """Deserializes the LimitConfig from a dictionary."""
+        return cls()
+
+
 @dataclass
 class ListBudgetConfigurationsResponse:
     budgets: Optional[List[BudgetConfiguration]] = None
@@ -1641,23 +1662,32 @@ def list(self,
                 return
             query['page_token'] = json['next_page_token']
 
-    def update(self, policy_id: str, *, policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
+    def update(self,
+               policy_id: str,
+               *,
+               limit_config: Optional[LimitConfig] = None,
+               policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
         """Update a budget policy.
         
         Updates a policy
         
         :param policy_id: str
           The Id of the policy. This field is generated by Databricks and globally unique.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This is a redundant field, as LimitConfig is part of the BudgetPolicy.
         :param policy: :class:`BudgetPolicy` (optional)
           Contains the BudgetPolicy details.
         
         :returns: :class:`BudgetPolicy`
         """
         body = policy.as_dict()
+        query = {}
+        if limit_config is not None: query['limit_config'] = limit_config.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
                            f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           query=query,
                            body=body,
                            headers=headers)
         return BudgetPolicy.from_dict(res)
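
A hedged sketch of the new call shape (the policy id is a placeholder, and `policy_name` is assumed to be a field on the generated `BudgetPolicy` model; per the docstring, `limit_config` is already deprecated in favor of the limit configuration carried inside `BudgetPolicy` itself):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import BudgetPolicy, LimitConfig

a = AccountClient()
updated = a.budget_policy.update(
    policy_id='<policy-id>',                         # placeholder
    policy=BudgetPolicy(policy_name='team-budget'),  # assumed field name
    limit_config=LimitConfig(),                      # deprecated; sent as a query parameter
)
print(updated.policy_id)
```
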
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index a8a087762..83d7de4e8 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -8983,6 +8983,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers)
             if 'catalogs' in json:
@@ -9151,6 +9152,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/connections', query=query, headers=headers)
             if 'connections' in json:
@@ -9656,6 +9658,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/external-locations',
@@ -11389,6 +11392,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/schemas', query=query, headers=headers)
             if 'schemas' in json:
@@ -11578,6 +11582,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/storage-credentials',
@@ -11802,6 +11807,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
@@ -12044,6 +12050,7 @@ def list(self,
         if schema_name is not None: query['schema_name'] = schema_name
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query, headers=headers)
             if 'tables' in json:
@@ -12104,6 +12111,7 @@ def list_summaries(self,
         if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query, headers=headers)
             if 'tables' in json:
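
Each of these Unity Catalog list endpoints now defaults `max_results` to 0 when the caller did not set it, presumably so the server keeps returning the old, uncapped page size while the SDK still follows `next_page_token`. Call sites are unchanged; a minimal sketch:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# No max_results given: the SDK now sends max_results=0 and keeps
# following next_page_token until the listing is exhausted.
for catalog in w.catalogs.list():
    print(catalog.name)

# An explicit max_results is passed through untouched ('main' is a placeholder):
for schema in w.schemas.list(catalog_name='main', max_results=50):
    print(schema.full_name)
```
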
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
index 45ad2f290..f7a213669 100755
--- a/databricks/sdk/service/cleanrooms.py
+++ b/databricks/sdk/service/cleanrooms.py
@@ -289,24 +289,11 @@ class CleanRoomAssetNotebook:
     """Base 64 representation of the notebook contents. This is the same format as returned by
     :method:workspace/export with the format of **HTML**."""
 
-    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """top-level status derived from all reviews"""
-
-    reviews: Optional[List[CleanRoomNotebookReview]] = None
-    """All existing approvals or rejections"""
-
-    runner_collaborators: Optional[List[CleanRoomCollaborator]] = None
-    """collaborators that can run the notebook"""
-
     def as_dict(self) -> dict:
         """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.review_state is not None: body['review_state'] = self.review_state.value
-        if self.reviews: body['reviews'] = [v.as_dict() for v in self.reviews]
-        if self.runner_collaborators:
-            body['runner_collaborators'] = [v.as_dict() for v in self.runner_collaborators]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -314,19 +301,12 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.etag is not None: body['etag'] = self.etag
         if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.review_state is not None: body['review_state'] = self.review_state
-        if self.reviews: body['reviews'] = self.reviews
-        if self.runner_collaborators: body['runner_collaborators'] = self.runner_collaborators
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
         """Deserializes the CleanRoomAssetNotebook from a dictionary."""
-        return cls(etag=d.get('etag', None),
-                   notebook_content=d.get('notebook_content', None),
-                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
-                   reviews=_repeated_dict(d, 'reviews', CleanRoomNotebookReview),
-                   runner_collaborators=_repeated_dict(d, 'runner_collaborators', CleanRoomCollaborator))
+        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))
 
 
 class CleanRoomAssetStatusEnum(Enum):
@@ -531,56 +511,6 @@ def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator:
                    organization_name=d.get('organization_name', None))
 
 
-@dataclass
-class CleanRoomNotebookReview:
-    comment: Optional[str] = None
-    """review comment"""
-
-    created_at_millis: Optional[int] = None
-    """timestamp of when the review was submitted"""
-
-    review_state: Optional[CleanRoomNotebookReviewNotebookReviewState] = None
-    """review outcome"""
-
-    reviewer_collaborator_alias: Optional[str] = None
-    """collaborator alias of the reviewer"""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookReview into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
-        if self.review_state is not None: body['review_state'] = self.review_state.value
-        if self.reviewer_collaborator_alias is not None:
-            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookReview into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at_millis is not None: body['created_at_millis'] = self.created_at_millis
-        if self.review_state is not None: body['review_state'] = self.review_state
-        if self.reviewer_collaborator_alias is not None:
-            body['reviewer_collaborator_alias'] = self.reviewer_collaborator_alias
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookReview:
-        """Deserializes the CleanRoomNotebookReview from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at_millis=d.get('created_at_millis', None),
-                   review_state=_enum(d, 'review_state', CleanRoomNotebookReviewNotebookReviewState),
-                   reviewer_collaborator_alias=d.get('reviewer_collaborator_alias', None))
-
-
-class CleanRoomNotebookReviewNotebookReviewState(Enum):
-
-    APPROVED = 'APPROVED'
-    PENDING = 'PENDING'
-    REJECTED = 'REJECTED'
-
-
 @dataclass
 class CleanRoomNotebookTaskRun:
     """Stores information about a single task run."""
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 8a48b0cc0..c16f699bb 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -637,11 +637,11 @@ class ClusterAttributes:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -947,11 +947,11 @@ class ClusterDetails:
     while each new cluster has a globally unique id."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_log_status: Optional[LogSyncStatus] = None
     """Cluster log delivery status."""
@@ -1428,11 +1428,16 @@ class ClusterLogConf:
     access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to
     write data to the s3 destination."""
 
+    volumes: Optional[VolumesStorageInfo] = None
+    """destination needs to be provided. e.g. `{ "volumes" : { "destination" :
+    "/Volumes/catalog/schema/volume/cluster_log" } }`"""
+
     def as_dict(self) -> dict:
         """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
         if self.s3: body['s3'] = self.s3.as_dict()
+        if self.volumes: body['volumes'] = self.volumes.as_dict()
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -1440,12 +1445,15 @@ def as_shallow_dict(self) -> dict:
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs
         if self.s3: body['s3'] = self.s3
+        if self.volumes: body['volumes'] = self.volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo))
+        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
+                   s3=_from_dict(d, 's3', S3StorageInfo),
+                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo))
 
 
 @dataclass
@@ -1918,11 +1926,11 @@ class ClusterSpec:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -2334,11 +2342,11 @@ class CreateCluster:
     cluster."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -3469,11 +3477,11 @@ class EditCluster:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -7773,11 +7781,11 @@ class UpdateClusterResource:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -8077,7 +8085,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
 @dataclass
 class VolumesStorageInfo:
     destination: str
-    """Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`"""
+    """Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file`"""
 
     def as_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body."""
@@ -8619,11 +8627,11 @@ def create(self,
         :param clone_from: :class:`CloneCluster` (optional)
           When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -8952,11 +8960,11 @@ def edit(self,
           Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
           set of default values will be used.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
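
Putting the new `volumes` destination together, a minimal sketch of a log configuration targeting a Unity Catalog volume (the path is a placeholder; only one of `dbfs`, `s3`, or `volumes` may be set per cluster):

```python
from databricks.sdk.service import compute

log_conf = compute.ClusterLogConf(
    volumes=compute.VolumesStorageInfo(
        destination='/Volumes/catalog/schema/volume/cluster_log'))

# Serializes to the shape shown in the docstring above:
assert log_conf.as_dict() == {
    'volumes': {'destination': '/Volumes/catalog/schema/volume/cluster_log'}}
```
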
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 58a89ea55..ba01ba41d 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -827,6 +827,7 @@ class MessageErrorType(Enum):
     REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
     RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
     SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
+    STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE'
     TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
     TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
     TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
@@ -1743,6 +1744,33 @@ def get_message_query_result(self, space_id: str, conversation_id: str,
             headers=headers)
         return GenieGetMessageQueryResultResponse.from_dict(res)
 
+    def get_message_query_result_by_attachment(self, space_id: str, conversation_id: str, message_id: str,
+                                               attachment_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result by attachment id.
+        
+        Get the result of the SQL query by attachment id. This is only available if a message has a query
+        attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
     def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
         """Start conversation.
         
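
A hedged usage sketch of the new attachment-scoped getter (all IDs are placeholders; in practice they come from earlier `start_conversation` and message responses, and the result is only available while the message status is `EXECUTING_QUERY`):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
result = w.genie.get_message_query_result_by_attachment(
    space_id='<space-id>',
    conversation_id='<conversation-id>',
    message_id='<message-id>',
    attachment_id='<attachment-id>',
)
# statement_response carries the SQL result, per the response model:
print(result.statement_response)
```
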
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index ae506d4b9..938445863 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -1247,7 +1247,7 @@ class ExternalModel:
     provider: ExternalModelProvider
     """The name of the provider for the external model. Currently, the supported providers are
     'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and 'palm'."""
+    'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'."""
 
     name: str
     """The name of the external model."""
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 1990c7c54..2015f4ac5 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -1658,6 +1658,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
             if 'providers' in json:
@@ -1699,12 +1700,18 @@ def list_shares(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET',
-                            f'/api/2.1/unity-catalog/providers/{name}/shares',
-                            query=query,
-                            headers=headers)
-        parsed = ListProviderSharesResponse.from_dict(json).shares
-        return parsed if parsed is not None else []
+        if "max_results" not in query: query['max_results'] = 0
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/providers/{name}/shares',
+                                query=query,
+                                headers=headers)
+            if 'shares' in json:
+                for v in json['shares']:
+                    yield ProviderShare.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def update(self,
                name: str,
@@ -1937,6 +1944,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
             if 'recipients' in json:
@@ -2157,6 +2165,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers)
             if 'shares' in json:
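
These sharing list methods now default `max_results` to 0 and yield results page by page, following `next_page_token` until it is absent. A short consumption sketch (`my-provider` is a hypothetical name):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# list_shares is now a lazy generator: each page is fetched only as the
# iterator is consumed, so breaking early avoids further API calls.
for share in w.providers.list_shares(name="my-provider"):
    print(share.name)
```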
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index 1e79165d5..a262ca730 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.43.0'
+__version__ = '0.44.0'
diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst
index 181b91cc3..95a584e56 100644
--- a/docs/account/billing/billable_usage.rst
+++ b/docs/account/billing/billable_usage.rst
@@ -5,7 +5,7 @@
 .. py:class:: BillableUsageAPI
 
     This API allows you to download billable usage logs for the specified account and date range. This feature
-    works with all account types.
+works with all account types.
 
     .. py:method:: download(start_month: str, end_month: str [, personal_data: Optional[bool]]) -> DownloadResponse
 
@@ -21,24 +21,23 @@
             resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
 
         Return billable usage logs.
-        
-        Returns billable usage logs in CSV format for the specified account and date range. For the data
-        schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
-        
-        **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of
-        the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If
-        you experience this, try to mitigate by calling the API with narrower date ranges.
-        
-        [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
-        
-        :param start_month: str
-          Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
-        :param end_month: str
-          Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required.
-        :param personal_data: bool (optional)
-          Specify whether to include personally identifiable information in the billable usage logs, for
-          example the email addresses of cluster creators. Handle this information with care. Defaults to
-          false.
-        
-        :returns: :class:`DownloadResponse`
-        
\ No newline at end of file
+
+Returns billable usage logs in CSV format for the specified account and date range. For the data
+schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
+
+**Warning**: Depending on the queried date range, the number of workspaces in the account, the size of
+the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If
+you experience this, try to mitigate by calling the API with narrower date ranges.
+
+[CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
+
+:param start_month: str
+  Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
+:param end_month: str
+  Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required.
+:param personal_data: bool (optional)
+  Specify whether to include personally identifiable information in the billable usage logs, for
+  example the email addresses of cluster creators. Handle this information with care. Defaults to
+  false.
+
+:returns: :class:`DownloadResponse`
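
One way to apply the narrower-date-range advice above is to download month by month; a sketch, assuming an `AccountClient` configured for the account:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Querying one month at a time keeps each response small enough to
# stay clear of the timeout described above.
for month in ["2024-07", "2024-08", "2024-09"]:
    resp = a.billable_usage.download(start_month=month, end_month=month)
```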
diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst
index 06490428e..dac33ddd2 100644
--- a/docs/account/billing/budget_policy.rst
+++ b/docs/account/billing/budget_policy.rst
@@ -9,78 +9,79 @@
     .. py:method:: create( [, custom_tags: Optional[List[compute.CustomPolicyTag]], policy_name: Optional[str], request_id: Optional[str]]) -> BudgetPolicy
 
         Create a budget policy.
-        
-        Creates a new policy.
-        
-        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
-          A list of tags defined by the customer. At most 40 entries are allowed per policy.
-        :param policy_name: str (optional)
-          The name of the policy. - Must be unique among active policies. - Can contain only characters of
-          0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
-        :param request_id: str (optional)
-          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
-          recommended. This request is only idempotent if a `request_id` is provided.
-        
-        :returns: :class:`BudgetPolicy`
-        
+
+Creates a new policy.
+
+:param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+  A list of tags defined by the customer. At most 40 entries are allowed per policy.
+:param policy_name: str (optional)
+  The name of the policy. - Must be unique among active policies. - Can contain only characters of
+  0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
+:param request_id: str (optional)
+  A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+  recommended. This request is only idempotent if a `request_id` is provided.
+
+:returns: :class:`BudgetPolicy`
+
 
     .. py:method:: delete(policy_id: str)
 
         Delete a budget policy.
-        
-        Deletes a policy
-        
-        :param policy_id: str
-          The Id of the policy.
-        
-        
-        
+
+Deletes a policy.
+
+:param policy_id: str
+  The Id of the policy.
+
+
+
 
     .. py:method:: get(policy_id: str) -> BudgetPolicy
 
         Get a budget policy.
-        
-        Retrieves a policy by it's ID.
-        
-        :param policy_id: str
-          The Id of the policy.
-        
-        :returns: :class:`BudgetPolicy`
-        
+
+Retrieves a policy by its ID.
+
+:param policy_id: str
+  The Id of the policy.
+
+:returns: :class:`BudgetPolicy`
+
 
     .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy]
 
         List policies.
-        
-        Lists all policies. Policies are returned in the alphabetically ascending order of their names.
-        
-        :param filter_by: :class:`Filter` (optional)
-          A filter to apply to the list of policies.
-        :param page_size: int (optional)
-          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
-          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
-        :param page_token: str (optional)
-          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
-          subsequent page. If unspecified, the first page will be returned.
-          
-          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
-          call that provided the page token.
-        :param sort_spec: :class:`SortSpec` (optional)
-          The sort specification.
-        
-        :returns: Iterator over :class:`BudgetPolicy`
-        
-
-    .. py:method:: update(policy_id: str [, policy: Optional[BudgetPolicy]]) -> BudgetPolicy
+
+Lists all policies. Policies are returned in the alphabetically ascending order of their names.
+
+:param filter_by: :class:`Filter` (optional)
+  A filter to apply to the list of policies.
+:param page_size: int (optional)
+  The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+  returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+:param page_token: str (optional)
+  A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+  subsequent page. If unspecified, the first page will be returned.
+  
+  When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+  call that provided the page token.
+:param sort_spec: :class:`SortSpec` (optional)
+  The sort specification.
+
+:returns: Iterator over :class:`BudgetPolicy`
+
+
+    .. py:method:: update(policy_id: str [, limit_config: Optional[LimitConfig], policy: Optional[BudgetPolicy]]) -> BudgetPolicy
 
         Update a budget policy.
-        
-        Updates a policy
-        
-        :param policy_id: str
-          The Id of the policy. This field is generated by Databricks and globally unique.
-        :param policy: :class:`BudgetPolicy` (optional)
-          Contains the BudgetPolicy details.
-        
-        :returns: :class:`BudgetPolicy`
-        
\ No newline at end of file
+
+Updates a policy.
+
+:param policy_id: str
+  The Id of the policy. This field is generated by Databricks and globally unique.
+:param limit_config: :class:`LimitConfig` (optional)
+  DEPRECATED. This field is redundant, as LimitConfig is part of the BudgetPolicy.
+:param policy: :class:`BudgetPolicy` (optional)
+  Contains the BudgetPolicy details.
+
+:returns: :class:`BudgetPolicy`
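
A create/update/delete sketch for budget policies, assuming the client accessor is `a.budget_policy` and that `BudgetPolicy` exposes `policy_id` and `policy_name`; the names are arbitrary:

```python
from databricks.sdk import AccountClient

a = AccountClient()

created = a.budget_policy.create(policy_name="sdk-example-policy")

# update() takes the full BudgetPolicy; the separate limit_config
# argument is deprecated, as noted above.
created.policy_name = "sdk-example-policy-renamed"
a.budget_policy.update(policy_id=created.policy_id, policy=created)

a.budget_policy.delete(policy_id=created.policy_id)
```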
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index 43c77d00b..9acd2288a 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -5,8 +5,8 @@
 .. py:class:: BudgetsAPI
 
     These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
-    account. You can set up budgets to either track account-wide spending, or apply filters to track the
-    spending of specific teams, projects, or workspaces.
+account. You can set up budgets to either track account-wide spending, or apply filters to track the
+spending of specific teams, projects, or workspaces.
 
     .. py:method:: create(budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse
 
@@ -47,28 +47,28 @@
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Create new budget.
-        
-        Create a new budget configuration for an account. For full details, see
-        https://docs.databricks.com/en/admin/account-settings/budgets.html.
-        
-        :param budget: :class:`CreateBudgetConfigurationBudget`
-          Properties of the new budget configuration.
-        
-        :returns: :class:`CreateBudgetConfigurationResponse`
-        
+
+Create a new budget configuration for an account. For full details, see
+https://docs.databricks.com/en/admin/account-settings/budgets.html.
+
+:param budget: :class:`CreateBudgetConfigurationBudget`
+  Properties of the new budget configuration.
+
+:returns: :class:`CreateBudgetConfigurationResponse`
+
 
     .. py:method:: delete(budget_id: str)
 
         Delete budget.
-        
-        Deletes a budget configuration for an account. Both account and budget configuration are specified by
-        ID. This cannot be undone.
-        
-        :param budget_id: str
-          The Databricks budget configuration ID.
-        
-        
-        
+
+Deletes a budget configuration for an account. Both account and budget configuration are specified by
+ID. This cannot be undone.
+
+:param budget_id: str
+  The Databricks budget configuration ID.
+
+
+
 
     .. py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse
 
@@ -111,14 +111,14 @@
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Get budget.
-        
-        Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
-        
-        :param budget_id: str
-          The budget configuration ID
-        
-        :returns: :class:`GetBudgetConfigurationResponse`
-        
+
+Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
+
+:param budget_id: str
+  The budget configuration ID
+
+:returns: :class:`GetBudgetConfigurationResponse`
+
 
     .. py:method:: list( [, page_token: Optional[str]]) -> Iterator[BudgetConfiguration]
 
@@ -135,15 +135,15 @@
             all = a.budgets.list(billing.ListBudgetConfigurationsRequest())
 
         Get all budgets.
-        
-        Gets all budgets associated with this account.
-        
-        :param page_token: str (optional)
-          A page token received from a previous get all budget configurations call. This token can be used to
-          retrieve the subsequent page. Requests first page if absent.
-        
-        :returns: Iterator over :class:`BudgetConfiguration`
-        
+
+Gets all budgets associated with this account.
+
+:param page_token: str (optional)
+  A page token received from a previous get all budget configurations call. This token can be used to
+  retrieve the subsequent page. Requests first page if absent.
+
+:returns: Iterator over :class:`BudgetConfiguration`
+
 
     .. py:method:: update(budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse
 
@@ -205,14 +205,13 @@
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Modify budget.
-        
-        Updates a budget configuration for an account. Both account and budget configuration are specified by
-        ID.
-        
-        :param budget_id: str
-          The Databricks budget configuration ID.
-        :param budget: :class:`UpdateBudgetConfigurationBudget`
-          The updated budget. This will overwrite the budget specified by the budget ID.
-        
-        :returns: :class:`UpdateBudgetConfigurationResponse`
-        
\ No newline at end of file
+
+Updates a budget configuration for an account. Both account and budget configuration are specified by
+ID.
+
+:param budget_id: str
+  The Databricks budget configuration ID.
+:param budget: :class:`UpdateBudgetConfigurationBudget`
+  The updated budget. This will overwrite the budget specified by the budget ID.
+
+:returns: :class:`UpdateBudgetConfigurationResponse`
diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst
index 04ef4e349..e8143a711 100644
--- a/docs/account/billing/log_delivery.rst
+++ b/docs/account/billing/log_delivery.rst
@@ -5,51 +5,51 @@
 .. py:class:: LogDeliveryAPI
 
     These APIs manage log delivery configurations for this account. The two supported log types for this API
-    are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
-    account ID types.
-    
-    Log delivery works with all account types. However, if your account is on the E2 version of the platform
-    or on a select custom plan that allows multiple workspaces per account, you can optionally configure
-    different storage destinations for each workspace. Log delivery status is also provided to know the latest
-    status of log delivery attempts. The high-level flow of billable usage delivery:
-    
-    1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
-    Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
-    that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For
-    full details, including the required IAM role policies and trust relationship, see [Billable usage log
-    delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
-    object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create log delivery
-    configuration**: Using Databricks APIs, call the Account API to [create a log delivery
-    configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from
-    previous steps. You can specify if the logs should include all events of that log type in your account
-    (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery).
-    Account level log delivery applies to all current and future workspaces plus account level logs, while
-    workspace level log delivery solely delivers logs related to the specified workspaces. You can create
-    multiple types of delivery configurations per account.
-    
-    For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
-    delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
-    `//billable-usage/csv/`, where `` is the name of the optional delivery path
-    prefix you set up during log delivery configuration. Files are named
-    `workspaceId=-usageMonth=.csv`. * All billable usage logs apply to specific
-    workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
-    _account level_ delivery configuration that delivers logs for all current and future workspaces in your
-    account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
-    
-    For audit log delivery: * For more information about about audit log delivery, see [Audit log delivery],
-    which includes information about the used JSON schema. * The delivery location is
-    `//workspaceId=/date=/auditlogs_.json`.
-    Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the
-    audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for
-    those workspaces are delivered. If the log delivery configuration applies to the entire account (_account
-    level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
-    workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
-    Auditable events are typically available in logs within 15 minutes.
-    
-    [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-    [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-    [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
-    [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
+account ID types.
+
+Log delivery works with all account types. However, if your account is on the E2 version of the platform
+or on a select custom plan that allows multiple workspaces per account, you can optionally configure
+different storage destinations for each workspace. Log delivery status is also provided to know the latest
+status of log delivery attempts. The high-level flow of billable usage delivery:
+
+1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
+Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
+that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For
+full details, including the required IAM role policies and trust relationship, see [Billable usage log
+delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
+object](:method:Credentials/Create) that uses the IAM role"s ARN. 3. **Create log delivery
+configuration**: Using Databricks APIs, call the Account API to [create a log delivery
+configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from
+previous steps. You can specify if the logs should include all events of that log type in your account
+(_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery).
+Account level log delivery applies to all current and future workspaces plus account level logs, while
+workspace level log delivery solely delivers logs related to the specified workspaces. You can create
+multiple types of delivery configurations per account.
+
+For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
+delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
+`<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path
+prefix you set up during log delivery configuration. Files are named
+`workspaceId=<workspaceId>-usageMonth=<month>.csv`. * All billable usage logs apply to specific
+workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
+_account level_ delivery configuration that delivers logs for all current and future workspaces in your
+account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
+
+For audit log delivery: * For more information about audit log delivery, see [Audit log delivery],
+which includes information about the JSON schema used. * The delivery location is
+`<bucket-name>/<prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-suffix>.json`.
+Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the
+audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for
+those workspaces are delivered. If the log delivery configuration applies to the entire account (_account
+level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
+workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
+Auditable events are typically available in logs within 15 minutes.
+
+[Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+[Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+[Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+[create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
 
     .. py:method:: create( [, log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams]]) -> WrappedLogDeliveryConfiguration
 
@@ -88,32 +88,32 @@
                                         status=billing.LogDeliveryConfigStatus.DISABLED)
 
         Create a new log delivery configuration.
-        
-        Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs
-        to your storage location. This requires that you already created a [credential
-        object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
-        [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
-        
-        For full details, including the required IAM role policies and bucket policies, see [Deliver and
-        access billable usage logs] or [Configure audit logging].
-        
-        **Note**: There is a limit on the number of log delivery configurations available per account (each
-        limit applies separately to each log type including billable usage and audit logs). You can create a
-        maximum of two enabled account-level delivery configurations (configurations without a workspace
-        filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per
-        workspace for each log type, which means that the same workspace ID can occur in the workspace filter
-        for no more than two delivery configurations per log type.
-        
-        You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log
-        delivery configuration](:method:LogDelivery/PatchStatus)).
-        
-        [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-        [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-        
-        :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional)
-        
-        :returns: :class:`WrappedLogDeliveryConfiguration`
-        
+
+Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs
+to your storage location. This requires that you already created a [credential
+object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
+[storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
+
+For full details, including the required IAM role policies and bucket policies, see [Deliver and
+access billable usage logs] or [Configure audit logging].
+
+**Note**: There is a limit on the number of log delivery configurations available per account (each
+limit applies separately to each log type including billable usage and audit logs). You can create a
+maximum of two enabled account-level delivery configurations (configurations without a workspace
+filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per
+workspace for each log type, which means that the same workspace ID can occur in the workspace filter
+for no more than two delivery configurations per log type.
+
+You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log
+delivery configuration](:method:LogDelivery/PatchStatus)).
+
+[Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+[Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+
+:param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional)
+
+:returns: :class:`WrappedLogDeliveryConfiguration`
+
 
     .. py:method:: get(log_delivery_configuration_id: str) -> WrappedLogDeliveryConfiguration
 
@@ -154,14 +154,14 @@
                                         status=billing.LogDeliveryConfigStatus.DISABLED)
 
         Get log delivery configuration.
-        
-        Gets a Databricks log delivery configuration object for an account, both specified by ID.
-        
-        :param log_delivery_configuration_id: str
-          Databricks log delivery configuration ID
-        
-        :returns: :class:`WrappedLogDeliveryConfiguration`
-        
+
+Gets a Databricks log delivery configuration object for an account, both specified by ID.
+
+:param log_delivery_configuration_id: str
+  Databricks log delivery configuration ID
+
+:returns: :class:`WrappedLogDeliveryConfiguration`
+
 
     .. py:method:: list( [, credentials_id: Optional[str], status: Optional[LogDeliveryConfigStatus], storage_configuration_id: Optional[str]]) -> Iterator[LogDeliveryConfiguration]
 
@@ -178,35 +178,34 @@
             all = a.log_delivery.list(billing.ListLogDeliveryRequest())
 
         Get all log delivery configurations.
-        
-        Gets all Databricks log delivery configurations associated with an account specified by ID.
-        
-        :param credentials_id: str (optional)
-          Filter by credential configuration ID.
-        :param status: :class:`LogDeliveryConfigStatus` (optional)
-          Filter by status `ENABLED` or `DISABLED`.
-        :param storage_configuration_id: str (optional)
-          Filter by storage configuration ID.
-        
-        :returns: Iterator over :class:`LogDeliveryConfiguration`
-        
+
+Gets all Databricks log delivery configurations associated with an account specified by ID.
+
+:param credentials_id: str (optional)
+  Filter by credential configuration ID.
+:param status: :class:`LogDeliveryConfigStatus` (optional)
+  Filter by status `ENABLED` or `DISABLED`.
+:param storage_configuration_id: str (optional)
+  Filter by storage configuration ID.
+
+:returns: Iterator over :class:`LogDeliveryConfiguration`
+
 
     .. py:method:: patch_status(log_delivery_configuration_id: str, status: LogDeliveryConfigStatus)
 
         Enable or disable log delivery configuration.
-        
-        Enables or disables a log delivery configuration. Deletion of delivery configurations is not
-        supported, so disable log delivery configurations that are no longer needed. Note that you can't
-        re-enable a delivery configuration if this would violate the delivery configuration limits described
-        under [Create log delivery](:method:LogDelivery/Create).
-        
-        :param log_delivery_configuration_id: str
-          Databricks log delivery configuration ID
-        :param status: :class:`LogDeliveryConfigStatus`
-          Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults
-          to `ENABLED`. You can [enable or disable the
-          configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is
-          not supported, so disable a log delivery configuration that is no longer needed.
-        
-        
-        
\ No newline at end of file
+
+Enables or disables a log delivery configuration. Deletion of delivery configurations is not
+supported, so disable log delivery configurations that are no longer needed. Note that you can't
+re-enable a delivery configuration if this would violate the delivery configuration limits described
+under [Create log delivery](:method:LogDelivery/Create).
+
+:param log_delivery_configuration_id: str
+  Databricks log delivery configuration ID
+:param status: :class:`LogDeliveryConfigStatus`
+  Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults
+  to `ENABLED`. You can [enable or disable the
+  configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is
+  not supported, so disable a log delivery configuration that is no longer needed.
+
+
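
Since configurations cannot be deleted, disabling is the cleanup path; a sketch with a placeholder configuration ID:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

# Delivery configurations cannot be deleted, only disabled.
a.log_delivery.patch_status(
    log_delivery_configuration_id="config-id",  # placeholder
    status=billing.LogDeliveryConfigStatus.DISABLED,
)
```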
diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst
index 350ef1f08..44a1c35eb 100644
--- a/docs/account/billing/usage_dashboards.rst
+++ b/docs/account/billing/usage_dashboards.rst
@@ -5,35 +5,34 @@
 .. py:class:: UsageDashboardsAPI
 
     These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into
-    your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
-    drivers.
+your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
+drivers.
 
     .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse
 
         Create new usage dashboard.
-        
-        Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
-        
-        :param dashboard_type: :class:`UsageDashboardType` (optional)
-          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
-          dashboard shows usage data for all workspaces in the account.
-        :param workspace_id: int (optional)
-          The workspace ID of the workspace in which the usage dashboard is created.
-        
-        :returns: :class:`CreateBillingUsageDashboardResponse`
-        
+
+Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
+
+:param dashboard_type: :class:`UsageDashboardType` (optional)
+  Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+  dashboard shows usage data for all workspaces in the account.
+:param workspace_id: int (optional)
+  The workspace ID of the workspace in which the usage dashboard is created.
+
+:returns: :class:`CreateBillingUsageDashboardResponse`
+
 
     .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse
 
         Get usage dashboard.
-        
-        Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
-        
-        :param dashboard_type: :class:`UsageDashboardType` (optional)
-          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
-          dashboard shows usage data for all workspaces in the account.
-        :param workspace_id: int (optional)
-          The workspace ID of the workspace in which the usage dashboard is created.
-        
-        :returns: :class:`GetBillingUsageDashboardResponse`
-        
\ No newline at end of file
+
+Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
+
+:param dashboard_type: :class:`UsageDashboardType` (optional)
+  Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+  dashboard shows usage data for all workspaces in the account.
+:param workspace_id: int (optional)
+  The workspace ID of the workspace in which the usage dashboard is created.
+
+:returns: :class:`GetBillingUsageDashboardResponse`
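
A sketch of creating and then fetching a workspace-level usage dashboard; the `UsageDashboardType` member name and the workspace ID are assumptions:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

# Enum member name is an assumption; workspace_id is a placeholder.
dashboard_type = billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE
a.usage_dashboards.create(dashboard_type=dashboard_type, workspace_id=1234567890)
resp = a.usage_dashboards.get(dashboard_type=dashboard_type, workspace_id=1234567890)
```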
diff --git a/docs/account/catalog/metastore_assignments.rst b/docs/account/catalog/metastore_assignments.rst
index f5b00c6b3..00ea12a65 100644
--- a/docs/account/catalog/metastore_assignments.rst
+++ b/docs/account/catalog/metastore_assignments.rst
@@ -9,45 +9,45 @@
     .. py:method:: create(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[CreateMetastoreAssignment]])
 
         Assigns a workspace to a metastore.
-        
-        Creates an assignment to a metastore for a workspace
-        
-        :param workspace_id: int
-          Workspace ID.
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional)
-        
-        
-        
+
+Creates an assignment to a metastore for a workspace
+
+:param workspace_id: int
+  Workspace ID.
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param metastore_assignment: :class:`CreateMetastoreAssignment` (optional)
+
+
+
 
     .. py:method:: delete(workspace_id: int, metastore_id: str)
 
         Delete a metastore assignment.
-        
-        Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
-        
-        :param workspace_id: int
-          Workspace ID.
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        
-        
-        
+
+Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
+
+:param workspace_id: int
+  Workspace ID.
+:param metastore_id: str
+  Unity Catalog metastore ID
+
+
+
 
     .. py:method:: get(workspace_id: int) -> AccountsMetastoreAssignment
 
         Gets the metastore assignment for a workspace.
-        
-        Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned
-        a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment
-        will not be found and a 404 returned.
-        
-        :param workspace_id: int
-          Workspace ID.
-        
-        :returns: :class:`AccountsMetastoreAssignment`
-        
+
+Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned
+a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment
+will not be found and a 404 is returned.
+
+:param workspace_id: int
+  Workspace ID.
+
+:returns: :class:`AccountsMetastoreAssignment`
+
 
     .. py:method:: list(metastore_id: str) -> Iterator[int]
 
@@ -65,27 +65,26 @@
             ws = a.metastore_assignments.list(metastore_id=os.environ["TEST_METASTORE_ID"])
 
         Get all workspaces assigned to a metastore.
-        
-        Gets a list of all Databricks workspace IDs that have been assigned to given metastore.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        
-        :returns: Iterator over int
-        
+
+Gets a list of all Databricks workspace IDs that have been assigned to the given metastore.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+
+:returns: Iterator over int
+
 
     .. py:method:: update(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[UpdateMetastoreAssignment]])
 
         Updates a metastore assignment to a workspace.
-        
-        Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be
-        updated.
-        
-        :param workspace_id: int
-          Workspace ID.
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional)
-        
-        
-        
\ No newline at end of file
+
+Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be
+updated.
+
+:param workspace_id: int
+  Workspace ID.
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional)
+
+
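
Since only the default catalog can currently be updated, a typical call looks like this (the IDs, the catalog name, and the `default_catalog_name` field are placeholders or assumptions):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()

# Only the default catalog may currently be updated on an assignment.
a.metastore_assignments.update(
    workspace_id=1234567890,  # placeholder workspace ID
    metastore_id="metastore-id",  # placeholder
    metastore_assignment=catalog.UpdateMetastoreAssignment(default_catalog_name="main"),
)
```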
diff --git a/docs/account/catalog/metastores.rst b/docs/account/catalog/metastores.rst
index 15f39060d..4a7b66ed6 100644
--- a/docs/account/catalog/metastores.rst
+++ b/docs/account/catalog/metastores.rst
@@ -5,63 +5,62 @@
 .. py:class:: AccountMetastoresAPI
 
     These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be
-    associated with workspaces
+associated with workspaces
 
     .. py:method:: create( [, metastore_info: Optional[CreateMetastore]]) -> AccountsMetastoreInfo
 
         Create metastore.
-        
-        Creates a Unity Catalog metastore.
-        
-        :param metastore_info: :class:`CreateMetastore` (optional)
-        
-        :returns: :class:`AccountsMetastoreInfo`
-        
+
+Creates a Unity Catalog metastore.
+
+:param metastore_info: :class:`CreateMetastore` (optional)
+
+:returns: :class:`AccountsMetastoreInfo`
+
 
     .. py:method:: delete(metastore_id: str [, force: Optional[bool]])
 
         Delete a metastore.
-        
-        Deletes a Unity Catalog metastore for an account, both specified by ID.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param force: bool (optional)
-          Force deletion even if the metastore is not empty. Default is false.
-        
-        
-        
+
+Deletes a Unity Catalog metastore for an account, both specified by ID.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param force: bool (optional)
+  Force deletion even if the metastore is not empty. Default is false.
+
+
+
 
     .. py:method:: get(metastore_id: str) -> AccountsMetastoreInfo
 
         Get a metastore.
-        
-        Gets a Unity Catalog metastore from an account, both specified by ID.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        
-        :returns: :class:`AccountsMetastoreInfo`
-        
+
+Gets a Unity Catalog metastore from an account, both specified by ID.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+
+:returns: :class:`AccountsMetastoreInfo`
+
 
     .. py:method:: list() -> Iterator[MetastoreInfo]
 
         Get all metastores associated with an account.
-        
-        Gets all Unity Catalog metastores associated with an account specified by ID.
-        
-        :returns: Iterator over :class:`MetastoreInfo`
-        
+
+Gets all Unity Catalog metastores associated with an account specified by ID.
+
+:returns: Iterator over :class:`MetastoreInfo`
+
 
     .. py:method:: update(metastore_id: str [, metastore_info: Optional[UpdateMetastore]]) -> AccountsMetastoreInfo
 
         Update a metastore.
-        
-        Updates an existing Unity Catalog metastore.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param metastore_info: :class:`UpdateMetastore` (optional)
-        
-        :returns: :class:`AccountsMetastoreInfo`
-        
\ No newline at end of file
+
+Updates an existing Unity Catalog metastore.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param metastore_info: :class:`UpdateMetastore` (optional)
+
+:returns: :class:`AccountsMetastoreInfo`
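
A minimal listing sketch, assuming the account-level accessor is `a.metastores`:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Iterates over every Unity Catalog metastore in the account.
for m in a.metastores.list():
    print(m.metastore_id, m.name)
```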
diff --git a/docs/account/catalog/storage_credentials.rst b/docs/account/catalog/storage_credentials.rst
index 453b3a1eb..65271efcf 100644
--- a/docs/account/catalog/storage_credentials.rst
+++ b/docs/account/catalog/storage_credentials.rst
@@ -9,78 +9,77 @@
     .. py:method:: create(metastore_id: str [, credential_info: Optional[CreateStorageCredential]]) -> AccountsStorageCredentialInfo
 
         Create a storage credential.
-        
-        Creates a new storage credential. The request object is specific to the cloud:
-        
-        * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
-        **GcpServiceAcountKey** for GCP credentials.
-        
-        The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
-        metastore.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param credential_info: :class:`CreateStorageCredential` (optional)
-        
-        :returns: :class:`AccountsStorageCredentialInfo`
-        
+
+Creates a new storage credential. The request object is specific to the cloud:
+
+* **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
+**GcpServiceAccountKey** for GCP credentials.
+
+The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
+metastore.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param credential_info: :class:`CreateStorageCredential` (optional)
+
+:returns: :class:`AccountsStorageCredentialInfo`
+
 
     .. py:method:: delete(metastore_id: str, storage_credential_name: str [, force: Optional[bool]])
 
         Delete a storage credential.
-        
-        Deletes a storage credential from the metastore. The caller must be an owner of the storage
-        credential.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param storage_credential_name: str
-          Name of the storage credential.
-        :param force: bool (optional)
-          Force deletion even if the Storage Credential is not empty. Default is false.
-        
-        
-        
+
+Deletes a storage credential from the metastore. The caller must be an owner of the storage
+credential.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param storage_credential_name: str
+  Name of the storage credential.
+:param force: bool (optional)
+  Force deletion even if the Storage Credential is not empty. Default is false.
+
+
+
 
     .. py:method:: get(metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo
 
         Gets the named storage credential.
-        
-        Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
-        storage credential, or have a level of privilege on the storage credential.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param storage_credential_name: str
-          Name of the storage credential.
-        
-        :returns: :class:`AccountsStorageCredentialInfo`
-        
+
+Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
+storage credential, or have a level of privilege on the storage credential.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param storage_credential_name: str
+  Name of the storage credential.
+
+:returns: :class:`AccountsStorageCredentialInfo`
+
 
     .. py:method:: list(metastore_id: str) -> Iterator[StorageCredentialInfo]
 
         Get all storage credentials assigned to a metastore.
-        
-        Gets a list of all storage credentials that have been assigned to given metastore.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        
-        :returns: Iterator over :class:`StorageCredentialInfo`
-        
+
+Gets a list of all storage credentials that have been assigned to the given metastore.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+
+:returns: Iterator over :class:`StorageCredentialInfo`
+
 
     .. py:method:: update(metastore_id: str, storage_credential_name: str [, credential_info: Optional[UpdateStorageCredential]]) -> AccountsStorageCredentialInfo
 
         Updates a storage credential.
-        
-        Updates a storage credential on the metastore. The caller must be the owner of the storage credential.
-        If the caller is a metastore admin, only the __owner__ credential can be changed.
-        
-        :param metastore_id: str
-          Unity Catalog metastore ID
-        :param storage_credential_name: str
-          Name of the storage credential.
-        :param credential_info: :class:`UpdateStorageCredential` (optional)
-        
-        :returns: :class:`AccountsStorageCredentialInfo`
-        
\ No newline at end of file
+
+Updates a storage credential on the metastore. The caller must be the owner of the storage credential.
+If the caller is a metastore admin, only the __owner__ credential can be changed.
+
+:param metastore_id: str
+  Unity Catalog metastore ID
+:param storage_credential_name: str
+  Name of the storage credential.
+:param credential_info: :class:`UpdateStorageCredential` (optional)
+
+:returns: :class:`AccountsStorageCredentialInfo`
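
An AWS-flavored creation sketch; the role ARN and names are placeholders, the `AwsIamRoleRequest` type name is an assumption, and Azure/GCP would substitute their credential types as described above:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import catalog

a = AccountClient()

# AWS variant; Azure and GCP use AzureServicePrincipal /
# GcpServiceAccountKey request objects instead.
a.storage_credentials.create(
    metastore_id="metastore-id",  # placeholder
    credential_info=catalog.CreateStorageCredential(
        name="sdk-example-credential",
        aws_iam_role=catalog.AwsIamRoleRequest(role_arn="arn:aws:iam::123456789012:role/example"),
    ),
)
```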
diff --git a/docs/account/iam/access_control.rst b/docs/account/iam/access_control.rst
index 2537e262c..80ab61361 100644
--- a/docs/account/iam/access_control.rst
+++ b/docs/account/iam/access_control.rst
@@ -5,52 +5,51 @@
 .. py:class:: AccountAccessControlAPI
 
     These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
-    grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
-    called a rule set.
+grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
+called a rule set.
 
     .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse
 
         Get assignable roles for a resource.
-        
-        Gets all the roles that can be granted on an account level resource. A role is grantable if the rule
-        set on the resource can contain an access rule of the role.
-        
-        :param resource: str
-          The resource name for which assignable roles will be listed.
-        
-        :returns: :class:`GetAssignableRolesForResourceResponse`
-        
+
+Gets all the roles that can be granted on an account level resource. A role is grantable if the rule
+set on the resource can contain an access rule of the role.
+
+:param resource: str
+  The resource name for which assignable roles will be listed.
+
+:returns: :class:`GetAssignableRolesForResourceResponse`
+
 
     .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse
 
         Get a rule set.
-        
-        Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
-        rules on the said resource. Currently only a default rule set for each resource is supported.
-        
-        :param name: str
-          The ruleset name associated with the request.
-        :param etag: str
-          Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
-          optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
-          overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
-          modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an
-          etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
-          version you are updating.
-        
-        :returns: :class:`RuleSetResponse`
-        
+
+Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
+rules on the said resource. Currently only a default rule set for each resource is supported.
+
+:param name: str
+  The ruleset name associated with the request.
+:param etag: str
+  Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
+  optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
+  overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
+  modify -> write pattern to perform rule set updates and avoid race conditions: that is, get an
+  etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
+  version you are updating.
+
+:returns: :class:`RuleSetResponse`
+
 
     .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse
 
         Update a rule set.
-        
-        Replace the rules of a rule set. First, use get to read the current version of the rule set before
-        modifying it. This pattern helps prevent conflicts between concurrent updates.
-        
-        :param name: str
-          Name of the rule set.
-        :param rule_set: :class:`RuleSetUpdateRequest`
-        
-        :returns: :class:`RuleSetResponse`
-        
\ No newline at end of file
+
+Replace the rules of a rule set. First, use get to read the current version of the rule set before
+modifying it. This pattern helps prevent conflicts between concurrent updates.
+
+:param name: str
+  Name of the rule set.
+:param rule_set: :class:`RuleSetUpdateRequest`
+
+:returns: :class:`RuleSetResponse`
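
The read -> modify -> write pattern from the `etag` description, sketched end to end (the rule-set resource name is a placeholder):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import iam

a = AccountClient()

name = "accounts/<account-id>/ruleSets/default"  # placeholder resource name

# Read with an empty etag to get the latest version, then echo that
# etag back on the write so conflicting concurrent updates are rejected.
current = a.access_control.get_rule_set(name=name, etag="")
a.access_control.update_rule_set(
    name=name,
    rule_set=iam.RuleSetUpdateRequest(
        name=name,
        etag=current.etag,
        grant_rules=current.grant_rules or [],
    ),
)
```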
diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst
index a9a4afeac..e7c243fe5 100644
--- a/docs/account/iam/groups.rst
+++ b/docs/account/iam/groups.rst
@@ -5,132 +5,131 @@
 .. py:class:: AccountGroupsAPI
 
     Groups simplify identity management, making it easier to assign access to Databricks account, data, and
-    other securable objects.
-    
-    It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
-    instead of to users individually. All Databricks account identities can be assigned as members of groups,
-    and members inherit permissions that are assigned to their group.
+other securable objects.
+
+It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
+instead of to users individually. All Databricks account identities can be assigned as members of groups,
+and members inherit permissions that are assigned to their group.
 
     .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group
 
         Create a new group.
-        
-        Creates a group in the Databricks account with a unique name, using the supplied group details.
-        
-        :param display_name: str (optional)
-          String that represents a human-readable group name
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-          values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks group ID
-        :param members: List[:class:`ComplexValue`] (optional)
-        :param meta: :class:`ResourceMeta` (optional)
-          Container for the group identifier. Workspace local versus account.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`GroupSchema`] (optional)
-          The schema of the group.
-        
-        :returns: :class:`Group`
-        
+
+Creates a group in the Databricks account with a unique name, using the supplied group details.
+
+:param display_name: str (optional)
+  String that represents a human-readable group name
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+  values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks group ID
+:param members: List[:class:`ComplexValue`] (optional)
+:param meta: :class:`ResourceMeta` (optional)
+  Container for the group identifier. Workspace local versus account.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`GroupSchema`] (optional)
+  The schema of the group.
+
+:returns: :class:`Group`
+
 
     .. py:method:: delete(id: str)
 
         Delete a group.
-        
-        Deletes a group from the Databricks account.
-        
-        :param id: str
-          Unique ID for a group in the Databricks account.
-        
-        
-        
+
+Deletes a group from the Databricks account.
+
+:param id: str
+  Unique ID for a group in the Databricks account.
+
+
+
 
     .. py:method:: get(id: str) -> Group
 
         Get group details.
-        
-        Gets the information for a specific group in the Databricks account.
-        
-        :param id: str
-          Unique ID for a group in the Databricks account.
-        
-        :returns: :class:`Group`
-        
+
+Gets the information for a specific group in the Databricks account.
+
+:param id: str
+  Unique ID for a group in the Databricks account.
+
+:returns: :class:`Group`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group]
 
         List group details.
-        
-        Gets all details of the groups associated with the Databricks account.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page. Default is 10000.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`Group`
-        
+
+Gets all details of the groups associated with the Databricks account.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page. Default is 10000.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`Group`
+
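+A minimal sketch of filtering the listing with a simple SCIM expression; the name prefix is
+illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+
+    a = AccountClient()
+
+    # Only simple SCIM expressions (eq, co, sw, ne, combined with and/or) are supported.
+    for group in a.groups.list(filter='displayName sw "sdk-"', attributes="id,displayName"):
+        print(group.id, group.display_name)
+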
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
         Update group details.
-        
-        Partially updates the details of a group.
-        
-        :param id: str
-          Unique ID for a group in the Databricks account.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates the details of a group.
+
+:param id: str
+  Unique ID for a group in the Databricks account.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
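+A minimal sketch of a SCIM patch that adds a member; the group and user IDs are placeholders:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+    from databricks.sdk.service import iam
+
+    a = AccountClient()
+
+    a.groups.patch(id="123",  # placeholder group ID
+                   operations=[
+                       iam.Patch(op=iam.PatchOp.ADD,
+                                 path="members",
+                                 value=[{"value": "456"}])  # placeholder user ID
+                   ],
+                   schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
+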
 
     .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]])
 
         Replace a group.
-        
-        Updates the details of a group by replacing the entire group entity.
-        
-        :param id: str
-          Databricks group ID
-        :param display_name: str (optional)
-          String that represents a human-readable group name
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-          values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param members: List[:class:`ComplexValue`] (optional)
-        :param meta: :class:`ResourceMeta` (optional)
-          Container for the group identifier. Workspace local versus account.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`GroupSchema`] (optional)
-          The schema of the group.
-        
-        
-        
\ No newline at end of file
+
+Updates the details of a group by replacing the entire group entity.
+
+:param id: str
+  Databricks group ID
+:param display_name: str (optional)
+  String that represents a human-readable group name
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+  values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param members: List[:class:`ComplexValue`] (optional)
+:param meta: :class:`ResourceMeta` (optional)
+  Container for the group identifier. Indicates whether the group is workspace-local or account-level.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`GroupSchema`] (optional)
+  The schema of the group.
+
+
diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst
index 0631386a1..0f2f5b156 100644
--- a/docs/account/iam/service_principals.rst
+++ b/docs/account/iam/service_principals.rst
@@ -5,10 +5,10 @@
 .. py:class:: AccountServicePrincipalsAPI
 
     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
-    Databricks recommends creating service principals to run production jobs or modify production data. If all
-    processes that act on production data run with service principals, interactive users do not need any
-    write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
-    production data by accident.
+Databricks recommends creating service principals to run production jobs or modify production data. If all
+processes that act on production data run with service principals, interactive users do not need any
+write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
+production data by accident.
 
     .. py:method:: create( [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) -> ServicePrincipal
 
@@ -29,43 +29,43 @@
             a.service_principals.delete(id=sp_create.id)
 
         Create a service principal.
-        
-        Creates a new service principal in the Databricks account.
-        
-        :param active: bool (optional)
-          If this user is active
-        :param application_id: str (optional)
-          UUID relating to the service principal
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-          supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks service principal ID.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-          The schema of the List response.
-        
-        :returns: :class:`ServicePrincipal`
-        
+
+Creates a new service principal in the Databricks account.
+
+:param active: bool (optional)
+  If this user is active
+:param application_id: str (optional)
+  UUID relating to the service principal
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+  supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks service principal ID.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+  The schema of the List response.
+
+:returns: :class:`ServicePrincipal`
+
 
     .. py:method:: delete(id: str)
 
         Delete a service principal.
-        
-        Delete a single service principal in the Databricks account.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks account.
-        
-        
-        
+
+Delete a single service principal in the Databricks account.
+
+:param id: str
+  Unique ID for a service principal in the Databricks account.
+
+
+
 
     .. py:method:: get(id: str) -> ServicePrincipal
 
@@ -88,14 +88,14 @@
             a.service_principals.delete(id=sp_create.id)
 
         Get service principal details.
-        
-        Gets the details for a single service principal define in the Databricks account.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks account.
-        
-        :returns: :class:`ServicePrincipal`
-        
+
+Gets the details for a single service principal defined in the Databricks account.
+
+:param id: str
+  Unique ID for a service principal in the Databricks account.
+
+:returns: :class:`ServicePrincipal`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[ServicePrincipal]
 
@@ -120,31 +120,31 @@
             a.service_principals.delete(id=sp_create.id)
 
         List service principals.
-        
-        Gets the set of service principals associated with a Databricks account.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page. Default is 10000.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`ServicePrincipal`
-        
+
+Gets the set of service principals associated with a Databricks account.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page. Default is 10000.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`ServicePrincipal`
+
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -172,17 +172,17 @@
             a.service_principals.delete(id=sp_create.id)
 
         Update service principal details.
-        
-        Partially updates the details of a single service principal in the Databricks account.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks account.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates the details of a single service principal in the Databricks account.
+
+:param id: str
+  Unique ID for a service principal in the Databricks account.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
 
     .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]])
 
@@ -207,30 +207,29 @@
             a.service_principals.delete(id=sp_create.id)
 
         Replace service principal.
-        
-        Updates the details of a single service principal.
-        
-        This action replaces the existing service principal with the same name.
-        
-        :param id: str
-          Databricks service principal ID.
-        :param active: bool (optional)
-          If this user is active
-        :param application_id: str (optional)
-          UUID relating to the service principal
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-          supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-          The schema of the List response.
-        
-        
-        
\ No newline at end of file
+
+Updates the details of a single service principal.
+
+This action replaces the existing service principal with the same name.
+
+:param id: str
+  Databricks service principal ID.
+:param active: bool (optional)
+  If this user is active
+:param application_id: str (optional)
+  UUID relating to the service principal
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+  supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+  The schema of the List response.
+
+
diff --git a/docs/account/iam/users.rst b/docs/account/iam/users.rst
index 4b8b5bb08..77c96d67c 100644
--- a/docs/account/iam/users.rst
+++ b/docs/account/iam/users.rst
@@ -5,14 +5,14 @@
 .. py:class:: AccountUsersAPI
 
     User identities recognized by Databricks and represented by email addresses.
-    
-    Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
-    provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your
-    identity provider to create users and groups in Databricks account and give them the proper level of
-    access. When a user leaves your organization or no longer needs access to Databricks account, admins can
-    terminate the user in your identity provider and that user’s account will also be removed from
-    Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from
-    accessing sensitive data.
+
+Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
+provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your
+identity provider to create users and groups in Databricks account and give them the proper level of
+access. When a user leaves your organization or no longer needs access to Databricks account, admins can
+terminate the user in your identity provider and that user’s account will also be removed from
+Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from
+accessing sensitive data.
 
     .. py:method:: create( [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) -> User
 
@@ -33,40 +33,40 @@
             a.users.delete(id=user.id)
 
         Create a new user.
-        
-        Creates a new user in the Databricks account. This new user will also be added to the Databricks
-        account.
-        
-        :param active: bool (optional)
-          If this user is active
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names. For example `John Smith`. This
-          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-          Account SCIM APIs to update `displayName`.
-          
-          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-        :param emails: List[:class:`ComplexValue`] (optional)
-          All the emails associated with the Databricks user.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-          External ID is not currently supported. It is reserved for future use.
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-          be ignored.
-        :param name: :class:`Name` (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`UserSchema`] (optional)
-          The schema of the user.
-        :param user_name: str (optional)
-          Email address of the Databricks user.
-        
-        :returns: :class:`User`
-        
+
+Creates a new user in the Databricks account.
+
+:param active: bool (optional)
+  If this user is active
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names. For example `John Smith`. This
+  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+  Account SCIM APIs to update `displayName`.
+  
+  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+:param emails: List[:class:`ComplexValue`] (optional)
+  All the emails associated with the Databricks user.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+  External ID is not currently supported. It is reserved for future use.
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+  be ignored.
+:param name: :class:`Name` (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`UserSchema`] (optional)
+  The schema of the user.
+:param user_name: str (optional)
+  Email address of the Databricks user.
+
+:returns: :class:`User`
+
 
     .. py:method:: delete(id: str)
 
@@ -86,15 +86,15 @@
             a.users.delete(id=user.id)
 
         Delete a user.
-        
-        Deletes a user. Deleting a user from a Databricks account also removes objects associated with the
-        user.
-        
-        :param id: str
-          Unique ID for a user in the Databricks account.
-        
-        
-        
+
+Deletes a user. Deleting a user from a Databricks account also removes objects associated with the
+user.
+
+:param id: str
+  Unique ID for a user in the Databricks account.
+
+
+
 
     .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User
 
@@ -117,64 +117,64 @@
             a.users.delete(id=user.id)
 
         Get user details.
-        
-        Gets information for a specific user in Databricks account.
-        
-        :param id: str
-          Unique ID for a user in the Databricks account.
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page. Default is 10000.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-          `name.givenName`, and `emails`.
-        :param sort_order: :class:`GetSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: :class:`User`
-        
+
+Gets information for a specific user in Databricks account.
+
+:param id: str
+  Unique ID for a user in the Databricks account.
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page. Default is 10000.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+  `name.givenName`, and `emails`.
+:param sort_order: :class:`GetSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: :class:`User`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User]
 
         List users.
-        
-        Gets details for all the users associated with a Databricks account.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page. Default is 10000.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-          `name.givenName`, and `emails`.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`User`
-        
+
+Gets details for all the users associated with a Databricks account.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page. Default is 10000.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+  `name.givenName`, and `emails`.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`User`
+
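+A minimal sketch of looking up a user by email; the address is illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+
+    a = AccountClient()
+
+    # userName holds the user's email address; eq performs an exact match.
+    for user in a.users.list(filter='userName eq "jane@example.com"', attributes="id,userName"):
+        print(user.id, user.user_name)
+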
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -203,51 +203,50 @@
             a.users.delete(id=user.id)
 
         Update user details.
-        
-        Partially updates a user resource by applying the supplied operations on specific user attributes.
-        
-        :param id: str
-          Unique ID for a user in the Databricks account.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates a user resource by applying the supplied operations on specific user attributes.
+
+:param id: str
+  Unique ID for a user in the Databricks account.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
 
     .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]])
 
         Replace a user.
-        
-        Replaces a user's information with the data supplied in request.
-        
-        :param id: str
-          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-          be ignored.
-        :param active: bool (optional)
-          If this user is active
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names. For example `John Smith`. This
-          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-          Account SCIM APIs to update `displayName`.
-          
-          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-        :param emails: List[:class:`ComplexValue`] (optional)
-          All the emails associated with the Databricks user.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-          External ID is not currently supported. It is reserved for future use.
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param name: :class:`Name` (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`UserSchema`] (optional)
-          The schema of the user.
-        :param user_name: str (optional)
-          Email address of the Databricks user.
-        
-        
-        
\ No newline at end of file
+
+Replaces a user's information with the data supplied in request.
+
+:param id: str
+  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+  be ignored.
+:param active: bool (optional)
+  If this user is active
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names. For example `John Smith`. This
+  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+  Account SCIM APIs to update `displayName`.
+  
+  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+:param emails: List[:class:`ComplexValue`] (optional)
+  All the emails associated with the Databricks user.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+  External ID is not currently supported. It is reserved for future use.
+:param groups: List[:class:`ComplexValue`] (optional)
+:param name: :class:`Name` (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`UserSchema`] (optional)
+  The schema of the user.
+:param user_name: str (optional)
+  Email address of the Databricks user.
+
+
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 697f0a5da..a6e912d93 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -5,34 +5,34 @@
 .. py:class:: WorkspaceAssignmentAPI
 
     The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your
-    account.
+account.
 
     .. py:method:: delete(workspace_id: int, principal_id: int)
 
         Delete permissions assignment.
-        
-        Deletes the workspace permissions assignment in a given account and workspace for the specified
-        principal.
-        
-        :param workspace_id: int
-          The workspace ID for the account.
-        :param principal_id: int
-          The ID of the user, service principal, or group.
-        
-        
-        
+
+Deletes the workspace permissions assignment in a given account and workspace for the specified
+principal.
+
+:param workspace_id: int
+  The workspace ID for the account.
+:param principal_id: int
+  The ID of the user, service principal, or group.
+
+
+
 
     .. py:method:: get(workspace_id: int) -> WorkspacePermissions
 
         List workspace permissions.
-        
-        Get an array of workspace permissions for the specified account and workspace.
-        
-        :param workspace_id: int
-          The workspace ID.
-        
-        :returns: :class:`WorkspacePermissions`
-        
+
+Get an array of workspace permissions for the specified account and workspace.
+
+:param workspace_id: int
+  The workspace ID.
+
+:returns: :class:`WorkspacePermissions`
+
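+A minimal sketch; the workspace ID is a placeholder:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+
+    a = AccountClient()
+
+    permissions = a.workspace_assignment.get(workspace_id=1234567890)  # placeholder workspace ID
+    print(permissions)
+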
 
     .. py:method:: list(workspace_id: int) -> Iterator[PermissionAssignment]
 
@@ -52,14 +52,14 @@
             all = a.workspace_assignment.list(workspace_id=workspace_id)
 
         Get permission assignments.
-        
-        Get the permission assignments for the specified Databricks account and Databricks workspace.
-        
-        :param workspace_id: int
-          The workspace ID for the account.
-        
-        :returns: Iterator over :class:`PermissionAssignment`
-        
+
+Get the permission assignments for the specified Databricks account and Databricks workspace.
+
+:param workspace_id: int
+  The workspace ID for the account.
+
+:returns: Iterator over :class:`PermissionAssignment`
+
 
     .. py:method:: update(workspace_id: int, principal_id: int [, permissions: Optional[List[WorkspacePermission]]]) -> PermissionAssignment
 
@@ -87,20 +87,19 @@
                                               permissions=[iam.WorkspacePermission.USER])
 
         Create or update permissions assignment.
-        
-        Creates or updates the workspace permissions assignment in a given account and workspace for the
-        specified principal.
-        
-        :param workspace_id: int
-          The workspace ID.
-        :param principal_id: int
-          The ID of the user, service principal, or group.
-        :param permissions: List[:class:`WorkspacePermission`] (optional)
-          Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
-          (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
-          will be ignored. Note that excluding this field, or providing unsupported values, will have the same
-          effect as providing an empty list, which will result in the deletion of all permissions for the
-          principal.
-        
-        :returns: :class:`PermissionAssignment`
-        
\ No newline at end of file
+
+Creates or updates the workspace permissions assignment in a given account and workspace for the
+specified principal.
+
+:param workspace_id: int
+  The workspace ID.
+:param principal_id: int
+  The ID of the user, service principal, or group.
+:param permissions: List[:class:`WorkspacePermission`] (optional)
+  Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
+  (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
+  will be ignored. Note that excluding this field, or providing unsupported values, will have the same
+  effect as providing an empty list, which will result in the deletion of all permissions for the
+  principal.
+
+:returns: :class:`PermissionAssignment`
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 7043a343b..6d85306f9 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -5,89 +5,88 @@
 .. py:class:: CustomAppIntegrationAPI
 
     These APIs enable administrators to manage custom OAuth app integrations, which is required for
-    adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
+adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
 
     .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput
 
         Create Custom OAuth App Integration.
-        
-        Create Custom OAuth App Integration.
-        
-        You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
-        
-        :param confidential: bool (optional)
-          This field indicates whether an OAuth client secret is required to authenticate this client.
-        :param name: str (optional)
-          Name of the custom OAuth app
-        :param redirect_urls: List[str] (optional)
-          List of OAuth redirect urls
-        :param scopes: List[str] (optional)
-          OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
-          profile, email.
-        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy
-        :param user_authorized_scopes: List[str] (optional)
-          Scopes that will need to be consented by end user to mint the access token. If the user does not
-          authorize the access token will not be minted. Must be a subset of scopes.
-        
-        :returns: :class:`CreateCustomAppIntegrationOutput`
-        
+
+Create Custom OAuth App Integration.
+
+You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
+
+:param confidential: bool (optional)
+  This field indicates whether an OAuth client secret is required to authenticate this client.
+:param name: str (optional)
+  Name of the custom OAuth app
+:param redirect_urls: List[str] (optional)
+  List of OAuth redirect urls
+:param scopes: List[str] (optional)
+  OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
+  profile, email.
+:param token_access_policy: :class:`TokenAccessPolicy` (optional)
+  Token access policy
+:param user_authorized_scopes: List[str] (optional)
+  Scopes that the end user must consent to before the access token is minted. If the user does not
+  authorize, the access token will not be minted. Must be a subset of scopes.
+
+:returns: :class:`CreateCustomAppIntegrationOutput`
+
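+A minimal sketch of registering a confidential OAuth client; the name and redirect URL are
+illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+
+    a = AccountClient()
+
+    created = a.custom_app_integration.create(name="sdk-example-app",
+                                              redirect_urls=["https://example.com/callback"],
+                                              confidential=True,
+                                              scopes=["all-apis"])
+    # For confidential clients, the client secret is only returned at creation time.
+    print(created.integration_id, created.client_id)
+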
 
     .. py:method:: delete(integration_id: str)
 
         Delete Custom OAuth App Integration.
-        
-        Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
-        :method:CustomAppIntegration/get.
-        
-        :param integration_id: str
-        
-        
-        
+
+Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
+:method:CustomAppIntegration/get.
+
+:param integration_id: str
+
+
+
 
     .. py:method:: get(integration_id: str) -> GetCustomAppIntegrationOutput
 
         Get OAuth Custom App Integration.
-        
-        Gets the Custom OAuth App Integration for the given integration id.
-        
-        :param integration_id: str
-          The OAuth app integration ID.
-        
-        :returns: :class:`GetCustomAppIntegrationOutput`
-        
+
+Gets the Custom OAuth App Integration for the given integration id.
+
+:param integration_id: str
+  The OAuth app integration ID.
+
+:returns: :class:`GetCustomAppIntegrationOutput`
+
 
     .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput]
 
         Get custom oauth app integrations.
-        
-        Get the list of custom OAuth app integrations for the specified Databricks account
-        
-        :param include_creator_username: bool (optional)
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
-        
+
+Get the list of custom OAuth app integrations for the specified Databricks account
+
+:param include_creator_username: bool (optional)
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`GetCustomAppIntegrationOutput`
+
 
     .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]])
 
         Updates Custom OAuth App Integration.
-        
-        Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
-        via :method:CustomAppIntegration/get.
-        
-        :param integration_id: str
-        :param redirect_urls: List[str] (optional)
-          List of OAuth redirect urls to be updated in the custom OAuth app integration
-        :param scopes: List[str] (optional)
-          List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs
-          this will fully replace the existing values instead of appending
-        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the custom OAuth app integration
-        :param user_authorized_scopes: List[str] (optional)
-          Scopes that will need to be consented by end user to mint the access token. If the user does not
-          authorize the access token will not be minted. Must be a subset of scopes.
-        
-        
-        
\ No newline at end of file
+
+Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
+via :method:CustomAppIntegration/get.
+
+:param integration_id: str
+:param redirect_urls: List[str] (optional)
+  List of OAuth redirect urls to be updated in the custom OAuth app integration
+:param scopes: List[str] (optional)
+  List of OAuth scopes to be updated in the custom OAuth app integration; as with redirect URIs, this
+  fully replaces the existing values instead of appending.
+:param token_access_policy: :class:`TokenAccessPolicy` (optional)
+  Token access policy to be updated in the custom OAuth app integration
+:param user_authorized_scopes: List[str] (optional)
+  Scopes that the end user must consent to before the access token is minted. If the user does not
+  authorize, the access token will not be minted. Must be a subset of scopes.
+
+
diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst
index c95bf563c..c1ea393c7 100644
--- a/docs/account/oauth2/federation_policy.rst
+++ b/docs/account/oauth2/federation_policy.rst
@@ -5,101 +5,100 @@
 .. py:class:: AccountFederationPolicyAPI
 
     These APIs manage account federation policies.
-    
-    Account federation policies allow users and service principals in your Databricks account to securely
-    access Databricks APIs using tokens from your trusted identity providers (IdPs).
-    
-    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
-    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
-    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
-    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
-    synchronized into your Databricks account.
-    
-    Token federation is configured in your Databricks account using an account federation policy. An account
-    federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
-    how to determine which Databricks user, or subject, a token is issued for
-    
-    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
-    in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
-    token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
-    represent the recipient of the token. As long as the audience in the token matches at least one audience
-    in the policy, the token is considered a match. If unspecified, the default value is your Databricks
-    account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
-    the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
-    public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
-    Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
-    strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
-    
-    An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
-    subject_claim: "sub" ```
-    
-    An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
-    `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
-    "username@mycompany.com" } ```
-    
-    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
-    your users do not already have the ability to generate tokens that are compatible with your federation
-    policy.
-    
-    You do not need to configure an OAuth application in Databricks to use token federation.
-    
-    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
+
+Account federation policies allow users and service principals in your Databricks account to securely
+access Databricks APIs using tokens from your trusted identity providers (IdPs).
+
+With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+synchronized into your Databricks account.
+
+Token federation is configured in your Databricks account using an account federation policy. An account
+federation policy specifies:
+
+* which IdP, or issuer, your Databricks account should accept tokens from
+* how to determine which Databricks user, or subject, a token is issued for
+
+To configure a federation policy, you provide the following:
+
+* The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an
+  https URL that identifies your IdP.
+* The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier is
+  intended to represent the recipient of the token. As long as the audience in the token matches at
+  least one audience in the policy, the token is considered a match. If unspecified, the default value
+  is your Databricks account id.
+* The __subject claim__, which indicates which token claim contains the Databricks username of the
+  user the token was issued for. If unspecified, the default value is “sub”.
+* Optionally, the public keys used to validate the signature of your tokens, in JWKS format. If
+  unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s well
+  known endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for
+  discovering public keys.
+
+An example federation policy is:
+
+.. code-block::
+
+    issuer: "https://idp.mycompany.com/oidc"
+    audiences: ["databricks"]
+    subject_claim: "sub"
+
+An example JWT token body that matches this policy and could be used to authenticate to Databricks as
+user `username@mycompany.com` is:
+
+.. code-block::
+
+    {
+      "iss": "https://idp.mycompany.com/oidc",
+      "aud": "databricks",
+      "sub": "username@mycompany.com"
+    }
+
+You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+your users do not already have the ability to generate tokens that are compatible with your federation
+policy.
+
+You do not need to configure an OAuth application in Databricks to use token federation.
+
+[SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
 
     .. py:method:: create( [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
 
         Create account federation policy.
-        
-        :param policy: :class:`FederationPolicy` (optional)
-        :param policy_id: str (optional)
-          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
-          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-        
-        :returns: :class:`FederationPolicy`
-        
+
+:param policy: :class:`FederationPolicy` (optional)
+:param policy_id: str (optional)
+  The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+  characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+
+:returns: :class:`FederationPolicy`
+
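+A minimal sketch that mirrors the example policy above; the issuer and audience are illustrative,
+and ``federation_policy`` is assumed to be the accessor for this API on :class:`AccountClient`:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+    from databricks.sdk.service import oauth2
+
+    a = AccountClient()
+
+    policy = a.federation_policy.create(policy=oauth2.FederationPolicy(
+        oidc_policy=oauth2.OidcFederationPolicy(issuer="https://idp.mycompany.com/oidc",
+                                                audiences=["databricks"],
+                                                subject_claim="sub")))
+    print(policy)
+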
 
     .. py:method:: delete(policy_id: str)
 
         Delete account federation policy.
-        
-        :param policy_id: str
-          The identifier for the federation policy.
-        
-        
-        
+
+:param policy_id: str
+  The identifier for the federation policy.
+
+
+
 
     .. py:method:: get(policy_id: str) -> FederationPolicy
 
         Get account federation policy.
-        
-        :param policy_id: str
-          The identifier for the federation policy.
-        
-        :returns: :class:`FederationPolicy`
-        
+
+:param policy_id: str
+  The identifier for the federation policy.
+
+:returns: :class:`FederationPolicy`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
 
         List account federation policies.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`FederationPolicy`
-        
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`FederationPolicy`
+
 
     .. py:method:: update(policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update account federation policy.
-        
-        :param policy_id: str
-          The identifier for the federation policy.
-        :param policy: :class:`FederationPolicy` (optional)
-        :param update_mask: str (optional)
-          The field mask specifies which fields of the policy to update. To specify multiple fields in the
-          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
-          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
-          in the update request will overwrite the corresponding fields in the existing policy. Example value:
-          'description,oidc_policy.audiences'.
-        
-        :returns: :class:`FederationPolicy`
-        
\ No newline at end of file
+
+:param policy_id: str
+  The identifier for the federation policy.
+:param policy: :class:`FederationPolicy` (optional)
+:param update_mask: str (optional)
+  The field mask specifies which fields of the policy to update. To specify multiple fields in the
+  field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+  should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+  in the update request will overwrite the corresponding fields in the existing policy. Example value:
+  'description,oidc_policy.audiences'.
+
+:returns: :class:`FederationPolicy`
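+
+A minimal sketch of a masked update; the policy ID is a placeholder:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+    from databricks.sdk.service import oauth2
+
+    a = AccountClient()
+
+    # Only the description field is replaced; all other fields are left untouched.
+    a.federation_policy.update(policy_id="my-policy",  # placeholder
+                               policy=oauth2.FederationPolicy(description="updated description"),
+                               update_mask="description")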
diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst
index 18c07c326..e0dc2e303 100644
--- a/docs/account/oauth2/o_auth_published_apps.rst
+++ b/docs/account/oauth2/o_auth_published_apps.rst
@@ -5,19 +5,18 @@
 .. py:class:: OAuthPublishedAppsAPI
 
     These APIs enable administrators to view all the available published OAuth applications in Databricks.
-    Administrators can add the published OAuth applications to their account through the OAuth Published App
-    Integration APIs.
+Administrators can add the published OAuth applications to their account through the OAuth Published App
+Integration APIs.
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PublishedAppOutput]
 
         Get all the published OAuth apps.
-        
-        Get all the available published OAuth apps in Databricks.
-        
-        :param page_size: int (optional)
-          The max number of OAuth published apps to return in one page.
-        :param page_token: str (optional)
-          A token that can be used to get the next page of results.
-        
-        :returns: Iterator over :class:`PublishedAppOutput`
-        
\ No newline at end of file
+
+Get all the available published OAuth apps in Databricks.
+
+:param page_size: int (optional)
+  The max number of OAuth published apps to return in one page.
+:param page_token: str (optional)
+  A token that can be used to get the next page of results.
+
+:returns: Iterator over :class:`PublishedAppOutput`
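+
+A minimal sketch of paging through the published apps; the page size is illustrative and the
+iterator fetches subsequent pages transparently:
+
+.. code-block:: python
+
+    from databricks.sdk import AccountClient
+
+    a = AccountClient()
+
+    for app in a.o_auth_published_apps.list(page_size=50):
+        print(app.app_id, app.name)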
diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst
index f59f2c4aa..11135e341 100644
--- a/docs/account/oauth2/published_app_integration.rst
+++ b/docs/account/oauth2/published_app_integration.rst
@@ -5,69 +5,68 @@
 .. py:class:: PublishedAppIntegrationAPI
 
     These APIs enable administrators to manage published OAuth app integrations, which is required for
-    adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
+adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
 
     .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput
 
         Create Published OAuth App Integration.
-        
-        Create Published OAuth App Integration.
-        
-        You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
-        
-        :param app_id: str (optional)
-          App id of the OAuth published app integration. For example power-bi, tableau-deskop
-        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy
-        
-        :returns: :class:`CreatePublishedAppIntegrationOutput`
-        
+
+Create Published OAuth App Integration.
+
+You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
+
+:param app_id: str (optional)
+  App id of the OAuth published app integration. For example: power-bi, tableau-desktop.
+:param token_access_policy: :class:`TokenAccessPolicy` (optional)
+  Token access policy
+
+:returns: :class:`CreatePublishedAppIntegrationOutput`
+
 
     .. py:method:: delete(integration_id: str)
 
         Delete Published OAuth App Integration.
-        
-        Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
-        integration via :method:PublishedAppIntegration/get.
-        
-        :param integration_id: str
-        
-        
-        
+
+Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
+integration via :method:PublishedAppIntegration/get.
+
+:param integration_id: str
+
+
+
 
     .. py:method:: get(integration_id: str) -> GetPublishedAppIntegrationOutput
 
         Get OAuth Published App Integration.
-        
-        Gets the Published OAuth App Integration for the given integration id.
-        
-        :param integration_id: str
-        
-        :returns: :class:`GetPublishedAppIntegrationOutput`
-        
+
+Gets the Published OAuth App Integration for the given integration id.
+
+:param integration_id: str
+
+:returns: :class:`GetPublishedAppIntegrationOutput`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput]
 
         Get published oauth app integrations.
-        
-        Get the list of published OAuth app integrations for the specified Databricks account
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
-        
+
+Get the list of published OAuth app integrations for the specified Databricks account
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
+
 
     .. py:method:: update(integration_id: str [, token_access_policy: Optional[TokenAccessPolicy]])
 
         Updates Published OAuth App Integration.
-        
-        Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
-        integration via :method:PublishedAppIntegration/get.
-        
-        :param integration_id: str
-        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
-          Token access policy to be updated in the published OAuth app integration
-        
-        
-        
\ No newline at end of file
+
+Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
+integration via :method:PublishedAppIntegration/get.
+
+:param integration_id: str
+:param token_access_policy: :class:`TokenAccessPolicy` (optional)
+  Token access policy to be updated in the published OAuth app integration
+
+
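
A lifecycle sketch of the create/get/delete methods documented above, assuming the `a.published_app_integration` property name implied by this file's module path:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Create an integration for a published app, look it up, then remove it.
created = a.published_app_integration.create(app_id="tableau-desktop")
integration = a.published_app_integration.get(integration_id=created.integration_id)
print(integration.integration_id)
a.published_app_integration.delete(integration_id=created.integration_id)
```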
diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst
index 2e0577ba4..66ed4505f 100644
--- a/docs/account/oauth2/service_principal_federation_policy.rst
+++ b/docs/account/oauth2/service_principal_federation_policy.rst
@@ -5,111 +5,110 @@
 .. py:class:: ServicePrincipalFederationPolicyAPI
 
     These APIs manage service principal federation policies.
-    
-    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
-    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
-    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
-    Databricks service principal, using tokens provided by the workload runtime.
-    
-    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
-    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
-    possible. Workload Identity Federation is supported by many popular services, including Github Actions,
-    Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
-    
-    Workload identity federation is configured in your Databricks account using a service principal federation
-    policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
-    allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
-    Databricks service principal
-    
-    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
-    in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
-    workload identity provider. * The required token __subject__, as specified in the “sub” claim of
-    workload identity tokens. The subject uniquely identifies the workload in the workload runtime
-    environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
-    tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
-    token matches at least one audience in the policy, the token is considered a match. If unspecified, the
-    default value is your Databricks account id. * Optionally, the public keys used to validate the signature
-    of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
-    fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
-    the issuer’s well known endpoint for discovering public keys.
-    
-    An example service principal federation policy, for a Github Actions workload, is: ``` issuer:
-    "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
-    "repo:my-github-org/my-repo:environment:prod" ```
-    
-    An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
-    { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
-    "repo:my-github-org/my-repo:environment:prod" } ```
-    
-    You may also need to configure the workload runtime to generate tokens for your workloads.
-    
-    You do not need to configure an OAuth application in Databricks to use token federation.
+
+Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+Databricks service principal, using tokens provided by the workload runtime.
+
+Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+possible. Workload Identity Federation is supported by many popular services, including GitHub Actions,
+Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+
+Workload identity federation is configured in your Databricks account using a service principal federation
+policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
+allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
+Databricks service principal
+
+To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
+workload identity provider. * The required token __subject__, as specified in the “sub” claim of
+workload identity tokens. The subject uniquely identifies the workload in the workload runtime
+environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
+tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
+token matches at least one audience in the policy, the token is considered a match. If unspecified, the
+default value is your Databricks account id. * Optionally, the public keys used to validate the signature
+of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
+fetches the public keys from the issuer’s well-known endpoint. Databricks strongly recommends relying on
+the issuer’s well-known endpoint for discovering public keys.
+
+An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer:
+"https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
+"repo:my-github-org/my-repo:environment:prod" ```
+
+An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
+{ "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
+"repo:my-github-org/my-repo:environment:prod" } ```
+
+You may also need to configure the workload runtime to generate tokens for your workloads.
+
+You do not need to configure an OAuth application in Databricks to use token federation.
 
     .. py:method:: create(service_principal_id: int [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
 
         Create service principal federation policy.
-        
-        :param service_principal_id: int
-          The service principal id for the federation policy.
-        :param policy: :class:`FederationPolicy` (optional)
-        :param policy_id: str (optional)
-          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
-          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-        
-        :returns: :class:`FederationPolicy`
-        
+
+:param service_principal_id: int
+  The service principal id for the federation policy.
+:param policy: :class:`FederationPolicy` (optional)
+:param policy_id: str (optional)
+  The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+  characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+
+:returns: :class:`FederationPolicy`
+
 
     .. py:method:: delete(service_principal_id: int, policy_id: str)
 
         Delete service principal federation policy.
-        
-        :param service_principal_id: int
-          The service principal id for the federation policy.
-        :param policy_id: str
-          The identifier for the federation policy.
-        
-        
-        
+
+:param service_principal_id: int
+  The service principal id for the federation policy.
+:param policy_id: str
+  The identifier for the federation policy.
+
+
+
 
     .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy
 
         Get service principal federation policy.
-        
-        :param service_principal_id: int
-          The service principal id for the federation policy.
-        :param policy_id: str
-          The identifier for the federation policy.
-        
-        :returns: :class:`FederationPolicy`
-        
+
+:param service_principal_id: int
+  The service principal id for the federation policy.
+:param policy_id: str
+  The identifier for the federation policy.
+
+:returns: :class:`FederationPolicy`
+
 
     .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
 
         List service principal federation policies.
-        
-        :param service_principal_id: int
-          The service principal id for the federation policy.
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`FederationPolicy`
-        
+
+:param service_principal_id: int
+  The service principal id for the federation policy.
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`FederationPolicy`
+
 
     .. py:method:: update(service_principal_id: int, policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update service principal federation policy.
-        
-        :param service_principal_id: int
-          The service principal id for the federation policy.
-        :param policy_id: str
-          The identifier for the federation policy.
-        :param policy: :class:`FederationPolicy` (optional)
-        :param update_mask: str (optional)
-          The field mask specifies which fields of the policy to update. To specify multiple fields in the
-          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
-          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
-          in the update request will overwrite the corresponding fields in the existing policy. Example value:
-          'description,oidc_policy.audiences'.
-        
-        :returns: :class:`FederationPolicy`
-        
\ No newline at end of file
+
+:param service_principal_id: int
+  The service principal id for the federation policy.
+:param policy_id: str
+  The identifier for the federation policy.
+:param policy: :class:`FederationPolicy` (optional)
+:param update_mask: str (optional)
+  The field mask specifies which fields of the policy to update. To specify multiple fields in the
+  field mask, use a comma as the separator (no space). The special value '*' indicates that all fields
+  should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+  in the update request will overwrite the corresponding fields in the existing policy. Example value:
+  'description,oidc_policy.audiences'.
+
+:returns: :class:`FederationPolicy`
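
A sketch of creating the GitHub Actions example policy from the class docstring above; it assumes the `OidcFederationPolicy` dataclass in `databricks.sdk.service.oauth2` (the `oidc_policy` field is confirmed by the `update_mask` example) and a hypothetical service principal id:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

a = AccountClient()

# Mirror the example policy: issuer, allowed audiences, and subject all come
# straight from the docstring's GitHub Actions sample.
policy = a.service_principal_federation_policy.create(
    service_principal_id=12345,  # hypothetical service principal id
    policy=FederationPolicy(
        oidc_policy=OidcFederationPolicy(
            issuer="https://token.actions.githubusercontent.com",
            audiences=["https://github.com/my-github-org"],
            subject="repo:my-github-org/my-repo:environment:prod",
        )
    ),
)
print(policy)
```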
diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst
index 955d6da53..3e0bb9b74 100644
--- a/docs/account/oauth2/service_principal_secrets.rst
+++ b/docs/account/oauth2/service_principal_secrets.rst
@@ -5,59 +5,58 @@
 .. py:class:: ServicePrincipalSecretsAPI
 
     These APIs enable administrators to manage service principal secrets.
-    
-    You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be
-    used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using
-    OAuth tokens for service principals],
-    
-    In addition, the generated secrets can be used to configure the Databricks Terraform Provider to
-    authenticate with the service principal. For more information, see [Databricks Terraform Provider].
-    
-    [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
-    [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal
+
+You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be
+used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using
+OAuth tokens for service principals].
+
+In addition, the generated secrets can be used to configure the Databricks Terraform Provider to
+authenticate with the service principal. For more information, see [Databricks Terraform Provider].
+
+[Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
+[Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal
 
     .. py:method:: create(service_principal_id: int) -> CreateServicePrincipalSecretResponse
 
         Create service principal secret.
-        
-        Create a secret for the given service principal.
-        
-        :param service_principal_id: int
-          The service principal ID.
-        
-        :returns: :class:`CreateServicePrincipalSecretResponse`
-        
+
+Create a secret for the given service principal.
+
+:param service_principal_id: int
+  The service principal ID.
+
+:returns: :class:`CreateServicePrincipalSecretResponse`
+
 
     .. py:method:: delete(service_principal_id: int, secret_id: str)
 
         Delete service principal secret.
-        
-        Delete a secret from the given service principal.
-        
-        :param service_principal_id: int
-          The service principal ID.
-        :param secret_id: str
-          The secret ID.
-        
-        
-        
+
+Delete a secret from the given service principal.
+
+:param service_principal_id: int
+  The service principal ID.
+:param secret_id: str
+  The secret ID.
+
+
+
 
     .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo]
 
         List service principal secrets.
-        
-        List all secrets associated with the given service principal. This operation only returns information
-        about the secrets themselves and does not include the secret values.
-        
-        :param service_principal_id: int
-          The service principal ID.
-        :param page_token: str (optional)
-          An opaque page token which was the `next_page_token` in the response of the previous request to list
-          the secrets for this service principal. Provide this token to retrieve the next page of secret
-          entries. When providing a `page_token`, all other parameters provided to the request must match the
-          previous request. To list all of the secrets for a service principal, it is necessary to continue
-          requesting pages of entries until the response contains no `next_page_token`. Note that the number
-          of entries returned must not be used to determine when the listing is complete.
-        
-        :returns: Iterator over :class:`SecretInfo`
-        
\ No newline at end of file
+
+List all secrets associated with the given service principal. This operation only returns information
+about the secrets themselves and does not include the secret values.
+
+:param service_principal_id: int
+  The service principal ID.
+:param page_token: str (optional)
+  An opaque page token which was the `next_page_token` in the response of the previous request to list
+  the secrets for this service principal. Provide this token to retrieve the next page of secret
+  entries. When providing a `page_token`, all other parameters provided to the request must match the
+  previous request. To list all of the secrets for a service principal, it is necessary to continue
+  requesting pages of entries until the response contains no `next_page_token`. Note that the number
+  of entries returned must not be used to determine when the listing is complete.
+
+:returns: Iterator over :class:`SecretInfo`
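
A short sketch of the create and list calls documented above; the service principal id is hypothetical, and the returned objects are printed whole since only their documented behavior is assumed here:

```python
from databricks.sdk import AccountClient

a = AccountClient()
sp_id = 12345  # hypothetical numeric service principal id

# The secret value is returned only once, at creation time; list() exposes
# metadata about each secret but never the value itself.
created = a.service_principal_secrets.create(service_principal_id=sp_id)
print(created)

for info in a.service_principal_secrets.list(service_principal_id=sp_id):
    print(info)
```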
diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst
index 5255a6a29..a411febab 100644
--- a/docs/account/provisioning/credentials.rst
+++ b/docs/account/provisioning/credentials.rst
@@ -5,9 +5,9 @@
 .. py:class:: CredentialsAPI
 
     These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
-    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
-    new workspace. A credential configuration encapsulates this role information, and its ID is used when
-    creating a new workspace.
+service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+new workspace. A credential configuration encapsulates this role information, and its ID is used when
+creating a new workspace.
 
     .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
 
@@ -33,39 +33,39 @@
             a.credentials.delete(credentials_id=role.credentials_id)
 
         Create credential configuration.
-        
-        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
-        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
-        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
-        ID) in the returned credential object, and configure the required access policy.
-        
-        Save the response's `credentials_id` field, which is the ID for your new credential configuration
-        object.
-        
-        For information about how to create a new workspace with this API, see [Create a new workspace using
-        the Account API]
-        
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
-        :param credentials_name: str
-          The human-readable name of the credential configuration object.
-        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
-        
-        :returns: :class:`Credential`
-        
+
+Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+ID) in the returned credential object, and configure the required access policy.
+
+Save the response's `credentials_id` field, which is the ID for your new credential configuration
+object.
+
+For information about how to create a new workspace with this API, see [Create a new workspace using
+the Account API].
+
+[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+
+:param credentials_name: str
+  The human-readable name of the credential configuration object.
+:param aws_credentials: :class:`CreateCredentialAwsCredentials`
+
+:returns: :class:`Credential`
+
 
     .. py:method:: delete(credentials_id: str)
 
         Delete credential configuration.
-        
-        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
-        delete a credential that is associated with any workspace.
-        
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
-        
-        
-        
+
+Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+delete a credential that is associated with any workspace.
+
+:param credentials_id: str
+  Databricks Account API credential configuration ID
+
+
+
 
     .. py:method:: get(credentials_id: str) -> Credential
 
@@ -93,14 +93,14 @@
             a.credentials.delete(credentials_id=role.credentials_id)
 
         Get credential configuration.
-        
-        Gets a Databricks credential configuration object for an account, both specified by ID.
-        
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
-        
-        :returns: :class:`Credential`
-        
+
+Gets a Databricks credential configuration object for an account, both specified by ID.
+
+:param credentials_id: str
+  Databricks Account API credential configuration ID
+
+:returns: :class:`Credential`
+
 
     .. py:method:: list() -> Iterator[Credential]
 
@@ -116,8 +116,7 @@
             configs = a.credentials.list()
 
         Get all credential configurations.
-        
-        Gets all Databricks credential configurations associated with an account specified by ID.
-        
-        :returns: Iterator over :class:`Credential`
-        
\ No newline at end of file
+
+Gets all Databricks credential configurations associated with an account specified by ID.
+
+:returns: Iterator over :class:`Credential`
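
A sketch of registering a cross-account role as a credential configuration, per the create method above; the `CreateCredentialStsRole` helper from `databricks.sdk.service.provisioning` is assumed, and the role ARN is hypothetical:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import (CreateCredentialAwsCredentials,
                                                 CreateCredentialStsRole)

a = AccountClient()

# The cross-account role must already trust the Databricks external id and
# carry the required access policy; this ARN is illustrative only.
creds = a.credentials.create(
    credentials_name="my-credentials",
    aws_credentials=CreateCredentialAwsCredentials(
        sts_role=CreateCredentialStsRole(
            role_arn="arn:aws:iam::123456789012:role/databricks-crossaccount")),
)
# Save this id: workspace creation refers to it later.
print(creds.credentials_id)
```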
diff --git a/docs/account/provisioning/encryption_keys.rst b/docs/account/provisioning/encryption_keys.rst
index c711727c5..ad26bb033 100644
--- a/docs/account/provisioning/encryption_keys.rst
+++ b/docs/account/provisioning/encryption_keys.rst
@@ -5,18 +5,18 @@
 .. py:class:: EncryptionKeysAPI
 
     These APIs manage encryption key configurations for this workspace (optional). A key configuration
-    encapsulates the AWS KMS key information and some information about how the key configuration can be used.
-    There are two possible uses for key configurations:
-    
-    * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
-    the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can
-    be used to encrypt a workspace's DBFS and EBS data in the data plane.
-    
-    In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
-    feature is available if your account is on the E2 version of the platform. Updating a running workspace
-    with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you
-    have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact
-    your Databricks representative.
+encapsulates the AWS KMS key information and some information about how the key configuration can be used.
+There are two possible uses for key configurations:
+
+* Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
+the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can
+be used to encrypt a workspace's DBFS and EBS data in the data plane.
+
+In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
+feature is available if your account is on the E2 version of the platform. Updating a running workspace
+with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you
+have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact
+your Databricks representative.
 
     .. py:method:: create(use_cases: List[KeyUseCase] [, aws_key_info: Optional[CreateAwsKeyInfo], gcp_key_info: Optional[CreateGcpKeyInfo]]) -> CustomerManagedKey
 
@@ -40,41 +40,41 @@
             a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id)
 
         Create encryption key configuration.
-        
-        Creates a customer-managed key configuration object for an account, specified by ID. This operation
-        uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
-        customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
-        and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
-        specified as a workspace's customer-managed key for workspace storage, the key encrypts the
-        workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally,
-        cluster EBS volume data.
-        
-        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-        and AWS regions that currently support creation of Databricks workspaces.
-        
-        This operation is available only if your account is on the E2 version of the platform or on a select
-        custom plan that allows multiple workspaces per account.
-        
-        :param use_cases: List[:class:`KeyUseCase`]
-          The cases that the key can be used for.
-        :param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
-        :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
-        
-        :returns: :class:`CustomerManagedKey`
-        
+
+Creates a customer-managed key configuration object for an account, specified by ID. This operation
+uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
+customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
+and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
+specified as a workspace's customer-managed key for workspace storage, the key encrypts the
+workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally,
+cluster EBS volume data.
+
+**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+and AWS regions that currently support creation of Databricks workspaces.
+
+This operation is available only if your account is on the E2 version of the platform or on a select
+custom plan that allows multiple workspaces per account.
+
+:param use_cases: List[:class:`KeyUseCase`]
+  The cases that the key can be used for.
+:param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
+:param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
+
+:returns: :class:`CustomerManagedKey`
+
 
     .. py:method:: delete(customer_managed_key_id: str)
 
         Delete encryption key configuration.
-        
-        Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
-        that is associated with a running workspace.
-        
-        :param customer_managed_key_id: str
-          Databricks encryption key configuration ID.
-        
-        
-        
+
+Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
+that is associated with a running workspace.
+
+:param customer_managed_key_id: str
+  Databricks encryption key configuration ID.
+
+
+
 
     .. py:method:: get(customer_managed_key_id: str) -> CustomerManagedKey
 
@@ -100,25 +100,25 @@
             a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id)
 
         Get encryption key configuration.
-        
-        Gets a customer-managed key configuration object for an account, specified by ID. This operation
-        uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
-        customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
-        and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
-        specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3
-        bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume
-        data.
-        
-        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-        and AWS regions.
-        
-        This operation is available only if your account is on the E2 version of the platform.",
-        
-        :param customer_managed_key_id: str
-          Databricks encryption key configuration ID.
-        
-        :returns: :class:`CustomerManagedKey`
-        
+
+Gets a customer-managed key configuration object for an account, specified by ID. This operation
+uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
+customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
+and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
+specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3
+bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume
+data.
+
+**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+and AWS regions.
+
+This operation is available only if your account is on the E2 version of the platform.
+
+:param customer_managed_key_id: str
+  Databricks encryption key configuration ID.
+
+:returns: :class:`CustomerManagedKey`
+
 
     .. py:method:: list() -> Iterator[CustomerManagedKey]
 
@@ -134,17 +134,16 @@
             all = a.encryption_keys.list()
 
         Get all encryption key configurations.
-        
-        Gets all customer-managed key configuration objects for an account. If the key is specified as a
-        workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
-        notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
-        If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
-        workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
-        
-        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-        and AWS regions.
-        
-        This operation is available only if your account is on the E2 version of the platform.
-        
-        :returns: Iterator over :class:`CustomerManagedKey`
-        
\ No newline at end of file
+
+Gets all customer-managed key configuration objects for an account. If the key is specified as a
+workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
+notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
+If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
+workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
+
+**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+and AWS regions.
+
+This operation is available only if your account is on the E2 version of the platform.
+
+:returns: Iterator over :class:`CustomerManagedKey`
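
A sketch of registering a KMS key for both use cases named in the class docstring above; the `KeyUseCase` enum members and `CreateAwsKeyInfo` are assumed from `databricks.sdk.service.provisioning`, and the key ARN is hypothetical:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import CreateAwsKeyInfo, KeyUseCase

a = AccountClient()

# One key configuration can cover both managed services (control plane) and
# workspace storage (root S3 bucket and, optionally, EBS volumes).
key = a.encryption_keys.create(
    use_cases=[KeyUseCase.MANAGED_SERVICES, KeyUseCase.STORAGE],
    aws_key_info=CreateAwsKeyInfo(
        key_arn="arn:aws:kms:us-west-2:123456789012:key/1234abcd"),  # hypothetical ARN
)
print(key.customer_managed_key_id)
```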
diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst
index e7491f202..bfe9abfd4 100644
--- a/docs/account/provisioning/networks.rst
+++ b/docs/account/provisioning/networks.rst
@@ -5,7 +5,7 @@
 .. py:class:: NetworksAPI
 
     These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used when
-    creating a new workspace if you use customer-managed VPCs.
+creating a new workspace if you use customer-managed VPCs.
 
     .. py:method:: create(network_name: str [, gcp_network_info: Optional[GcpNetworkInfo], security_group_ids: Optional[List[str]], subnet_ids: Optional[List[str]], vpc_endpoints: Optional[NetworkVpcEndpoints], vpc_id: Optional[str]]) -> Network
 
@@ -27,47 +27,47 @@
                                      security_group_ids=[hex(time.time_ns())[2:]])
 
         Create network configuration.
-        
-        Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be
-        used for new Databricks clusters. This requires a pre-existing VPC and subnets.
-        
-        :param network_name: str
-          The human-readable name of the network configuration.
-        :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
-          The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
-          secondary IP ranges).
-        :param security_group_ids: List[str] (optional)
-          IDs of one to five security groups associated with this network. Security group IDs **cannot** be
-          used in multiple network configurations.
-        :param subnet_ids: List[str] (optional)
-          IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
-          network configurations.
-        :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
-          If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
-          PrivateLink].
-          
-          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
-        :param vpc_id: str (optional)
-          The ID of the VPC associated with this network. VPC IDs can be used in multiple network
-          configurations.
-        
-        :returns: :class:`Network`
-        
+
+Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be
+used for new Databricks clusters. This requires a pre-existing VPC and subnets.
+
+:param network_name: str
+  The human-readable name of the network configuration.
+:param gcp_network_info: :class:`GcpNetworkInfo` (optional)
+  The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
+  secondary IP ranges).
+:param security_group_ids: List[str] (optional)
+  IDs of one to five security groups associated with this network. Security group IDs **cannot** be
+  used in multiple network configurations.
+:param subnet_ids: List[str] (optional)
+  IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
+  network configurations.
+:param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
+  If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
+  PrivateLink].
+  
+  [AWS PrivateLink]: https://aws.amazon.com/privatelink/
+:param vpc_id: str (optional)
+  The ID of the VPC associated with this network. VPC IDs can be used in multiple network
+  configurations.
+
+:returns: :class:`Network`
+
 
     .. py:method:: delete(network_id: str)
 
         Delete a network configuration.
-        
-        Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
-        delete a network that is associated with a workspace.
-        
-        This operation is available only if your account is on the E2 version of the platform.
-        
-        :param network_id: str
-          Databricks Account API network configuration ID.
-        
-        
-        
+
+Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
+delete a network that is associated with a workspace.
+
+This operation is available only if your account is on the E2 version of the platform.
+
+:param network_id: str
+  Databricks Account API network configuration ID.
+
+
+
 
     .. py:method:: get(network_id: str) -> Network
 
@@ -91,14 +91,14 @@
             by_id = a.networks.get(network_id=netw.network_id)
 
         Get a network configuration.
-        
-        Gets a Databricks network configuration, which represents a cloud VPC and its resources.
-        
-        :param network_id: str
-          Databricks Account API network configuration ID.
-        
-        :returns: :class:`Network`
-        
+
+Gets a Databricks network configuration, which represents a cloud VPC and its resources.
+
+:param network_id: str
+  Databricks Account API network configuration ID.
+
+:returns: :class:`Network`
+
 
     .. py:method:: list() -> Iterator[Network]
 
@@ -114,10 +114,9 @@
             configs = a.networks.list()
 
         Get all network configurations.
-        
-        Gets a list of all Databricks network configurations for an account, specified by ID.
-        
-        This operation is available only if your account is on the E2 version of the platform.
-        
-        :returns: Iterator over :class:`Network`
-        
\ No newline at end of file
+
+Gets a list of all Databricks network configurations for an account, specified by ID.
+
+This operation is available only if your account is on the E2 version of the platform.
+
+:returns: Iterator over :class:`Network`
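
A sketch of the create call using the parameters documented above; every AWS ID below is hypothetical:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Subnet and security-group IDs cannot be reused across network
# configurations; the VPC ID can.
netw = a.networks.create(
    network_name="my-network",
    vpc_id="vpc-0123456789abcdef0",
    subnet_ids=["subnet-aaaa1111", "subnet-bbbb2222"],  # at least two subnets
    security_group_ids=["sg-cccc3333"],  # one to five security groups
)
print(netw.network_id)
```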
diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst
index 10022068e..c51b7567f 100644
--- a/docs/account/provisioning/private_access.rst
+++ b/docs/account/provisioning/private_access.rst
@@ -27,68 +27,68 @@
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Create private access settings.
-        
-        Creates a private access settings object, which specifies how your workspace is accessed over [AWS
-        PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
-        referenced by ID in the workspace's `private_access_settings_id` property.
-        
-        You can share one private access settings with multiple workspaces in a single account. However,
-        private access settings are specific to AWS regions, so only workspaces in the same AWS region can use
-        a given private access settings object.
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :param private_access_settings_name: str
-          The human-readable name of the private access settings object.
-        :param region: str
-          The cloud region for workspaces associated with this private access settings object.
-        :param allowed_vpc_endpoint_ids: List[str] (optional)
-          An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
-          the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
-          AWS.
-          
-          Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
-          that in your account that can connect to your workspace over AWS PrivateLink.
-          
-          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
-          control only works for PrivateLink connections. To control how your workspace is accessed via public
-          internet, see [IP access lists].
-          
-          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
-        :param private_access_level: :class:`PrivateAccessLevel` (optional)
-          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
-          that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
-          only VPC endpoints that are registered in your Databricks account connect to your workspace. *
-          `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
-          see `allowed_vpc_endpoint_ids`.
-        :param public_access_enabled: bool (optional)
-          Determines if the workspace can be accessed over public internet. For fully private workspaces, you
-          can optionally specify `false`, but only if you implement both the front-end and the back-end
-          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-        
-        :returns: :class:`PrivateAccessSettings`
-        
+
+Creates a private access settings object, which specifies how your workspace is accessed over [AWS
+PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
+referenced by ID in the workspace's `private_access_settings_id` property.
+
+You can share one private access settings object with multiple workspaces in a single account. However,
+private access settings are specific to AWS regions, so only workspaces in the same AWS region can use
+a given private access settings object.
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:param private_access_settings_name: str
+  The human-readable name of the private access settings object.
+:param region: str
+  The cloud region for workspaces associated with this private access settings object.
+:param allowed_vpc_endpoint_ids: List[str] (optional)
+  An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
+  the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
+  AWS.
+  
+  Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
+  in your account that can connect to your workspace over AWS PrivateLink.
+  
+  If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
+  control only works for PrivateLink connections. To control how your workspace is accessed via public
+  internet, see [IP access lists].
+  
+  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
+:param private_access_level: :class:`PrivateAccessLevel` (optional)
+  The private access level controls which VPC endpoints can connect to the UI or API of any workspace
+  that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
+  only VPC endpoints that are registered in your Databricks account connect to your workspace. *
+  `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
+  see `allowed_vpc_endpoint_ids`.
+:param public_access_enabled: bool (optional)
+  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
+  can optionally specify `false`, but only if you implement both the front-end and the back-end
+  PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
+
+:returns: :class:`PrivateAccessSettings`
+
 
     .. py:method:: delete(private_access_settings_id: str)
 
         Delete a private access settings object.
-        
-        Deletes a private access settings object, which determines how your workspace is accessed over [AWS
-        PrivateLink].
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :param private_access_settings_id: str
-          Databricks Account API private access settings ID.
-        
-        
-        
+
+Deletes a private access settings object, which determines how your workspace is accessed over [AWS
+PrivateLink].
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:param private_access_settings_id: str
+  Databricks Account API private access settings ID.
+
+
+
 
     .. py:method:: get(private_access_settings_id: str) -> PrivateAccessSettings
 
@@ -113,20 +113,20 @@
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Get a private access settings object.
-        
-        Gets a private access settings object, which specifies how your workspace is accessed over [AWS
-        PrivateLink].
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :param private_access_settings_id: str
-          Databricks Account API private access settings ID.
-        
-        :returns: :class:`PrivateAccessSettings`
-        
+
+Gets a private access settings object, which specifies how your workspace is accessed over [AWS
+PrivateLink].
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:param private_access_settings_id: str
+  Databricks Account API private access settings ID.
+
+:returns: :class:`PrivateAccessSettings`
+
 
     .. py:method:: list() -> Iterator[PrivateAccessSettings]
 
@@ -142,11 +142,11 @@
             all = a.private_access.list()
 
         Get all private access settings objects.
-        
-        Gets a list of all private access settings objects for an account, specified by ID.
-        
-        :returns: Iterator over :class:`PrivateAccessSettings`
-        
+
+Gets a list of all private access settings objects for an account, specified by ID.
+
+:returns: Iterator over :class:`PrivateAccessSettings`
+
 
     .. py:method:: replace(private_access_settings_id: str, private_access_settings_name: str, region: str [, allowed_vpc_endpoint_ids: Optional[List[str]], private_access_level: Optional[PrivateAccessLevel], public_access_enabled: Optional[bool]])
 
@@ -173,54 +173,53 @@
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Replace private access settings.
-        
-        Updates an existing private access settings object, which specifies how your workspace is accessed
-        over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
-        referenced by ID in the workspace's `private_access_settings_id` property.
-        
-        This operation completely overwrites your existing private access settings object attached to your
-        workspaces. All workspaces attached to the private access settings are affected by any change. If
-        `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of
-        these changes might take several minutes to propagate to the workspace API.
-        
-        You can share one private access settings object with multiple workspaces in a single account.
-        However, private access settings are specific to AWS regions, so only workspaces in the same AWS
-        region can use a given private access settings object.
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :param private_access_settings_id: str
-          Databricks Account API private access settings ID.
-        :param private_access_settings_name: str
-          The human-readable name of the private access settings object.
-        :param region: str
-          The cloud region for workspaces associated with this private access settings object.
-        :param allowed_vpc_endpoint_ids: List[str] (optional)
-          An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
-          the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
-          AWS.
-          
-          Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
-          that in your account that can connect to your workspace over AWS PrivateLink.
-          
-          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
-          control only works for PrivateLink connections. To control how your workspace is accessed via public
-          internet, see [IP access lists].
-          
-          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
-        :param private_access_level: :class:`PrivateAccessLevel` (optional)
-          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
-          that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
-          only VPC endpoints that are registered in your Databricks account connect to your workspace. *
-          `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
-          see `allowed_vpc_endpoint_ids`.
-        :param public_access_enabled: bool (optional)
-          Determines if the workspace can be accessed over public internet. For fully private workspaces, you
-          can optionally specify `false`, but only if you implement both the front-end and the back-end
-          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-        
-        
-        
\ No newline at end of file
+
+Updates an existing private access settings object, which specifies how your workspace is accessed
+over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
+referenced by ID in the workspace's `private_access_settings_id` property.
+
+This operation completely overwrites your existing private access settings object attached to your
+workspaces. All workspaces attached to the private access settings are affected by any change. If
+`public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of
+these changes might take several minutes to propagate to the workspace API.
+
+You can share one private access settings object with multiple workspaces in a single account.
+However, private access settings are specific to AWS regions, so only workspaces in the same AWS
+region can use a given private access settings object.
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:param private_access_settings_id: str
+  Databricks Account API private access settings ID.
+:param private_access_settings_name: str
+  The human-readable name of the private access settings object.
+:param region: str
+  The cloud region for workspaces associated with this private access settings object.
+:param allowed_vpc_endpoint_ids: List[str] (optional)
+  An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
+  the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
+  AWS.
+  
+  Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
+  in your account that can connect to your workspace over AWS PrivateLink.
+  
+  If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
+  control only works for PrivateLink connections. To control how your workspace is accessed via public
+  internet, see [IP access lists].
+  
+  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
+:param private_access_level: :class:`PrivateAccessLevel` (optional)
+  The private access level controls which VPC endpoints can connect to the UI or API of any workspace
+  that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
+  only VPC endpoints that are registered in your Databricks account connect to your workspace. *
+  `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
+  see `allowed_vpc_endpoint_ids`.
+:param public_access_enabled: bool (optional)
+  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
+  can optionally specify `false`, but only if you implement both the front-end and the back-end
+  PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
+
+
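
A sketch of creating an `ACCOUNT`-level private access settings object, per the create method above; the `PrivateAccessLevel` enum is assumed from `databricks.sdk.service.provisioning`, and the name and region are illustrative:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.provisioning import PrivateAccessLevel

a = AccountClient()

# ACCOUNT-level access (the default) admits any VPC endpoint registered in
# the account; ENDPOINT-level access would also need allowed_vpc_endpoint_ids.
pas = a.private_access.create(
    private_access_settings_name="my-private-access",
    region="us-west-2",
    private_access_level=PrivateAccessLevel.ACCOUNT,
    public_access_enabled=True,
)
print(pas.private_access_settings_id)
```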
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index 611a8cdc6..c538ca1d7 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -5,9 +5,9 @@
 .. py:class:: StorageAPI
 
     These APIs manage storage configurations for this workspace. A root storage S3 bucket in your account is
-    required to store objects like cluster logs, notebook revisions, and job results. You can also use the
-    root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this
-    bucket information, and its ID is used when creating a new workspace.
+required to store objects like cluster logs, notebook revisions, and job results. You can also use the
+root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this
+bucket information, and its ID is used when creating a new workspace.
 
     .. py:method:: create(storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration
 
@@ -32,37 +32,37 @@
             a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 
         Create new storage configuration.
-        
-        Creates new storage configuration for an account, specified by ID. Uploads a storage configuration
-        object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
-        assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
-        required bucket policy.
-        
-        For information about how to create a new workspace with this API, see [Create a new workspace using
-        the Account API]
-        
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
-        :param storage_configuration_name: str
-          The human-readable name of the storage configuration.
-        :param root_bucket_info: :class:`RootBucketInfo`
-          Root S3 bucket information.
-        
-        :returns: :class:`StorageConfiguration`
-        
+
+Creates a new storage configuration for an account, specified by ID. Uploads a storage configuration
+object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
+assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
+required bucket policy.
+
+For information about how to create a new workspace with this API, see [Create a new workspace using
+the Account API].
+
+[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+
+:param storage_configuration_name: str
+  The human-readable name of the storage configuration.
+:param root_bucket_info: :class:`RootBucketInfo`
+  Root S3 bucket information.
+
+:returns: :class:`StorageConfiguration`
+
 
     .. py:method:: delete(storage_configuration_id: str)
 
         Delete storage configuration.
-        
-        Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
-        associated with any workspace.
-        
-        :param storage_configuration_id: str
-          Databricks Account API storage configuration ID.
-        
-        
-        
+
+Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
+associated with any workspace.
+
+:param storage_configuration_id: str
+  Databricks Account API storage configuration ID.
+
+
+
 
     .. py:method:: get(storage_configuration_id: str) -> StorageConfiguration
 
@@ -84,14 +84,14 @@
             by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id)
 
         Get storage configuration.
-        
-        Gets a Databricks storage configuration for an account, both specified by ID.
-        
-        :param storage_configuration_id: str
-          Databricks Account API storage configuration ID.
-        
-        :returns: :class:`StorageConfiguration`
-        
+
+Gets a Databricks storage configuration for an account, both specified by ID.
+
+:param storage_configuration_id: str
+  Databricks Account API storage configuration ID.
+
+:returns: :class:`StorageConfiguration`
+
 
     .. py:method:: list() -> Iterator[StorageConfiguration]
 
@@ -107,8 +107,7 @@
             configs = a.storage.list()
 
         Get all storage configurations.
-        
-        Gets a list of all Databricks storage configurations for your account, specified by ID.
-        
-        :returns: Iterator over :class:`StorageConfiguration`
-        
\ No newline at end of file
+
+Gets a list of all Databricks storage configurations for your account, specified by ID.
+
+:returns: Iterator over :class:`StorageConfiguration`
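A short end-to-end sketch of the storage configuration lifecycle documented above (the bucket name is a placeholder):

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import RootBucketInfo

    a = AccountClient()

    # Register the root S3 bucket; the returned storage_configuration_id is
    # what workspace creation later consumes.
    storage = a.storage.create(
        storage_configuration_name="sdk-docs-example",
        root_bucket_info=RootBucketInfo(bucket_name="my-root-bucket"),
    )

    # Fetch it back by ID, then clean up. Deletion fails if the configuration
    # is still attached to a workspace.
    by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id)
    a.storage.delete(storage_configuration_id=storage.storage_configuration_id)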
diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst
index d2622dc0f..2b9657b5e 100644
--- a/docs/account/provisioning/vpc_endpoints.rst
+++ b/docs/account/provisioning/vpc_endpoints.rst
@@ -28,50 +28,50 @@
             a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)
 
         Create VPC endpoint configuration.
-        
-        Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
-        communicate privately with Databricks over [AWS PrivateLink].
-        
-        After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
-        accepts the VPC endpoint.
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
-        [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
-        
-        :param vpc_endpoint_name: str
-          The human-readable name of the storage configuration.
-        :param aws_vpc_endpoint_id: str (optional)
-          The ID of the VPC endpoint object in AWS.
-        :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
-          The Google Cloud specific information for this Private Service Connect endpoint.
-        :param region: str (optional)
-          The AWS region in which this VPC endpoint object exists.
-        
-        :returns: :class:`VpcEndpoint`
-        
+
+Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
+communicate privately with Databricks over [AWS PrivateLink].
+
+After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
+accepts the VPC endpoint.
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
+[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
+
+:param vpc_endpoint_name: str
+  The human-readable name of the VPC endpoint configuration.
+:param aws_vpc_endpoint_id: str (optional)
+  The ID of the VPC endpoint object in AWS.
+:param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
+  The Google Cloud specific information for this Private Service Connect endpoint.
+:param region: str (optional)
+  The AWS region in which this VPC endpoint object exists.
+
+:returns: :class:`VpcEndpoint`
+
 
     .. py:method:: delete(vpc_endpoint_id: str)
 
         Delete VPC endpoint configuration.
-        
-        Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
-        privately with Databricks over [AWS PrivateLink].
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :param vpc_endpoint_id: str
-          Databricks VPC endpoint ID.
-        
-        
-        
+
+Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
+privately with Databricks over [AWS PrivateLink].
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:param vpc_endpoint_id: str
+  Databricks VPC endpoint ID.
+
+
+
 
     .. py:method:: get(vpc_endpoint_id: str) -> VpcEndpoint
 
@@ -97,18 +97,18 @@
             a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)
 
         Get a VPC endpoint configuration.
-        
-        Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
-        privately with Databricks over [AWS PrivateLink].
-        
-        [AWS PrivateLink]: https://aws.amazon.com/privatelink
-        [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
-        
-        :param vpc_endpoint_id: str
-          Databricks VPC endpoint ID.
-        
-        :returns: :class:`VpcEndpoint`
-        
+
+Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
+privately with Databricks over [AWS PrivateLink].
+
+[AWS PrivateLink]: https://aws.amazon.com/privatelink
+[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
+
+:param vpc_endpoint_id: str
+  Databricks VPC endpoint ID.
+
+:returns: :class:`VpcEndpoint`
+
 
     .. py:method:: list() -> Iterator[VpcEndpoint]
 
@@ -124,12 +124,11 @@
             all = a.vpc_endpoints.list()
 
         Get all VPC endpoint configurations.
-        
-        Gets a list of all VPC endpoints for an account, specified by ID.
-        
-        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-        
-        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        
-        :returns: Iterator over :class:`VpcEndpoint`
-        
\ No newline at end of file
+
+Gets a list of all VPC endpoints for an account, specified by ID.
+
+Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+
+[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+
+:returns: Iterator over :class:`VpcEndpoint`
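A usage sketch covering the VPC endpoint methods above; the AWS endpoint ID and region are placeholders:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Register an existing AWS VPC endpoint with Databricks; the Databricks
    # endpoint service accepts it automatically after creation.
    created = a.vpc_endpoints.create(
        vpc_endpoint_name="sdk-docs-example",
        aws_vpc_endpoint_id="vpce-0123456789abcdef0",
        region="us-west-2",
    )

    # Enumerate all endpoint configurations in the account, then clean up.
    for ep in a.vpc_endpoints.list():
        print(ep.vpc_endpoint_name, ep.vpc_endpoint_id)

    a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)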
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index ad8a75942..3e312984a 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -5,11 +5,11 @@
 .. py:class:: WorkspacesAPI
 
     These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all
-    of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into
-    folders, and provides access to data and computational resources such as clusters and jobs.
-    
-    These endpoints are available if your account is on the E2 version of the platform or on a select custom
-    plan that allows multiple workspaces per account.
+of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into
+folders, and provides access to data and computational resources such as clusters and jobs.
+
+These endpoints are available if your account is on the E2 version of the platform or on a select custom
+plan that allows multiple workspaces per account.
 
     .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
@@ -46,109 +46,109 @@
             a.workspaces.delete(workspace_id=waiter.workspace_id)
 
         Create a new workspace.
-        
-        Creates a new workspace.
-        
-        **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
-        has been accepted and is in progress, but does not mean that the workspace deployed successfully and
-        is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID
-        (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests
-        with the workspace ID and check its status. The workspace becomes available when the status changes to
-        `RUNNING`.
-        
-        :param workspace_name: str
-          The workspace's human-readable name.
-        :param aws_region: str (optional)
-          The AWS region of the workspace's data plane.
-        :param cloud: str (optional)
-          The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
-          `gcp`.
-        :param cloud_resource_container: :class:`CloudResourceContainer` (optional)
-          The general workspace configurations that are specific to cloud providers.
-        :param credentials_id: str (optional)
-          ID of the workspace's credential configuration object.
-        :param custom_tags: Dict[str,str] (optional)
-          The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
-          of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The
-          key can be of maximum length of 127 characters, and cannot be empty.
-        :param deployment_name: str (optional)
-          The deployment name defines part of the subdomain for the workspace. The workspace URL for the web
-          application and REST APIs is `.cloud.databricks.com`. For example, if the
-          deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`.
-          Hyphens are allowed. This property supports only the set of characters that are allowed in a
-          subdomain.
-          
-          To set this value, you must have a deployment name prefix. Contact your Databricks account team to
-          add an account deployment name prefix to your account.
-          
-          Workspace deployment names follow the account prefix and a hyphen. For example, if your account's
-          deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response
-          for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be
-          `acme-workspace-1.cloud.databricks.com`.
-          
-          You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment
-          name to only include the deployment prefix. For example, if your account's deployment prefix is
-          `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and
-          the workspace URL is `acme.cloud.databricks.com`.
-          
-          This value must be unique across all non-deleted deployments across all AWS regions.
-          
-          If a new workspace omits this property, the server generates a unique deployment name for you with
-          the pattern `dbc-xxxxxxxx-xxxx`.
-        :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
-          The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
-          is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range
-          configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
-          detects an IP range overlap.
-          
-          Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
-          addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
-          `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-          
-          The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-          
-          **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
-          workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
-          your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
-          determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
-          Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-          
-          [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
-        :param gke_config: :class:`GkeConfig` (optional)
-          The configurations for the GKE cluster of a Databricks workspace.
-        :param is_no_public_ip_enabled: bool (optional)
-          Whether no public IP is enabled for the workspace.
-        :param location: str (optional)
-          The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
-        :param managed_services_customer_managed_key_id: str (optional)
-          The ID of the workspace's managed services encryption key configuration object. This is used to help
-          protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query
-          history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
-        :param network_id: str (optional)
-        :param pricing_tier: :class:`PricingTier` (optional)
-          The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-          
-          [AWS Pricing]: https://databricks.com/product/aws-pricing
-        :param private_access_settings_id: str (optional)
-          ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
-          specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
-          back-end (data plane to control plane connection), or both connection types.
-          
-          Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-          
-          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
-          [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-        :param storage_configuration_id: str (optional)
-          The ID of the workspace's storage configuration object.
-        :param storage_customer_managed_key_id: str (optional)
-          The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
-          workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
-          provided key configuration object property `use_cases` must contain `STORAGE`.
-        
-        :returns:
-          Long-running operation waiter for :class:`Workspace`.
-          See :method:wait_get_workspace_running for more details.
-        
+
+Creates a new workspace.
+
+**Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
+has been accepted and is in progress, but does not mean that the workspace deployed successfully and
+is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID
+(`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests
+with the workspace ID and check its status. The workspace becomes available when the status changes to
+`RUNNING`.
+
+:param workspace_name: str
+  The workspace's human-readable name.
+:param aws_region: str (optional)
+  The AWS region of the workspace's data plane.
+:param cloud: str (optional)
+  The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
+  `gcp`.
+:param cloud_resource_container: :class:`CloudResourceContainer` (optional)
+  The general workspace configurations that are specific to cloud providers.
+:param credentials_id: str (optional)
+  ID of the workspace's credential configuration object.
+:param custom_tags: Dict[str,str] (optional)
+  The custom tags key-value pairing that is attached to this workspace. Each key and value is a string
+  of UTF-8 characters. The value can be an empty string, with a maximum length of 255 characters. The
+  key can be at most 127 characters long and cannot be empty.
+:param deployment_name: str (optional)
+  The deployment name defines part of the subdomain for the workspace. The workspace URL for the web
+  application and REST APIs is `<deployment-name>.cloud.databricks.com`. For example, if the
+  deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`.
+  Hyphens are allowed. This property supports only the set of characters that are allowed in a
+  subdomain.
+  
+  To set this value, you must have a deployment name prefix. Contact your Databricks account team to
+  add an account deployment name prefix to your account.
+  
+  Workspace deployment names follow the account prefix and a hyphen. For example, if your account's
+  deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response
+  for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be
+  `acme-workspace-1.cloud.databricks.com`.
+  
+  You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment
+  name to only include the deployment prefix. For example, if your account's deployment prefix is
+  `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and
+  the workspace URL is `acme.cloud.databricks.com`.
+  
+  This value must be unique across all non-deleted deployments across all AWS regions.
+  
+  If a new workspace omits this property, the server generates a unique deployment name for you with
+  the pattern `dbc-xxxxxxxx-xxxx`.
+:param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
+  The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
+  is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range
+  configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
+  detects an IP range overlap.
+  
+  Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
+  addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
+  `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
+  
+  The sizes of these IP ranges affect the maximum number of nodes for the workspace.
+  
+  **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
+  workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
+  your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
+  determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
+  Excel spreadsheet. See [calculate subnet sizes for a new workspace].
+  
+  [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
+:param gke_config: :class:`GkeConfig` (optional)
+  The configurations for the GKE cluster of a Databricks workspace.
+:param is_no_public_ip_enabled: bool (optional)
+  Whether no public IP is enabled for the workspace.
+:param location: str (optional)
+  The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
+:param managed_services_customer_managed_key_id: str (optional)
+  The ID of the workspace's managed services encryption key configuration object. This is used to help
+  protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query
+  history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
+:param network_id: str (optional)
+:param pricing_tier: :class:`PricingTier` (optional)
+  The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
+  
+  [AWS Pricing]: https://databricks.com/product/aws-pricing
+:param private_access_settings_id: str (optional)
+  ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
+  specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
+  back-end (data plane to control plane connection), or both connection types.
+  
+  Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+  
+  [AWS PrivateLink]: https://aws.amazon.com/privatelink/
+  [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+:param storage_configuration_id: str (optional)
+  The ID of the workspace's storage configuration object.
+:param storage_customer_managed_key_id: str (optional)
+  The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
+  workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
+  provided key configuration object property `use_cases` must contain `STORAGE`.
+
+:returns:
+  Long-running operation waiter for :class:`Workspace`.
+  See :method:wait_get_workspace_running for more details.
+
 
     .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
@@ -156,19 +156,19 @@
     .. py:method:: delete(workspace_id: int)
 
         Delete a workspace.
-        
-        Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
-        However, it might take a few minutes for all workspaces resources to be deleted, depending on the size
-        and number of workspace resources.
-        
-        This operation is available only if your account is on the E2 version of the platform or on a select
-        custom plan that allows multiple workspaces per account.
-        
-        :param workspace_id: int
-          Workspace ID.
-        
-        
-        
+
+Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
+However, it might take a few minutes for all workspace resources to be deleted, depending on the size
+and number of workspace resources.
+
+This operation is available only if your account is on the E2 version of the platform or on a select
+custom plan that allows multiple workspaces per account.
+
+:param workspace_id: int
+  Workspace ID.
+
+
+
 
     .. py:method:: get(workspace_id: int) -> Workspace
 
@@ -186,25 +186,25 @@
             by_id = a.workspaces.get(workspace_id=created.workspace_id)
 
         Get a workspace.
-        
-        Gets information including status for a Databricks workspace, specified by ID. In the response, the
-        `workspace_status` field indicates the current status. After initial workspace creation (which is
-        asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
-        becomes available when the status changes to `RUNNING`.
-        
-        For information about how to create a new workspace with this API **including error handling**, see
-        [Create a new workspace using the Account API].
-        
-        This operation is available only if your account is on the E2 version of the platform or on a select
-        custom plan that allows multiple workspaces per account.
-        
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
-        :param workspace_id: int
-          Workspace ID.
-        
-        :returns: :class:`Workspace`
-        
+
+Gets information including status for a Databricks workspace, specified by ID. In the response, the
+`workspace_status` field indicates the current status. After initial workspace creation (which is
+asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
+becomes available when the status changes to `RUNNING`.
+
+For information about how to create a new workspace with this API **including error handling**, see
+[Create a new workspace using the Account API].
+
+This operation is available only if your account is on the E2 version of the platform or on a select
+custom plan that allows multiple workspaces per account.
+
+[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+
+:param workspace_id: int
+  Workspace ID.
+
+:returns: :class:`Workspace`
+
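The repeated-GET pattern described above, written out as a manual polling loop. This is roughly what the SDK's create_and_wait helper does internally; the workspace ID is a placeholder:

    import time

    from databricks.sdk import AccountClient
    from databricks.sdk.service.provisioning import WorkspaceStatus

    a = AccountClient()

    # Poll the workspace status until provisioning finishes; the workspace
    # becomes usable once the status transitions to RUNNING.
    workspace_id = 1234567890
    while True:
        ws = a.workspaces.get(workspace_id=workspace_id)
        if ws.workspace_status == WorkspaceStatus.RUNNING:
            break
        if ws.workspace_status == WorkspaceStatus.FAILED:
            raise RuntimeError(ws.workspace_status_message)
        time.sleep(30)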
 
     .. py:method:: list() -> Iterator[Workspace]
 
@@ -220,14 +220,14 @@
             all = a.workspaces.list()
 
         Get all workspaces.
-        
-        Gets a list of all workspaces associated with an account, specified by ID.
-        
-        This operation is available only if your account is on the E2 version of the platform or on a select
-        custom plan that allows multiple workspaces per account.
-        
-        :returns: Iterator over :class:`Workspace`
-        
+
+Gets a list of all workspaces associated with an account, specified by ID.
+
+This operation is available only if your account is on the E2 version of the platform or on a select
+custom plan that allows multiple workspaces per account.
+
+:returns: Iterator over :class:`Workspace`
+
 
     .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
@@ -257,135 +257,135 @@
             a.credentials.delete(credentials_id=update_role.credentials_id)
 
         Update workspace configuration.
-        
-        Updates a workspace configuration for either a running workspace or a failed workspace. The elements
-        that can be updated varies between these two use cases.
-        
-        ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace
-        deployment for some fields, but not all fields. For a failed workspace, this request supports updates
-        to the following fields only: - Credential configuration ID - Storage configuration ID - Network
-        configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a
-        failed workspace only, you can convert a workspace with Databricks-managed VPC to use a
-        customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC
-        to be a Databricks-managed VPC. You can update the network configuration for a failed or running
-        workspace to add PrivateLink support, though you must also add a private access settings object. - Key
-        configuration ID for managed services (control plane storage, such as notebook source and Databricks
-        SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID
-        for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use
-        customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running
-        state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID
-        for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update
-        the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both
-        types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink
-        support on a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be
-        applied. - Network connectivity configuration ID to add serverless stable IP support. You can add or
-        update the network connectivity configuration ID to ensure the workspace uses the same set of stable
-        IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from
-        the workspace once attached, you can only switch to another one.
-        
-        After calling the `PATCH` operation to update the workspace configuration, make repeated `GET`
-        requests with the workspace ID and check the workspace status. The workspace is successful if the
-        status changes to `RUNNING`.
-        
-        For information about how to create a new workspace with this API **including error handling**, see
-        [Create a new workspace using the Account API].
-        
-        ### Update a running workspace You can update a Databricks workspace configuration for running
-        workspaces for some fields, but not all fields. For a running workspace, this request supports
-        updating the following fields only: - Credential configuration ID - Network configuration ID. Used
-        only if you already use a customer-managed VPC. You cannot convert a running workspace from a
-        Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this
-        API for a failed or running workspace to add support for PrivateLink, although you also need to add a
-        private access settings object. - Key configuration ID for managed services (control plane storage,
-        such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data
-        with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK)
-        that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to
-        encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK
-        for managed services, adding this ID enables managed services encryption for new or updated data.
-        Existing managed services data that existed before adding the key remains not encrypted with the DEK
-        until it is modified. If the workspace already has customer-managed keys for managed services, this
-        request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key
-        configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this
-        only if the workspace does not already have a customer-managed key configuration for workspace
-        storage. - Private access settings ID to add PrivateLink support. You can add or update the private
-        access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of
-        connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on
-        a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be applied. -
-        Network connectivity configuration ID to add serverless stable IP support. You can add or update the
-        network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR
-        blocks to access your resources. You cannot remove a network connectivity configuration from the
-        workspace once attached, you can only switch to another one.
-        
-        **Important**: To update a running workspace, your workspace must have no running compute resources
-        that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose
-        clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not
-        terminate all cluster instances in the workspace before calling this API, the request will fail.
-        
-        ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace
-        configuration, make repeated `GET` requests with the workspace ID and check the workspace status and
-        the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes
-        `PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the
-        workspace status changes to `RUNNING`. Note that you can also check the workspace status in the
-        [Account Console]. However, you cannot use or create clusters for another 20 minutes after that status
-        change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create
-        or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could
-        cause other unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status
-        stays at status `RUNNING` and the VPC change happens immediately. A change to the storage
-        customer-managed key configuration ID might take a few minutes to update, so continue to check the
-        workspace until you observe that it has been updated. If the update fails, the workspace might revert
-        silently to its original configuration. After the workspace has been updated, you cannot use or create
-        clusters for another 20 minutes. If you create or use clusters before this time interval elapses,
-        clusters do not launch successfully, fail, or could cause other unexpected behavior.
-        
-        If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes
-        to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to
-        the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20
-        minute wait.
-        
-        **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment
-        types and subscription types. If you have questions about availability, contact your Databricks
-        representative.
-        
-        This operation is available only if your account is on the E2 version of the platform or on a select
-        custom plan that allows multiple workspaces per account.
-        
-        [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
-        :param workspace_id: int
-          Workspace ID.
-        :param aws_region: str (optional)
-          The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available
-          only for updating failed workspaces.
-        :param credentials_id: str (optional)
-          ID of the workspace's credential configuration object. This parameter is available for updating both
-          failed and running workspaces.
-        :param custom_tags: Dict[str,str] (optional)
-          The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
-          of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The
-          key can be of maximum length of 127 characters, and cannot be empty.
-        :param managed_services_customer_managed_key_id: str (optional)
-          The ID of the workspace's managed services encryption key configuration object. This parameter is
-          available only for updating failed workspaces.
-        :param network_connectivity_config_id: str (optional)
-        :param network_id: str (optional)
-          The ID of the workspace's network configuration object. Used only if you already use a
-          customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
-          customer-managed VPC by updating the workspace to add a network configuration ID.
-        :param private_access_settings_id: str (optional)
-          The ID of the workspace's private access settings configuration object. This parameter is available
-          only for updating failed workspaces.
-        :param storage_configuration_id: str (optional)
-          The ID of the workspace's storage configuration object. This parameter is available only for
-          updating failed workspaces.
-        :param storage_customer_managed_key_id: str (optional)
-          The ID of the key configuration object for workspace storage. This parameter is available for
-          updating both failed and running workspaces.
-        
-        :returns:
-          Long-running operation waiter for :class:`Workspace`.
-          See :method:wait_get_workspace_running for more details.
-        
+
+Updates a workspace configuration for either a running workspace or a failed workspace. The elements
+that can be updated vary between these two use cases.
+
+### Update a failed workspace You can update a Databricks workspace configuration for failed workspace
+deployment for some fields, but not all fields. For a failed workspace, this request supports updates
+to the following fields only: - Credential configuration ID - Storage configuration ID - Network
+configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a
+failed workspace only, you can convert a workspace with Databricks-managed VPC to use a
+customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC
+to be a Databricks-managed VPC. You can update the network configuration for a failed or running
+workspace to add PrivateLink support, though you must also add a private access settings object. - Key
+configuration ID for managed services (control plane storage, such as notebook source and Databricks
+SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID
+for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use
+customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running
+state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID
+for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update
+the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both
+types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink
+support on a workspace. - Custom tags. If you provide empty custom tags, the update is not
+applied. - Network connectivity configuration ID to add serverless stable IP support. You can add or
+update the network connectivity configuration ID to ensure the workspace uses the same set of stable
+IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from
+the workspace once attached; you can only switch to another one.
+
+After calling the `PATCH` operation to update the workspace configuration, make repeated `GET`
+requests with the workspace ID and check the workspace status. The workspace is successful if the
+status changes to `RUNNING`.
+
+For information about how to create a new workspace with this API **including error handling**, see
+[Create a new workspace using the Account API].
+
+### Update a running workspace You can update a Databricks workspace configuration for running
+workspaces for some fields, but not all fields. For a running workspace, this request supports
+updating the following fields only: - Credential configuration ID - Network configuration ID. Used
+only if you already use a customer-managed VPC. You cannot convert a running workspace from a
+Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this
+API for a failed or running workspace to add support for PrivateLink, although you also need to add a
+private access settings object. - Key configuration ID for managed services (control plane storage,
+such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data
+with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK)
+that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to
+encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK
+for managed services, adding this ID enables managed services encryption for new or updated data.
+Managed services data that existed before the key was added remains unencrypted with the DEK
+until it is modified. If the workspace already has customer-managed keys for managed services, this
+request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key
+configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this
+only if the workspace does not already have a customer-managed key configuration for workspace
+storage. - Private access settings ID to add PrivateLink support. You can add or update the private
+access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of
+connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on
+a workspace. - Custom tags. If you provide empty custom tags, the update is not applied. -
+Network connectivity configuration ID to add serverless stable IP support. You can add or update the
+network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR
+blocks to access your resources. You cannot remove a network connectivity configuration from the
+workspace once attached; you can only switch to another one.
+
+**Important**: To update a running workspace, your workspace must have no running compute resources
+that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose
+clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not
+terminate all cluster instances in the workspace before calling this API, the request will fail.
+
+### Wait until changes take effect. After calling the `PATCH` operation to update the workspace
+configuration, make repeated `GET` requests with the workspace ID and check the workspace status and
+the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes
+`PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the
+workspace status changes to `RUNNING`. Note that you can also check the workspace status in the
+[Account Console]. However, you cannot use or create clusters for another 20 minutes after that status
+change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create
+or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could
+cause other unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status
+stays at status `RUNNING` and the VPC change happens immediately. A change to the storage
+customer-managed key configuration ID might take a few minutes to update, so continue to check the
+workspace until you observe that it has been updated. If the update fails, the workspace might revert
+silently to its original configuration. After the workspace has been updated, you cannot use or create
+clusters for another 20 minutes. If you create or use clusters before this time interval elapses,
+clusters do not launch successfully, fail, or could cause other unexpected behavior.
+
+If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes
+to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to
+the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20
+minute wait.
+
+**Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment
+types and subscription types. If you have questions about availability, contact your Databricks
+representative.
+
+This operation is available only if your account is on the E2 version of the platform or on a select
+custom plan that allows multiple workspaces per account.
+
+[Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
+[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+
+:param workspace_id: int
+  Workspace ID.
+:param aws_region: str (optional)
+  The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available
+  only for updating failed workspaces.
+:param credentials_id: str (optional)
+  ID of the workspace's credential configuration object. This parameter is available for updating both
+  failed and running workspaces.
+:param custom_tags: Dict[str,str] (optional)
+  The custom tags key-value pairing that is attached to this workspace. Each key and value is a string
+  of UTF-8 characters. The value can be an empty string, with a maximum length of 255 characters. The
+  key can be at most 127 characters long and cannot be empty.
+:param managed_services_customer_managed_key_id: str (optional)
+  The ID of the workspace's managed services encryption key configuration object. This parameter is
+  available only for updating failed workspaces.
+:param network_connectivity_config_id: str (optional)
+:param network_id: str (optional)
+  The ID of the workspace's network configuration object. Used only if you already use a
+  customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
+  customer-managed VPC by updating the workspace to add a network configuration ID.
+:param private_access_settings_id: str (optional)
+  The ID of the workspace's private access settings configuration object. This parameter is available
+  only for updating failed workspaces.
+:param storage_configuration_id: str (optional)
+  The ID of the workspace's storage configuration object. This parameter is available only for
+  updating failed workspaces.
+:param storage_customer_managed_key_id: str (optional)
+  The ID of the key configuration object for workspace storage. This parameter is available for
+  updating both failed and running workspaces.
+
+:returns:
+  Long-running operation waiter for :class:`Workspace`.
+  See :method:wait_get_workspace_running for more details.
+
 
     .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
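A sketch of the create-then-update flow using the *_and_wait variants, which block until the workspace returns to RUNNING; the environment variable names below are illustrative placeholders, not part of the SDK:

    import os

    from databricks.sdk import AccountClient

    a = AccountClient()

    # Create a workspace and block until it reaches RUNNING.
    workspace = a.workspaces.create_and_wait(
        workspace_name="sdk-docs-example",
        aws_region="us-west-2",
        credentials_id=os.environ["TEST_CREDENTIALS_ID"],        # placeholder
        storage_configuration_id=os.environ["TEST_STORAGE_ID"],  # placeholder
    )

    # Running workspaces accept only a narrow set of updates, such as a new
    # credential configuration; this is also a long-running operation.
    a.workspaces.update_and_wait(
        workspace_id=workspace.workspace_id,
        credentials_id=os.environ["NEW_CREDENTIALS_ID"],         # placeholder
    )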
 
diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst
index 885aae89f..5c8b0bc5b 100644
--- a/docs/account/settings/csp_enablement_account.rst
+++ b/docs/account/settings/csp_enablement_account.rst
@@ -5,47 +5,46 @@
 .. py:class:: CspEnablementAccountAPI
 
     The compliance security profile settings at the account level control whether to enable it for new
-    workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-    creation, account admins can enable the compliance security profile individually for each workspace.
-    
-    This settings can be disabled so that new workspaces do not have compliance security profile enabled by
-    default.
+workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+creation, account admins can enable the compliance security profile individually for each workspace.
+
+This setting can be disabled so that new workspaces do not have the compliance security profile enabled by
+default.
 
     .. py:method:: get( [, etag: Optional[str]]) -> CspEnablementAccountSetting
 
         Get the compliance security profile setting for new workspaces.
-        
-        Gets the compliance security profile setting for new workspaces.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`CspEnablementAccountSetting`
-        
+
+Gets the compliance security profile setting for new workspaces.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`CspEnablementAccountSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str) -> CspEnablementAccountSetting
 
         Update the compliance security profile setting for new workspaces.
-        
-        Updates the value of the compliance security profile setting for new workspaces.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`CspEnablementAccountSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`CspEnablementAccountSetting`
-        
\ No newline at end of file
+
+Updates the value of the compliance security profile setting for new workspaces.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`CspEnablementAccountSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`CspEnablementAccountSetting`
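
Reviewer note: the hunks in this file only re-indent the generated docstrings. For context, here is a minimal sketch of the get/update flow they describe, using the signatures above. The `a.settings.csp_enablement_account` attribute path matches the account settings properties shown later in this patch; the nested field named in the mask is an assumption.

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Read the current account-level setting; the response carries an etag
# used for optimistic concurrency control.
setting = a.settings.csp_enablement_account.get()

# Write it back, listing the changed field explicitly instead of the
# `*` wildcard, as the field_mask docstring recommends. The nested
# field path below is hypothetical.
a.settings.csp_enablement_account.update(
    allow_missing=True,  # always true for the Settings API
    setting=setting,
    field_mask="csp_enablement_account.value",
)
```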
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
index b10d7e2dc..5d6590a0f 100644
--- a/docs/account/settings/disable_legacy_features.rst
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -5,62 +5,61 @@
 .. py:class:: DisableLegacyFeaturesAPI
 
     Disable legacy features for new Databricks workspaces.
-    
-    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
-    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
-    prior to 13.3LTS.
+
+For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+prior to 13.3 LTS.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse
 
         Delete the disable legacy features setting.
-        
-        Deletes the disable legacy features setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
-        
+
+Deletes the disable legacy features setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteDisableLegacyFeaturesResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures
 
         Get the disable legacy features setting.
-        
-        Gets the value of the disable legacy features setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DisableLegacyFeatures`
-        
+
+Gets the value of the disable legacy features setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DisableLegacyFeatures`
+
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures
 
         Update the disable legacy features setting.
-        
-        Updates the value of the disable legacy features setting.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`DisableLegacyFeatures`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`DisableLegacyFeatures`
-        
\ No newline at end of file
+
+Updates the value of the disable legacy features setting.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`DisableLegacyFeatures`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`DisableLegacyFeatures`
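
Reviewer note: a short sketch of the read -> delete pattern the etag docstrings recommend, assuming the returned dataclass exposes its etag as `.etag`.

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Take the etag from a GET and pass it to DELETE so that a concurrent
# write to the setting fails the delete instead of being clobbered.
current = a.settings.disable_legacy_features.get()
a.settings.disable_legacy_features.delete(etag=current.etag)
```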
diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst
index 9485b7332..30d066165 100644
--- a/docs/account/settings/enable_ip_access_lists.rst
+++ b/docs/account/settings/enable_ip_access_lists.rst
@@ -5,59 +5,58 @@
 .. py:class:: EnableIpAccessListsAPI
 
     Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
-    disable restricted access based on IP addresses.
+disable restricted access based on IP addresses.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse
 
         Delete the account IP access toggle setting.
-        
-        Reverts the value of the account IP access toggle setting to default (ON)
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteAccountIpAccessEnableResponse`
-        
+
+Reverts the value of the account IP access toggle setting to default (ON)
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteAccountIpAccessEnableResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable
 
         Get the account IP access toggle setting.
-        
-        Gets the value of the account IP access toggle setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`AccountIpAccessEnable`
-        
+
+Gets the value of the account IP access toggle setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`AccountIpAccessEnable`
+
 
     .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable
 
         Update the account IP access toggle setting.
-        
-        Updates the value of the account IP access toggle setting.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`AccountIpAccessEnable`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`AccountIpAccessEnable`
-        
\ No newline at end of file
+
+Updates the value of the account IP access toggle setting.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`AccountIpAccessEnable`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`AccountIpAccessEnable`
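
Reviewer note: the same etag pattern applies to this toggle; a sketch, assuming a `.etag` attribute on the returned dataclass.

```python
from databricks.sdk import AccountClient

a = AccountClient()

current = a.settings.enable_ip_access_lists.get()

# Passing the etag back yields a response at least as fresh as the
# version already seen.
fresh = a.settings.enable_ip_access_lists.get(etag=current.etag)

# delete() reverts the toggle to its default (ON).
a.settings.enable_ip_access_lists.delete(etag=fresh.etag)
```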
diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst
index e9359d907..14e2a514f 100644
--- a/docs/account/settings/esm_enablement_account.rst
+++ b/docs/account/settings/esm_enablement_account.rst
@@ -5,44 +5,43 @@
 .. py:class:: EsmEnablementAccountAPI
 
     The enhanced security monitoring setting at the account level controls whether to enable the feature on
-    new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-    creation, account admins can enable enhanced security monitoring individually for each workspace.
+new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+creation, account admins can enable enhanced security monitoring individually for each workspace.
 
     .. py:method:: get( [, etag: Optional[str]]) -> EsmEnablementAccountSetting
 
         Get the enhanced security monitoring setting for new workspaces.
-        
-        Gets the enhanced security monitoring setting for new workspaces.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`EsmEnablementAccountSetting`
-        
+
+Gets the enhanced security monitoring setting for new workspaces.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`EsmEnablementAccountSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str) -> EsmEnablementAccountSetting
 
         Update the enhanced security monitoring setting for new workspaces.
-        
-        Updates the value of the enhanced security monitoring setting for new workspaces.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`EsmEnablementAccountSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`EsmEnablementAccountSetting`
-        
\ No newline at end of file
+
+Updates the value of the enhanced security monitoring setting for new workspaces.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`EsmEnablementAccountSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`EsmEnablementAccountSetting`
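
Reviewer note: same shape as the CSP setting above; the nested field path in the mask is hypothetical.

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Read-modify-write with an explicit field mask.
esm = a.settings.esm_enablement_account.get()
a.settings.esm_enablement_account.update(
    allow_missing=True,
    setting=esm,
    field_mask="esm_enablement_account.is_enforced",  # hypothetical path
)
```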
diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst
index 7718d0c54..b3b2a0aa4 100644
--- a/docs/account/settings/ip_access_lists.rst
+++ b/docs/account/settings/ip_access_lists.rst
@@ -5,147 +5,146 @@
 .. py:class:: AccountIpAccessListsAPI
 
     The Accounts IP Access List API enables account admins to configure IP access lists for access to the
-    account console.
-    
-    Account IP Access Lists affect web application access and REST API access to the account console and
-    account APIs. If the feature is disabled for the account, all access is allowed for this account. There is
-    support for allow lists (inclusion) and block lists (exclusion).
-    
-    When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
-    matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
-    lists**, the IP address is compared with the allow lists.
-    
-    If there is at least one allow list for the account, the connection is allowed only if the IP address
-    matches an allow list. If there are no allow lists for the account, all IP addresses are allowed.
-    
-    For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where
-    one CIDR counts as a single value.
-    
-    After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.
+account console.
+
+Account IP Access Lists affect web application access and REST API access to the account console and
+account APIs. If the feature is disabled for the account, all access is allowed for this account. There is
+support for allow lists (inclusion) and block lists (exclusion).
+
+When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
+matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
+lists**, the IP address is compared with the allow lists.
+
+If there is at least one allow list for the account, the connection is allowed only if the IP address
+matches an allow list. If there are no allow lists for the account, all IP addresses are allowed.
+
+For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where
+one CIDR counts as a single value.
+
+After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.
 
     .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse
 
         Create access list.
-        
-        Creates an IP access list for the account.
-        
-        A list can be an allow list or a block list. See the top of this file for a description of how the
-        server treats allow lists and block lists at runtime.
-        
-        When creating or updating an IP access list:
-        
-        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-        `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
-        error 400 is returned with `error_code` value `INVALID_STATE`.
-        
-        It can take a few minutes for the changes to take effect.
-        
-        :param label: str
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType`
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        :param ip_addresses: List[str] (optional)
-        
-        :returns: :class:`CreateIpAccessListResponse`
-        
+
+Creates an IP access list for the account.
+
+A list can be an allow list or a block list. See the top of this file for a description of how the
+server treats allow lists and block lists at runtime.
+
+When creating or updating an IP access list:
+
+* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+`error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
+error 400 is returned with `error_code` value `INVALID_STATE`.
+
+It can take a few minutes for the changes to take effect.
+
+:param label: str
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType`
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+  range. IP addresses in the block list are excluded even if they are included in an allow list.
+:param ip_addresses: List[str] (optional)
+
+:returns: :class:`CreateIpAccessListResponse`
+
 
     .. py:method:: delete(ip_access_list_id: str)
 
         Delete access list.
-        
-        Deletes an IP access list, specified by its list ID.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        
-        
-        
+
+Deletes an IP access list, specified by its list ID.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+
+
+
 
     .. py:method:: get(ip_access_list_id: str) -> GetIpAccessListResponse
 
         Get IP access list.
-        
-        Gets an IP access list, specified by its list ID.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        
-        :returns: :class:`GetIpAccessListResponse`
-        
+
+Gets an IP access list, specified by its list ID.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+
+:returns: :class:`GetIpAccessListResponse`
+
 
     .. py:method:: list() -> Iterator[IpAccessListInfo]
 
         Get access lists.
-        
-        Gets all IP access lists for the specified account.
-        
-        :returns: Iterator over :class:`IpAccessListInfo`
-        
+
+Gets all IP access lists for the specified account.
+
+:returns: Iterator over :class:`IpAccessListInfo`
+
 
     .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]])
 
         Replace access list.
-        
-        Replaces an IP access list, specified by its ID.
-        
-        A list can include allow lists and block lists. See the top of this file for a description of how the
-        server treats allow lists and block lists at run time. When replacing an IP access list: * For all
-        allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
-        CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
-        `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
-        returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
-        effect.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        :param label: str
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType`
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        :param enabled: bool
-          Specifies whether this IP access list is enabled.
-        :param ip_addresses: List[str] (optional)
-        
-        
-        
+
+Replaces an IP access list, specified by its ID.
+
+A list can include allow lists and block lists. See the top of this file for a description of how the
+server treats allow lists and block lists at run time. When replacing an IP access list: * For all
+allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
+CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
+`QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
+returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
+effect.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+:param label: str
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType`
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+  range. IP addresses in the block list are excluded even if they are included in an allow list.
+:param enabled: bool
+  Specifies whether this IP access list is enabled.
+:param ip_addresses: List[str] (optional)
+
+
+
 
     .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]])
 
         Update access list.
-        
-        Updates an existing IP access list, specified by its ID.
-        
-        A list can include allow lists and block lists. See the top of this file for a description of how the
-        server treats allow lists and block lists at run time.
-        
-        When updating an IP access list:
-        
-        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-        `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
-        error 400 is returned with `error_code` value `INVALID_STATE`.
-        
-        It can take a few minutes for the changes to take effect.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        :param enabled: bool (optional)
-          Specifies whether this IP access list is enabled.
-        :param ip_addresses: List[str] (optional)
-        :param label: str (optional)
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType` (optional)
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        
-        
-        
\ No newline at end of file
+
+Updates an existing IP access list, specified by its ID.
+
+A list can include allow lists and block lists. See the top of this file for a description of how the
+server treats allow lists and block lists at run time.
+
+When updating an IP access list:
+
+* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+`error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
+error 400 is returned with `error_code` value `INVALID_STATE`.
+
+It can take a few minutes for the changes to take effect.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+:param enabled: bool (optional)
+  Specifies whether this IP access list is enabled.
+:param ip_addresses: List[str] (optional)
+:param label: str (optional)
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType` (optional)
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+  range. IP addresses in the block list are excluded even if they are included in an allow list.
+
+
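
Reviewer note: a minimal sketch of creating and enumerating account IP access lists with the signatures above; the `a.ip_access_lists` attribute path is an assumption.

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import settings

a = AccountClient()

# Create an allow list. Exceeding 1000 IP/CIDR values across all lists
# returns error 400 with error_code QUOTA_EXCEEDED, per the docstring.
a.ip_access_lists.create(
    label="office",
    list_type=settings.ListType.ALLOW,
    ip_addresses=["203.0.113.0/24"],
)

# Enumerate every list configured for the account.
for info in a.ip_access_lists.list():
    print(info.label, info.list_type)
```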
diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst
index 30b50abcb..d073fc1da 100644
--- a/docs/account/settings/network_connectivity.rst
+++ b/docs/account/settings/network_connectivity.rst
@@ -5,125 +5,124 @@
 .. py:class:: NetworkConnectivityAPI
 
     These APIs provide configurations for the network connectivity of your workspaces for serverless compute
-    resources.
+resources.
 
     .. py:method:: create_network_connectivity_configuration(name: str, region: str) -> NetworkConnectivityConfiguration
 
         Create a network connectivity configuration.
-        
-        :param name: str
-          The name of the network connectivity configuration. The name can contain alphanumeric characters,
-          hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the
-          regular expression `^[0-9a-zA-Z-_]{3,30}$`.
-        :param region: str
-          The region for the network connectivity configuration. Only workspaces in the same region can be
-          attached to the network connectivity configuration.
-        
-        :returns: :class:`NetworkConnectivityConfiguration`
-        
+
+:param name: str
+  The name of the network connectivity configuration. The name can contain alphanumeric characters,
+  hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the
+  regular expression `^[0-9a-zA-Z-_]{3,30}$`.
+:param region: str
+  The region for the network connectivity configuration. Only workspaces in the same region can be
+  attached to the network connectivity configuration.
+
+:returns: :class:`NetworkConnectivityConfiguration`
+
 
     .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, resource_id: str, group_id: CreatePrivateEndpointRuleRequestGroupId) -> NccAzurePrivateEndpointRule
 
         Create a private endpoint rule.
-        
-        Create a private endpoint rule for the specified network connectivity config object. Once the object
-        is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure
-        resource.
-        
-        **IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to
-        complete the connection. To get the information of the private endpoint created, make a `GET` request
-        on the new private endpoint rule. See [serverless private link].
-        
-        [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        :param resource_id: str
-          The Azure resource ID of the target resource.
-        :param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId`
-          The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
-          storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
-        
-        :returns: :class:`NccAzurePrivateEndpointRule`
-        
+
+Create a private endpoint rule for the specified network connectivity config object. Once the object
+is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure
+resource.
+
+**IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to
+complete the connection. To get the information of the private endpoint created, make a `GET` request
+on the new private endpoint rule. See [serverless private link].
+
+[serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+:param resource_id: str
+  The Azure resource ID of the target resource.
+:param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId`
+  The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
+  storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+
+:returns: :class:`NccAzurePrivateEndpointRule`
+
 
     .. py:method:: delete_network_connectivity_configuration(network_connectivity_config_id: str)
 
         Delete a network connectivity configuration.
-        
-        Deletes a network connectivity configuration.
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        
-        
-        
+
+Deletes a network connectivity configuration.
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+
+
+
 
     .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule
 
         Delete a private endpoint rule.
-        
-        Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
-        endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
-        after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
-        set to `true` and the private endpoint is not available to your serverless compute resources.
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        :param private_endpoint_rule_id: str
-          Your private endpoint rule ID.
-        
-        :returns: :class:`NccAzurePrivateEndpointRule`
-        
+
+Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
+endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
+after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
+set to `true` and the private endpoint is not available to your serverless compute resources.
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+:param private_endpoint_rule_id: str
+  Your private endpoint rule ID.
+
+:returns: :class:`NccAzurePrivateEndpointRule`
+
 
     .. py:method:: get_network_connectivity_configuration(network_connectivity_config_id: str) -> NetworkConnectivityConfiguration
 
         Get a network connectivity configuration.
-        
-        Gets a network connectivity configuration.
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        
-        :returns: :class:`NetworkConnectivityConfiguration`
-        
+
+Gets a network connectivity configuration.
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+
+:returns: :class:`NetworkConnectivityConfiguration`
+
 
     .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule
 
         Get a private endpoint rule.
-        
-        Gets the private endpoint rule.
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        :param private_endpoint_rule_id: str
-          Your private endpoint rule ID.
-        
-        :returns: :class:`NccAzurePrivateEndpointRule`
-        
+
+Gets the private endpoint rule.
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+:param private_endpoint_rule_id: str
+  Your private endpoint rule ID.
+
+:returns: :class:`NccAzurePrivateEndpointRule`
+
 
     .. py:method:: list_network_connectivity_configurations( [, page_token: Optional[str]]) -> Iterator[NetworkConnectivityConfiguration]
 
         List network connectivity configurations.
-        
-        Gets an array of network connectivity configurations.
-        
-        :param page_token: str (optional)
-          Pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`NetworkConnectivityConfiguration`
-        
+
+Gets an array of network connectivity configurations.
+
+:param page_token: str (optional)
+  Pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`NetworkConnectivityConfiguration`
+
 
     .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccAzurePrivateEndpointRule]
 
         List private endpoint rules.
-        
-        Gets an array of private endpoint rules.
-        
-        :param network_connectivity_config_id: str
-          Your Network Connectvity Configuration ID.
-        :param page_token: str (optional)
-          Pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`NccAzurePrivateEndpointRule`
-        
\ No newline at end of file
+
+Gets an array of private endpoint rules.
+
+:param network_connectivity_config_id: str
+  Your Network Connectivity Configuration ID.
+:param page_token: str (optional)
+  Pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`NccAzurePrivateEndpointRule`
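
Reviewer note: a sketch of the create/list flow; the signatures come from the docstrings above, while the `a.network_connectivity` attribute path is an assumption.

```python
from databricks.sdk import AccountClient

a = AccountClient()

# The name must match ^[0-9a-zA-Z-_]{3,30}$ and the region must match
# the workspaces that will attach to this configuration.
ncc = a.network_connectivity.create_network_connectivity_configuration(
    name="my-ncc",
    region="eastus2",
)

# The SDK iterator follows page_token continuation automatically.
for config in a.network_connectivity.list_network_connectivity_configurations():
    print(config.name, config.region)
```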
diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst
index 54e958a28..46eec4a5d 100644
--- a/docs/account/settings/personal_compute.rst
+++ b/docs/account/settings/personal_compute.rst
@@ -5,64 +5,63 @@
 .. py:class:: PersonalComputeAPI
 
     The Personal Compute enablement setting lets you control which users can use the Personal Compute default
-    policy to create compute resources. By default all users in all workspaces have access (ON), but you can
-    change the setting to instead let individual workspaces configure access control (DELEGATE).
-    
-    There is only one instance of this setting per account. Since this setting has a default value, this
-    setting is present on all accounts even though it's never set on a given account. Deletion reverts the
-    value of the setting back to the default value.
+policy to create compute resources. By default all users in all workspaces have access (ON), but you can
+change the setting to instead let individual workspaces configure access control (DELEGATE).
+
+There is only one instance of this setting per account. Since this setting has a default value, this
+setting is present on all accounts even though it's never set on a given account. Deletion reverts the
+value of the setting back to the default value.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeletePersonalComputeSettingResponse
 
         Delete Personal Compute setting.
-        
-        Reverts back the Personal Compute setting value to default (ON)
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeletePersonalComputeSettingResponse`
-        
+
+Reverts the Personal Compute setting value to default (ON)
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeletePersonalComputeSettingResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> PersonalComputeSetting
 
         Get Personal Compute setting.
-        
-        Gets the value of the Personal Compute setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`PersonalComputeSetting`
-        
+
+Gets the value of the Personal Compute setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`PersonalComputeSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting
 
         Update Personal Compute setting.
-        
-        Updates the value of the Personal Compute setting.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`PersonalComputeSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`PersonalComputeSetting`
-        
\ No newline at end of file
+
+Updates the value of the Personal Compute setting.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`PersonalComputeSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`PersonalComputeSetting`
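
Reviewer note: a sketch of delegating Personal Compute control to individual workspaces; the dataclass and enum names follow the generated settings service and should be treated as assumptions.

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import settings

a = AccountClient()

# Switch from the default (ON) to per-workspace control (DELEGATE),
# naming the changed field explicitly in the mask.
a.settings.personal_compute.update(
    allow_missing=True,
    setting=settings.PersonalComputeSetting(
        personal_compute=settings.PersonalComputeMessage(
            value=settings.PersonalComputeMessageEnum.DELEGATE)),
    field_mask="personal_compute.value",
)
```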
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index abf1c0e45..0a0b85b8b 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -10,41 +10,41 @@
         :type: CspEnablementAccountAPI
 
         The compliance security profile settings at the account level control whether to enable it for new
-        workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-        creation, account admins can enable the compliance security profile individually for each workspace.
-        
-        This settings can be disabled so that new workspaces do not have compliance security profile enabled by
-        default.
+    workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+    creation, account admins can enable the compliance security profile individually for each workspace.
+    
+    This setting can be disabled so that new workspaces do not have the compliance security profile enabled by
+    default.
 
     .. py:property:: disable_legacy_features
         :type: DisableLegacyFeaturesAPI
 
         Disable legacy features for new Databricks workspaces.
-        
-        For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
-        provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
-        prior to 13.3LTS.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3LTS.
 
     .. py:property:: enable_ip_access_lists
         :type: EnableIpAccessListsAPI
 
         Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
-        disable restricted access based on IP addresses.
+    disable restricted access based on IP addresses.
 
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
         The enhanced security monitoring setting at the account level controls whether to enable the feature on
-        new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-        creation, account admins can enable enhanced security monitoring individually for each workspace.
+    new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+    creation, account admins can enable enhanced security monitoring individually for each workspace.
 
     .. py:property:: personal_compute
         :type: PersonalComputeAPI
 
         The Personal Compute enablement setting lets you control which users can use the Personal Compute default
-        policy to create compute resources. By default all users in all workspaces have access (ON), but you can
-        change the setting to instead let individual workspaces configure access control (DELEGATE).
-        
-        There is only one instance of this setting per account. Since this setting has a default value, this
-        setting is present on all accounts even though it's never set on a given account. Deletion reverts the
-        value of the setting back to the default value.
\ No newline at end of file
+    policy to create compute resources. By default all users in all workspaces have access (ON), but you can
+    change the setting to instead let individual workspaces configure access control (DELEGATE).
+    
+    There is only one instance of this setting per account. Since this setting has a default value, this
+    setting is present on all accounts even though it's never set on a given account. Deletion reverts the
+    value of the setting back to the default value.
\ No newline at end of file
diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst
index 2e788ec97..590fd693e 100644
--- a/docs/dbdataclasses/billing.rst
+++ b/docs/dbdataclasses/billing.rst
@@ -140,6 +140,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: LimitConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListBudgetConfigurationsResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst
index bdea23775..85ec98250 100644
--- a/docs/dbdataclasses/cleanrooms.rst
+++ b/docs/dbdataclasses/cleanrooms.rst
@@ -84,21 +84,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CleanRoomNotebookReview
-   :members:
-   :undoc-members:
-
-.. py:class:: CleanRoomNotebookReviewNotebookReviewState
-
-   .. py:attribute:: APPROVED
-      :value: "APPROVED"
-
-   .. py:attribute:: PENDING
-      :value: "PENDING"
-
-   .. py:attribute:: REJECTED
-      :value: "REJECTED"
-
 .. autoclass:: CleanRoomNotebookTaskRun
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 42b0fb462..114bd1f5b 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -255,6 +255,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SQL_EXECUTION_EXCEPTION
       :value: "SQL_EXECUTION_EXCEPTION"
 
+   .. py:attribute:: STOP_PROCESS_DUE_TO_AUTO_REGENERATE
+      :value: "STOP_PROCESS_DUE_TO_AUTO_REGENERATE"
+
    .. py:attribute:: TABLES_MISSING_EXCEPTION
       :value: "TABLES_MISSING_EXCEPTION"
 
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index af7229f34..9c0bba237 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -5,22 +5,22 @@
 .. py:class:: AppsAPI
 
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
-    Databricks services, and enable users to interact through single sign-on.
+Databricks services, and enable users to interact through single sign-on.
 
     .. py:method:: create( [, app: Optional[App], no_compute: Optional[bool]]) -> Wait[App]
 
         Create an app.
-        
-        Creates a new app.
-        
-        :param app: :class:`App` (optional)
-        :param no_compute: bool (optional)
-          If true, the app will not be started after creation.
-        
-        :returns:
-          Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_active for more details.
-        
+
+Creates a new app.
+
+:param app: :class:`App` (optional)
+:param no_compute: bool (optional)
+  If true, the app will not be started after creation.
+
+:returns:
+  Long-running operation waiter for :class:`App`.
+  See :method:wait_get_app_active for more details.
+
 
     .. py:method:: create_and_wait( [, app: Optional[App], no_compute: Optional[bool], timeout: datetime.timedelta = 0:20:00]) -> App
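
Reviewer note: a sketch of the waiter pattern these methods expose; `create()` returns a `Wait[App]` while the `*_and_wait` variants block. The `databricks.sdk.service.apps` module path and dataclass fields are assumptions for illustration.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import apps

w = WorkspaceClient()

# Blocks until the app reaches ACTIVE (default timeout 0:20:00).
app = w.apps.create_and_wait(app=apps.App(name="my-app"))

# deploy (documented below) follows the same waiter pattern.
w.apps.deploy_and_wait(
    app_name=app.name,
    app_deployment=apps.AppDeployment(
        source_code_path="/Workspace/Users/someone@example.com/my-app"))
```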
 
@@ -28,29 +28,29 @@
     .. py:method:: delete(name: str) -> App
 
         Delete an app.
-        
-        Deletes an app.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns: :class:`App`
-        
+
+Deletes an app.
+
+:param name: str
+  The name of the app.
+
+:returns: :class:`App`
+
 
     .. py:method:: deploy(app_name: str [, app_deployment: Optional[AppDeployment]]) -> Wait[AppDeployment]
 
         Create an app deployment.
-        
-        Creates an app deployment for the app with the supplied name.
-        
-        :param app_name: str
-          The name of the app.
-        :param app_deployment: :class:`AppDeployment` (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`AppDeployment`.
-          See :method:wait_get_deployment_app_succeeded for more details.
-        
+
+Creates an app deployment for the app with the supplied name.
+
+:param app_name: str
+  The name of the app.
+:param app_deployment: :class:`AppDeployment` (optional)
+
+:returns:
+  Long-running operation waiter for :class:`AppDeployment`.
+  See :method:wait_get_deployment_app_succeeded for more details.
+
 
     .. py:method:: deploy_and_wait(app_name: str [, app_deployment: Optional[AppDeployment], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
 
@@ -58,110 +58,110 @@
     .. py:method:: get(name: str) -> App
 
         Get an app.
-        
-        Retrieves information for the app with the supplied name.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns: :class:`App`
-        
+
+Retrieves information for the app with the supplied name.
+
+:param name: str
+  The name of the app.
+
+:returns: :class:`App`
+
 
     .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment
 
         Get an app deployment.
-        
-        Retrieves information for the app deployment with the supplied name and deployment id.
-        
-        :param app_name: str
-          The name of the app.
-        :param deployment_id: str
-          The unique id of the deployment.
-        
-        :returns: :class:`AppDeployment`
-        
+
+Retrieves information for the app deployment with the supplied name and deployment id.
+
+:param app_name: str
+  The name of the app.
+:param deployment_id: str
+  The unique id of the deployment.
+
+:returns: :class:`AppDeployment`
+
 
     .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse
 
         Get app permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param app_name: str
-          The app for which to get or manage permissions.
-        
-        :returns: :class:`GetAppPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param app_name: str
+  The app for which to get or manage permissions.
+
+:returns: :class:`GetAppPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(app_name: str) -> AppPermissions
 
         Get app permissions.
-        
-        Gets the permissions of an app. Apps can inherit permissions from their root object.
-        
-        :param app_name: str
-          The app for which to get or manage permissions.
-        
-        :returns: :class:`AppPermissions`
-        
+
+Gets the permissions of an app. Apps can inherit permissions from their root object.
+
+:param app_name: str
+  The app for which to get or manage permissions.
+
+:returns: :class:`AppPermissions`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App]
 
         List apps.
-        
-        Lists all apps in the workspace.
-        
-        :param page_size: int (optional)
-          Upper bound for items returned.
-        :param page_token: str (optional)
-          Pagination token to go to the next page of apps. Requests first page if absent.
-        
-        :returns: Iterator over :class:`App`
-        
+
+Lists all apps in the workspace.
+
+:param page_size: int (optional)
+  Upper bound for items returned.
+:param page_token: str (optional)
+  Pagination token to go to the next page of apps. Requests first page if absent.
+
+:returns: Iterator over :class:`App`
+
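Because `list` returns an iterator, pagination via `page_token` is handled internally; a minimal sketch:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Each page is fetched lazily as the iterator is consumed.
    for app in w.apps.list(page_size=20):
        print(app.name)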
 
     .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment]
 
         List app deployments.
-        
-        Lists all app deployments for the app with the supplied name.
-        
-        :param app_name: str
-          The name of the app.
-        :param page_size: int (optional)
-          Upper bound for items returned.
-        :param page_token: str (optional)
-          Pagination token to go to the next page of apps. Requests first page if absent.
-        
-        :returns: Iterator over :class:`AppDeployment`
-        
+
+Lists all app deployments for the app with the supplied name.
+
+:param app_name: str
+  The name of the app.
+:param page_size: int (optional)
+  Upper bound for items returned.
+:param page_token: str (optional)
+  Pagination token to go to the next page of apps. Requests first page if absent.
+
+:returns: Iterator over :class:`AppDeployment`
+
 
     .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
 
         Set app permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param app_name: str
-          The app for which to get or manage permissions.
-        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-        
-        :returns: :class:`AppPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param app_name: str
+  The app for which to get or manage permissions.
+:param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+
+:returns: :class:`AppPermissions`
+
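A sketch of replacing an app's direct permissions, assuming `AppAccessControlRequest` and `AppPermissionLevel` are importable from `databricks.sdk.service.apps`; the principal is hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.apps import (  # assumed import path
        AppAccessControlRequest, AppPermissionLevel)

    w = WorkspaceClient()

    # set_permissions replaces all direct grants on the app.
    perms = w.apps.set_permissions(
        app_name="my-app",
        access_control_list=[
            AppAccessControlRequest(
                user_name="someone@example.com",  # hypothetical principal
                permission_level=AppPermissionLevel.CAN_USE,
            )
        ],
    )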
 
     .. py:method:: start(name: str) -> Wait[App]
 
         Start an app.
-        
-        Start the last active deployment of the app in the workspace.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns:
-          Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_active for more details.
-        
+
+Start the last active deployment of the app in the workspace.
+
+:param name: str
+  The name of the app.
+
+:returns:
+  Long-running operation waiter for :class:`App`.
+  See :method:wait_get_app_active for more details.
+
 
     .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
@@ -169,16 +169,16 @@
     .. py:method:: stop(name: str) -> Wait[App]
 
         Stop an app.
-        
-        Stops the active deployment of the app in the workspace.
-        
-        :param name: str
-          The name of the app.
-        
-        :returns:
-          Long-running operation waiter for :class:`App`.
-          See :method:wait_get_app_stopped for more details.
-        
+
+Stops the active deployment of the app in the workspace.
+
+:param name: str
+  The name of the app.
+
+:returns:
+  Long-running operation waiter for :class:`App`.
+  See :method:wait_get_app_stopped for more details.
+
 
     .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
@@ -186,29 +186,29 @@
     .. py:method:: update(name: str [, app: Optional[App]]) -> App
 
         Update an app.
-        
-        Updates the app with the supplied name.
-        
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param app: :class:`App` (optional)
-        
-        :returns: :class:`App`
-        
+
+Updates the app with the supplied name.
+
+:param name: str
+  The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+  must be unique within the workspace.
+:param app: :class:`App` (optional)
+
+:returns: :class:`App`
+
 
     .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
 
         Update app permissions.
-        
-        Updates the permissions on an app. Apps can inherit permissions from their root object.
-        
-        :param app_name: str
-          The app for which to get or manage permissions.
-        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-        
-        :returns: :class:`AppPermissions`
-        
+
+Updates the permissions on an app. Apps can inherit permissions from their root object.
+
+:param app_name: str
+  The app for which to get or manage permissions.
+:param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+
+:returns: :class:`AppPermissions`
+
 
     .. py:method:: wait_get_app_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
 
diff --git a/docs/workspace/catalog/artifact_allowlists.rst b/docs/workspace/catalog/artifact_allowlists.rst
index 349bbbd0f..9f22ed335 100644
--- a/docs/workspace/catalog/artifact_allowlists.rst
+++ b/docs/workspace/catalog/artifact_allowlists.rst
@@ -5,33 +5,32 @@
 .. py:class:: ArtifactAllowlistsAPI
 
     In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so
-    that users can leverage these artifacts on compute configured with shared access mode.
+that users can leverage these artifacts on compute configured with shared access mode.
 
     .. py:method:: get(artifact_type: ArtifactType) -> ArtifactAllowlistInfo
 
         Get an artifact allowlist.
-        
-        Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
-        the **MANAGE ALLOWLIST** privilege on the metastore.
-        
-        :param artifact_type: :class:`ArtifactType`
-          The artifact type of the allowlist.
-        
-        :returns: :class:`ArtifactAllowlistInfo`
-        
+
+Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
+the **MANAGE ALLOWLIST** privilege on the metastore.
+
+:param artifact_type: :class:`ArtifactType`
+  The artifact type of the allowlist.
+
+:returns: :class:`ArtifactAllowlistInfo`
+
 
     .. py:method:: update(artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo
 
         Set an artifact allowlist.
-        
-        Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
-        the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
-        the metastore.
-        
-        :param artifact_type: :class:`ArtifactType`
-          The artifact type of the allowlist.
-        :param artifact_matchers: List[:class:`ArtifactMatcher`]
-          A list of allowed artifact match patterns.
-        
-        :returns: :class:`ArtifactAllowlistInfo`
-        
\ No newline at end of file
+
+Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
+the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
+the metastore.
+
+:param artifact_type: :class:`ArtifactType`
+  The artifact type of the allowlist.
+:param artifact_matchers: List[:class:`ArtifactMatcher`]
+  A list of allowed artifact match patterns.
+
+:returns: :class:`ArtifactAllowlistInfo`
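A hedged sketch of replacing the JAR allowlist, assuming `ArtifactType`, `ArtifactMatcher`, and `MatchType` are importable from `databricks.sdk.service.catalog`; the volume path is hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import (ArtifactMatcher, ArtifactType,
                                                MatchType)

    w = WorkspaceClient()

    # The whole allowlist for the given artifact type is replaced.
    info = w.artifact_allowlists.update(
        artifact_type=ArtifactType.LIBRARY_JAR,
        artifact_matchers=[
            ArtifactMatcher(artifact="/Volumes/main/default/libs/",
                            match_type=MatchType.PREFIX_MATCH)
        ],
    )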
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 1d6b6dc2a..6cd07861f 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -5,11 +5,11 @@
 .. py:class:: CatalogsAPI
 
     A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data
-    assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission.
-    
-    In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of
-    the workspaces in a Databricks account. Users in different workspaces can share access to the same data,
-    depending on privileges granted centrally in Unity Catalog.
+assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission.
+
+In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of
+the workspaces in a Databricks account. Users in different workspaces can share access to the same data,
+depending on privileges granted centrally in Unity Catalog.
 
     .. py:method:: create(name: str [, comment: Optional[str], connection_name: Optional[str], options: Optional[Dict[str, str]], properties: Optional[Dict[str, str]], provider_name: Optional[str], share_name: Optional[str], storage_root: Optional[str]]) -> CatalogInfo
 
@@ -30,46 +30,46 @@
             w.catalogs.delete(name=created.name, force=True)
 
         Create a catalog.
-        
-        Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
-        **CREATE_CATALOG** privilege.
-        
-        :param name: str
-          Name of catalog.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param connection_name: str (optional)
-          The name of the connection to an external data source.
-        :param options: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        :param properties: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        :param provider_name: str (optional)
-          The name of delta sharing provider.
-          
-          A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.
-        :param share_name: str (optional)
-          The name of the share under the share provider.
-        :param storage_root: str (optional)
-          Storage root URL for managed tables within catalog.
-        
-        :returns: :class:`CatalogInfo`
-        
+
+Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
+**CREATE_CATALOG** privilege.
+
+:param name: str
+  Name of catalog.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param connection_name: str (optional)
+  The name of the connection to an external data source.
+:param options: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+:param properties: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+:param provider_name: str (optional)
+  The name of the Delta Sharing provider.
+  
+  A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.
+:param share_name: str (optional)
+  The name of the share under the share provider.
+:param storage_root: str (optional)
+  Storage root URL for managed tables within catalog.
+
+:returns: :class:`CatalogInfo`
+
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a catalog.
-        
-        Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner
-        of the catalog.
-        
-        :param name: str
-          The name of the catalog.
-        :param force: bool (optional)
-          Force deletion even if the catalog is not empty.
-        
-        
-        
+
+Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner
+of the catalog.
+
+:param name: str
+  The name of the catalog.
+:param force: bool (optional)
+  Force deletion even if the catalog is not empty.
+
+
+
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> CatalogInfo
 
@@ -92,18 +92,18 @@
             w.catalogs.delete(name=created.name, force=True)
 
         Get a catalog.
-        
-        Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the
-        catalog, or a user that has the **USE_CATALOG** privilege set for their account.
-        
-        :param name: str
-          The name of the catalog.
-        :param include_browse: bool (optional)
-          Whether to include catalogs in the response for which the principal can only access selective
-          metadata for
-        
-        :returns: :class:`CatalogInfo`
-        
+
+Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the
+catalog, or a user that has the **USE_CATALOG** privilege set for their account.
+
+:param name: str
+  The name of the catalog.
+:param include_browse: bool (optional)
+  Whether to include catalogs in the response for which the principal can only access selective
+  metadata.
+
+:returns: :class:`CatalogInfo`
+
 
     .. py:method:: list( [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[CatalogInfo]
 
@@ -120,28 +120,28 @@
             all = w.catalogs.list(catalog.ListCatalogsRequest())
 
         List catalogs.
-        
-        Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
-        retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the
-        **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the
-        elements in the array.
-        
-        :param include_browse: bool (optional)
-          Whether to include catalogs in the response for which the principal can only access selective
-          metadata for
-        :param max_results: int (optional)
-          Maximum number of catalogs to return. - when set to 0, the page length is set to a server configured
-          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-          value and a server configured value; - when set to a value less than 0, an invalid parameter error
-          is returned; - If not set, all valid catalogs are returned (not recommended). - Note: The number of
-          returned catalogs might be less than the specified max_results size, even zero. The only definitive
-          indication that no further catalogs can be fetched is when the next_page_token is unset from the
-          response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CatalogInfo`
-        
+
+Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
+retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the
+**USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the
+elements in the array.
+
+:param include_browse: bool (optional)
+  Whether to include catalogs in the response for which the principal can only access selective
+  metadata.
+:param max_results: int (optional)
+  Maximum number of catalogs to return. - when set to 0, the page length is set to a server configured
+  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+  value and a server configured value; - when set to a value less than 0, an invalid parameter error
+  is returned; - If not set, all valid catalogs are returned (not recommended). - Note: The number of
+  returned catalogs might be less than the specified max_results size, even zero. The only definitive
+  indication that no further catalogs can be fetched is when the next_page_token is unset from the
+  response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`CatalogInfo`
+
 
     .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
 
@@ -164,26 +164,25 @@
             w.catalogs.delete(name=created.name, force=True)
 
         Update a catalog.
-        
-        Updates the catalog that matches the supplied name. The caller must be either the owner of the
-        catalog, or a metastore admin (when changing the owner field of the catalog).
-        
-        :param name: str
-          The name of the catalog.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
-          Whether predictive optimization should be enabled for this object and objects under it.
-        :param isolation_mode: :class:`CatalogIsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-        :param new_name: str (optional)
-          New name for the catalog.
-        :param options: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        :param owner: str (optional)
-          Username of current owner of catalog.
-        :param properties: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        
-        :returns: :class:`CatalogInfo`
-        
\ No newline at end of file
+
+Updates the catalog that matches the supplied name. The caller must be either the owner of the
+catalog, or a metastore admin (when changing the owner field of the catalog).
+
+:param name: str
+  The name of the catalog.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
+  Whether predictive optimization should be enabled for this object and objects under it.
+:param isolation_mode: :class:`CatalogIsolationMode` (optional)
+  Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+:param new_name: str (optional)
+  New name for the catalog.
+:param options: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+:param owner: str (optional)
+  Username of current owner of catalog.
+:param properties: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+
+:returns: :class:`CatalogInfo`
diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst
index b2637c2d0..32105ff61 100644
--- a/docs/workspace/catalog/connections.rst
+++ b/docs/workspace/catalog/connections.rst
@@ -5,13 +5,13 @@
 .. py:class:: ConnectionsAPI
 
     Connections allow for creating a connection to an external data source.
-    
-    A connection is an abstraction of an external data source that can be connected from Databricks Compute.
-    Creating a connection object is the first step to managing external data sources within Unity Catalog,
-    with the second step being creating a data object (catalog, schema, or table) using the connection. Data
-    objects derived from a connection can be written to or read from similar to other Unity Catalog data
-    objects based on cloud storage. Users may create different types of connections with each connection
-    having a unique set of configuration options to support credential management and other settings.
+
+A connection is an abstraction of an external data source that can be connected from Databricks Compute.
+Creating a connection object is the first step to managing external data sources within Unity Catalog,
+with the second step being creating a data object (catalog, schema, or table) using the connection. Data
+objects derived from a connection can be written to or read from similar to other Unity Catalog data
+objects based on cloud storage. Users may create different types of connections with each connection
+having a unique set of configuration options to support credential management and other settings.
 
     .. py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo
 
@@ -43,39 +43,39 @@
             w.connections.delete(name=conn_create.name)
 
         Create a connection.
-        
-        Creates a new connection
-        
-        Creates a new connection to an external data source. It allows users to specify connection details and
-        configurations for interaction with the external server.
-        
-        :param name: str
-          Name of the connection.
-        :param connection_type: :class:`ConnectionType`
-          The type of connection.
-        :param options: Dict[str,str]
-          A map of key-value properties attached to the securable.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param properties: Dict[str,str] (optional)
-          An object containing map of key-value properties attached to the connection.
-        :param read_only: bool (optional)
-          If the connection is read only.
-        
-        :returns: :class:`ConnectionInfo`
-        
+
+Creates a new connection to an external data source. It allows users to specify connection details and
+configurations for interaction with the external server.
+
+:param name: str
+  Name of the connection.
+:param connection_type: :class:`ConnectionType`
+  The type of connection.
+:param options: Dict[str,str]
+  A map of key-value properties attached to the securable.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param properties: Dict[str,str] (optional)
+  An object containing a map of key-value properties attached to the connection.
+:param read_only: bool (optional)
+  If the connection is read only.
+
+:returns: :class:`ConnectionInfo`
+
 
     .. py:method:: delete(name: str)
 
         Delete a connection.
-        
-        Deletes the connection that matches the supplied name.
-        
-        :param name: str
-          The name of the connection to be deleted.
-        
-        
-        
+
+Deletes the connection that matches the supplied name.
+
+:param name: str
+  The name of the connection to be deleted.
+
+
+
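A one-line sketch of `delete`; the connection name is hypothetical:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    w.connections.delete(name="my_connection")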
 
     .. py:method:: get(name: str) -> ConnectionInfo
 
@@ -119,14 +119,14 @@
             w.connections.delete(name=conn_create.name)
 
         Get a connection.
-        
-        Gets a connection from it's name.
-        
-        :param name: str
-          Name of the connection.
-        
-        :returns: :class:`ConnectionInfo`
-        
+
+Gets a connection from its name.
+
+:param name: str
+  Name of the connection.
+
+:returns: :class:`ConnectionInfo`
+
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ConnectionInfo]
 
@@ -143,19 +143,19 @@
             conn_list = w.connections.list(catalog.ListConnectionsRequest())
 
         List connections.
-        
-        List all connections.
-        
-        :param max_results: int (optional)
-          Maximum number of connections to return. - If not set, all connections are returned (not
-          recommended). - when set to a value greater than 0, the page length is the minimum of this value and
-          a server configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ConnectionInfo`
-        
+
+List all connections.
+
+:param max_results: int (optional)
+  Maximum number of connections to return. - If not set, all connections are returned (not
+  recommended). - when set to a value greater than 0, the page length is the minimum of this value and
+  a server configured value; - when set to 0, the page length is set to a server configured value
+  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ConnectionInfo`
+
 
     .. py:method:: update(name: str, options: Dict[str, str] [, new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo
 
@@ -197,17 +197,16 @@
             w.connections.delete(name=conn_create.name)
 
         Update a connection.
-        
-        Updates the connection that matches the supplied name.
-        
-        :param name: str
-          Name of the connection.
-        :param options: Dict[str,str]
-          A map of key-value properties attached to the securable.
-        :param new_name: str (optional)
-          New name for the connection.
-        :param owner: str (optional)
-          Username of current owner of the connection.
-        
-        :returns: :class:`ConnectionInfo`
-        
\ No newline at end of file
+
+Updates the connection that matches the supplied name.
+
+:param name: str
+  Name of the connection.
+:param options: Dict[str,str]
+  A map of key-value properties attached to the securable.
+:param new_name: str (optional)
+  New name for the connection.
+:param owner: str (optional)
+  Username of current owner of the connection.
+
+:returns: :class:`ConnectionInfo`
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
index 54b55516b..f8f0f81f5 100644
--- a/docs/workspace/catalog/credentials.rst
+++ b/docs/workspace/catalog/credentials.rst
@@ -4,65 +4,189 @@
 
 .. py:class:: CredentialsAPI
 
-    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
-    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
-    new workspace. A credential configuration encapsulates this role information, and its ID is used when
-    creating a new workspace.
-
-    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
-
-        Create credential configuration.
-        
-        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
-        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
-        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
-        ID) in the returned credential object, and configure the required access policy.
-        
-        Save the response's `credentials_id` field, which is the ID for your new credential configuration
-        object.
-        
-        For information about how to create a new workspace with this API, see [Create a new workspace using
-        the Account API]
-        
-        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-        
-        :param credentials_name: str
-          The human-readable name of the credential configuration object.
-        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
-        
-        :returns: :class:`Credential`
-        
-
-    .. py:method:: delete(credentials_id: str)
-
-        Delete credential configuration.
-        
-        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
-        delete a credential that is associated with any workspace.
-        
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
-        
-        
-        
-
-    .. py:method:: get(credentials_id: str) -> Credential
-
-        Get credential configuration.
-        
-        Gets a Databricks credential configuration object for an account, both specified by ID.
-        
-        :param credentials_id: str
-          Databricks Account API credential configuration ID
-        
-        :returns: :class:`Credential`
-        
-
-    .. py:method:: list() -> Iterator[Credential]
-
-        Get all credential configurations.
-        
-        Gets all Databricks credential configurations associated with an account specified by ID.
-        
-        :returns: Iterator over :class:`Credential`
-        
\ No newline at end of file
+    A credential represents an authentication and authorization mechanism for accessing services on your cloud
+tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+groups can access the credential.
+
+To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+privilege. The user who creates the credential can delegate ownership to another user or group to manage
+permissions on it.
+
+    .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+
+        Create a credential.
+
+Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+which should be either **SERVICE** or **STORAGE**.
+
+The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+
+:param name: str
+  The credential name. The name must be unique among storage and service credentials within the
+  metastore.
+:param aws_iam_role: :class:`AwsIamRole` (optional)
+  The AWS IAM role configuration
+:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+  The Azure managed identity configuration.
+:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+  The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+:param comment: str (optional)
+  Comment associated with the credential.
+:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+  GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+:param purpose: :class:`CredentialPurpose` (optional)
+  Indicates the purpose of the credential.
+:param read_only: bool (optional)
+  Whether the credential is usable only for read operations. Only applicable when purpose is
+  **STORAGE**.
+:param skip_validation: bool (optional)
+  Optional. Supplying true to this argument skips validation of the created set of credentials.
+
+:returns: :class:`CredentialInfo`
+
+
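A hedged sketch of creating a service credential backed by an AWS IAM role, assuming `AwsIamRole` and `CredentialPurpose` are importable from `databricks.sdk.service.catalog`; the role ARN is hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

    w = WorkspaceClient()

    cred = w.credentials.create_credential(
        name="my-service-cred",
        purpose=CredentialPurpose.SERVICE,
        aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/my-uc-role"),
    )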
+    .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]])
+
+        Delete a credential.
+
+Deletes a service or storage credential from the metastore. The caller must be an owner of the
+credential.
+
+:param name_arg: str
+  Name of the credential.
+:param force: bool (optional)
+  Force deletion even if there are dependent services (when purpose is **SERVICE**) or dependent
+  external locations and external tables (when purpose is **STORAGE**).
+
+
+
+
+    .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials
+
+        Generate a temporary service credential.
+
+Returns a set of temporary credentials generated using the specified service credential. The caller
+must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
+
+:param credential_name: str
+  The name of the service credential used to generate a temporary credential
+:param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+  The Azure cloud options to customize the requested temporary credential
+:param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+  The GCP cloud options to customize the requested temporary credential
+
+:returns: :class:`TemporaryCredentials`
+
+
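A minimal sketch of exchanging a service credential for short-lived cloud credentials; the credential name is hypothetical and `expiration_time` is an assumed field on the response:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    tmp = w.credentials.generate_temporary_service_credential(
        credential_name="my-service-cred",
    )
    print(tmp.expiration_time)  # assumed response field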
+    .. py:method:: get_credential(name_arg: str) -> CredentialInfo
+
+        Get a credential.
+
+Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+owner of the credential, or have any permission on the credential.
+
+:param name_arg: str
+  Name of the credential.
+
+:returns: :class:`CredentialInfo`
+
+
+    .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo]
+
+        List credentials.
+
+Gets an array of credentials (as __CredentialInfo__ objects).
+
+The array is limited to only the credentials that the caller has permission to access. If the caller
+is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+ordering of the elements in the array.
+
+:param max_results: int (optional)
+  Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+  to a value greater than 0, the page length is the minimum of this value and a server-configured
+  value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+  set to a value less than 0, an invalid parameter error is returned.
+:param page_token: str (optional)
+  Opaque token to retrieve the next page of results.
+:param purpose: :class:`CredentialPurpose` (optional)
+  Return only credentials for the specified purpose.
+
+:returns: Iterator over :class:`CredentialInfo`
+
+
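Since `list_credentials` returns an iterator, paging is transparent; a sketch that filters to storage credentials, assuming `CredentialPurpose` is importable from `databricks.sdk.service.catalog`:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import CredentialPurpose

    w = WorkspaceClient()

    for cred in w.credentials.list_credentials(purpose=CredentialPurpose.STORAGE):
        print(cred.name)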
+    .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+
+        Update a credential.
+
+Updates a service or storage credential on the metastore.
+
+The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+If the caller is a metastore admin, only the __owner__ field can be changed.
+
+:param name_arg: str
+  Name of the credential.
+:param aws_iam_role: :class:`AwsIamRole` (optional)
+  The AWS IAM role configuration
+:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+  The Azure managed identity configuration.
+:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+  The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+:param comment: str (optional)
+  Comment associated with the credential.
+:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+  GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+:param force: bool (optional)
+  Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+  external locations and external tables (when purpose is **STORAGE**).
+:param isolation_mode: :class:`IsolationMode` (optional)
+  Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+:param new_name: str (optional)
+  New name of credential.
+:param owner: str (optional)
+  Username of current owner of credential.
+:param read_only: bool (optional)
+  Whether the credential is usable only for read operations. Only applicable when purpose is
+  **STORAGE**.
+:param skip_validation: bool (optional)
+  Supply true to this argument to skip validation of the updated credential.
+
+:returns: :class:`CredentialInfo`
+
+
+    .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse
+
+        Validate a credential.
+
+Validates a credential.
+
+For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+credential must be provided.
+
+For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+__url__ needs to be provided. If only one of them is provided, it will be used for validation. If
+both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+ignored when checking overlapping URLs. Either the __credential_name__ or the cloud-specific
+credential must be provided.
+
+The caller must be a metastore admin or the credential owner or have the required permission on the
+metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
+
+:param aws_iam_role: :class:`AwsIamRole` (optional)
+  The AWS IAM role configuration
+:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+  The Azure managed identity configuration.
+:param credential_name: str (optional)
+  Required. The name of an existing credential or long-lived cloud credential to validate.
+:param external_location_name: str (optional)
+  The name of an existing external location to validate. Only applicable for storage credentials
+  (purpose is **STORAGE**).
+:param purpose: :class:`CredentialPurpose` (optional)
+  The purpose of the credential. This should only be used when the credential is specified.
+:param read_only: bool (optional)
+  Whether the credential is only usable for read operations. Only applicable for storage credentials
+  (purpose is **STORAGE**).
+:param url: str (optional)
+  The external location url to validate. Only applicable when purpose is **STORAGE**.
+
+:returns: :class:`ValidateCredentialResponse`
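A hedged sketch of validating a storage credential against a cloud storage URL; both the credential name and the URL are hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import CredentialPurpose

    w = WorkspaceClient()

    result = w.credentials.validate_credential(
        credential_name="my-storage-cred",
        purpose=CredentialPurpose.STORAGE,
        url="s3://my-bucket/some/prefix",
    )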
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index fc60b18f6..13e4a90f0 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -5,15 +5,15 @@
 .. py:class:: ExternalLocationsAPI
 
     An external location is an object that combines a cloud storage path with a storage credential that
-    authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
-    access-control policies that control which users and groups can access the credential. If a user does not
-    have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt
-    to authenticate to your cloud tenant on the user’s behalf.
-    
-    Databricks recommends using external locations rather than using storage credentials directly.
-    
-    To create external locations, you must be a metastore admin or a user with the
-    **CREATE_EXTERNAL_LOCATION** privilege.
+authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
+access-control policies that control which users and groups can access the credential. If a user does not
+have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt
+to authenticate to your cloud tenant on the user’s behalf.
+
+Databricks recommends using external locations rather than using storage credentials directly.
+
+To create external locations, you must be a metastore admin or a user with the
+**CREATE_EXTERNAL_LOCATION** privilege.
 
     .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo
 
@@ -46,49 +46,49 @@
             w.external_locations.delete(name=external_location.name)
 
         Create an external location.
-        
-        Creates a new external location entry in the metastore. The caller must be a metastore admin or have
-        the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
-        credential.
-        
-        :param name: str
-          Name of the external location.
-        :param url: str
-          Path URL of the external location.
-        :param credential_name: str
-          Name of the storage credential used with this location.
-        :param access_point: str (optional)
-          The AWS access point to use when accesing s3 for this external location.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param encryption_details: :class:`EncryptionDetails` (optional)
-          Encryption options that apply to clients connecting to cloud storage.
-        :param fallback: bool (optional)
-          Indicates whether fallback mode is enabled for this external location. When fallback mode is
-          enabled, the access to the location falls back to cluster credentials if UC credentials are not
-          sufficient.
-        :param read_only: bool (optional)
-          Indicates whether the external location is read-only.
-        :param skip_validation: bool (optional)
-          Skips validation of the storage credential associated with the external location.
-        
-        :returns: :class:`ExternalLocationInfo`
-        
+
+Creates a new external location entry in the metastore. The caller must be a metastore admin or have
+the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
+credential.
+
+:param name: str
+  Name of the external location.
+:param url: str
+  Path URL of the external location.
+:param credential_name: str
+  Name of the storage credential used with this location.
+:param access_point: str (optional)
+  The AWS access point to use when accessing S3 for this external location.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param encryption_details: :class:`EncryptionDetails` (optional)
+  Encryption options that apply to clients connecting to cloud storage.
+:param fallback: bool (optional)
+  Indicates whether fallback mode is enabled for this external location. When fallback mode is
+  enabled, the access to the location falls back to cluster credentials if UC credentials are not
+  sufficient.
+:param read_only: bool (optional)
+  Indicates whether the external location is read-only.
+:param skip_validation: bool (optional)
+  Skips validation of the storage credential associated with the external location.
+
+:returns: :class:`ExternalLocationInfo`
+
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete an external location.
-        
-        Deletes the specified external location from the metastore. The caller must be the owner of the
-        external location.
-        
-        :param name: str
-          Name of the external location.
-        :param force: bool (optional)
-          Force deletion even if there are dependent external tables or mounts.
-        
-        
-        
+
+Deletes the specified external location from the metastore. The caller must be the owner of the
+external location.
+
+:param name: str
+  Name of the external location.
+:param force: bool (optional)
+  Force deletion even if there are dependent external tables or mounts.
+
+
+
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> ExternalLocationInfo
 
@@ -120,18 +120,18 @@
             w.external_locations.delete(delete=created.name)
 
         Get an external location.
-        
-        Gets an external location from the metastore. The caller must be either a metastore admin, the owner
-        of the external location, or a user that has some privilege on the external location.
-        
-        :param name: str
-          Name of the external location.
-        :param include_browse: bool (optional)
-          Whether to include external locations in the response for which the principal can only access
-          selective metadata for
-        
-        :returns: :class:`ExternalLocationInfo`
-        
+
+Gets an external location from the metastore. The caller must be either a metastore admin, the owner
+of the external location, or a user that has some privilege on the external location.
+
+:param name: str
+  Name of the external location.
+:param include_browse: bool (optional)
+  Whether to include external locations in the response for which the principal can only access
+  selective metadata.
+
+:returns: :class:`ExternalLocationInfo`
+
 
     .. py:method:: list( [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ExternalLocationInfo]
 
@@ -148,24 +148,24 @@
             all = w.external_locations.list(catalog.ListExternalLocationsRequest())
 
         List external locations.
-        
-        Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
-        must be a metastore admin, the owner of the external location, or a user that has some privilege on
-        the external location. There is no guarantee of a specific ordering of the elements in the array.
-        
-        :param include_browse: bool (optional)
-          Whether to include external locations in the response for which the principal can only access
-          selective metadata for
-        :param max_results: int (optional)
-          Maximum number of external locations to return. If not set, all the external locations are returned
-          (not recommended). - when set to a value greater than 0, the page length is the minimum of this
-          value and a server configured value; - when set to 0, the page length is set to a server configured
-          value (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ExternalLocationInfo`
-        
+
+Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
+must be a metastore admin, the owner of the external location, or a user that has some privilege on
+the external location. There is no guarantee of a specific ordering of the elements in the array.
+
+:param include_browse: bool (optional)
+  Whether to include external locations in the response for which the principal can only access
+  selective metadata.
+:param max_results: int (optional)
+  Maximum number of external locations to return. If not set, all the external locations are returned
+  (not recommended). - when set to a value greater than 0, the page length is the minimum of this
+  value and a server configured value; - when set to 0, the page length is set to a server configured
+  value (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ExternalLocationInfo`
+
 
     .. py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo
 
@@ -199,38 +199,37 @@
             w.external_locations.delete(name=created.name)
 
         Update an external location.
-        
-        Updates an external location in the metastore. The caller must be the owner of the external location,
-        or be a metastore admin. In the second case, the admin can only update the name of the external
-        location.
-        
-        :param name: str
-          Name of the external location.
-        :param access_point: str (optional)
-          The AWS access point to use when accesing s3 for this external location.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param credential_name: str (optional)
-          Name of the storage credential used with this location.
-        :param encryption_details: :class:`EncryptionDetails` (optional)
-          Encryption options that apply to clients connecting to cloud storage.
-        :param fallback: bool (optional)
-          Indicates whether fallback mode is enabled for this external location. When fallback mode is
-          enabled, the access to the location falls back to cluster credentials if UC credentials are not
-          sufficient.
-        :param force: bool (optional)
-          Force update even if changing url invalidates dependent external tables or mounts.
-        :param isolation_mode: :class:`IsolationMode` (optional)
-        :param new_name: str (optional)
-          New name for the external location.
-        :param owner: str (optional)
-          The owner of the external location.
-        :param read_only: bool (optional)
-          Indicates whether the external location is read-only.
-        :param skip_validation: bool (optional)
-          Skips validation of the storage credential associated with the external location.
-        :param url: str (optional)
-          Path URL of the external location.
-        
-        :returns: :class:`ExternalLocationInfo`
-        
\ No newline at end of file
+
+Updates an external location in the metastore. The caller must be the owner of the external location,
+or be a metastore admin. In the second case, the admin can only update the name of the external
+location.
+
+:param name: str
+  Name of the external location.
+:param access_point: str (optional)
+  The AWS access point to use when accessing S3 for this external location.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param credential_name: str (optional)
+  Name of the storage credential used with this location.
+:param encryption_details: :class:`EncryptionDetails` (optional)
+  Encryption options that apply to clients connecting to cloud storage.
+:param fallback: bool (optional)
+  Indicates whether fallback mode is enabled for this external location. When fallback mode is
+  enabled, the access to the location falls back to cluster credentials if UC credentials are not
+  sufficient.
+:param force: bool (optional)
+  Force update even if changing url invalidates dependent external tables or mounts.
+:param isolation_mode: :class:`IsolationMode` (optional)
+:param new_name: str (optional)
+  New name for the external location.
+:param owner: str (optional)
+  The owner of the external location.
+:param read_only: bool (optional)
+  Indicates whether the external location is read-only.
+:param skip_validation: bool (optional)
+  Skips validation of the storage credential associated with the external location.
+:param url: str (optional)
+  Path URL of the external location.
+
+:returns: :class:`ExternalLocationInfo`
diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst
index 646488074..61537556b 100644
--- a/docs/workspace/catalog/functions.rst
+++ b/docs/workspace/catalog/functions.rst
@@ -5,113 +5,112 @@
 .. py:class:: FunctionsAPI
 
     Functions implement User-Defined Functions (UDFs) in Unity Catalog.
-    
-    The function implementation can be any SQL expression or Query, and it can be invoked wherever a table
-    reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it
-    can be referenced with the form __catalog_name__.__schema_name__.__function_name__.
+
+The function implementation can be any SQL expression or Query, and it can be invoked wherever a table
+reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it
+can be referenced with the form __catalog_name__.__schema_name__.__function_name__.
 
     .. py:method:: create(function_info: CreateFunction) -> FunctionInfo
 
         Create a function.
-        
-        **WARNING: This API is experimental and will change in future versions**
-        
-        Creates a new function
-        
-        The user must have the following permissions in order for the function to be created: -
-        **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
-        function's parent schema
-        
-        :param function_info: :class:`CreateFunction`
-          Partial __FunctionInfo__ specifying the function to be created.
-        
-        :returns: :class:`FunctionInfo`
-        
+
+**WARNING: This API is experimental and will change in future versions**
+
+Creates a new function
+
+The user must have the following permissions in order for the function to be created: -
+**USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
+function's parent schema
+
+:param function_info: :class:`CreateFunction`
+  Partial __FunctionInfo__ specifying the function to be created.
+
+:returns: :class:`FunctionInfo`
+
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a function.
-        
-        Deletes the function that matches the supplied name. For the deletion to succeed, the user must
-        satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the
-        owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog -
-        Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog
-        and the **USE_SCHEMA** privilege on its parent schema
-        
-        :param name: str
-          The fully-qualified name of the function (of the form
-          __catalog_name__.__schema_name__.__function__name__).
-        :param force: bool (optional)
-          Force deletion even if the function is notempty.
-        
-        
-        
+
+Deletes the function that matches the supplied name. For the deletion to succeed, the user must
+satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the
+owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog -
+Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog
+and the **USE_SCHEMA** privilege on its parent schema
+
+:param name: str
+  The fully-qualified name of the function (of the form
+  __catalog_name__.__schema_name__.__function_name__).
+:param force: bool (optional)
+  Force deletion even if the function is not empty.
+
+
+
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> FunctionInfo
 
         Get a function.
-        
-        Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must
-        satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's
-        parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner
-        of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the
-        **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the
-        function itself
-        
-        :param name: str
-          The fully-qualified name of the function (of the form
-          __catalog_name__.__schema_name__.__function__name__).
-        :param include_browse: bool (optional)
-          Whether to include functions in the response for which the principal can only access selective
-          metadata for
-        
-        :returns: :class:`FunctionInfo`
-        
+
+Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must
+satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's
+parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner
+of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the
+**USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the
+function itself
+
+:param name: str
+  The fully-qualified name of the function (of the form
+  __catalog_name__.__schema_name__.__function_name__).
+:param include_browse: bool (optional)
+  Whether to include functions in the response for which the principal can only access selective
+  metadata.
+
+:returns: :class:`FunctionInfo`
+
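
For illustration, a minimal sketch of the `get` call documented above (assuming workspace auth is configured in the environment; `main.default.my_func` is a placeholder three-level name):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Fully-qualified name: <catalog>.<schema>.<function>
    fn = w.functions.get(name="main.default.my_func")
    print(fn.full_name, fn.data_type)
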
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FunctionInfo]
 
         List functions.
-        
-        List functions within the specified parent catalog and schema. If the user is a metastore admin, all
-        functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege
-        on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only
-        functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is
-        no guarantee of a specific ordering of the elements in the array.
-        
-        :param catalog_name: str
-          Name of parent catalog for functions of interest.
-        :param schema_name: str
-          Parent schema of functions.
-        :param include_browse: bool (optional)
-          Whether to include functions in the response for which the principal can only access selective
-          metadata for
-        :param max_results: int (optional)
-          Maximum number of functions to return. If not set, all the functions are returned (not recommended).
-          - when set to a value greater than 0, the page length is the minimum of this value and a server
-          configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`FunctionInfo`
-        
+
+List functions within the specified parent catalog and schema. If the user is a metastore admin, all
+functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege
+on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only
+functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is
+no guarantee of a specific ordering of the elements in the array.
+
+:param catalog_name: str
+  Name of parent catalog for functions of interest.
+:param schema_name: str
+  Parent schema of functions.
+:param include_browse: bool (optional)
+  Whether to include functions in the response for which the principal can only access selective
+  metadata.
+:param max_results: int (optional)
+  Maximum number of functions to return. If not set, all the functions are returned (not recommended).
+  - when set to a value greater than 0, the page length is the minimum of this value and a server
+  configured value; - when set to 0, the page length is set to a server configured value
+  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`FunctionInfo`
+
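
A short sketch of listing functions per the signature above (`main` and `default` are placeholder catalog and schema names); the returned iterator handles `page_token` pagination internally:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # max_results=0 lets the server pick the page size (recommended above).
    for fn in w.functions.list(catalog_name="main", schema_name="default", max_results=0):
        print(fn.full_name)
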
 
     .. py:method:: update(name: str [, owner: Optional[str]]) -> FunctionInfo
 
         Update a function.
-        
-        Updates the function that matches the supplied name. Only the owner of the function can be updated. If
-        the user is not a metastore admin, the user must be a member of the group that is the new function
-        owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the
-        function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of
-        the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the
-        **USE_SCHEMA** privilege on the function's parent schema.
-        
-        :param name: str
-          The fully-qualified name of the function (of the form
-          __catalog_name__.__schema_name__.__function__name__).
-        :param owner: str (optional)
-          Username of current owner of function.
-        
-        :returns: :class:`FunctionInfo`
-        
\ No newline at end of file
+
+Updates the function that matches the supplied name. Only the owner of the function can be updated. If
+the user is not a metastore admin, the user must be a member of the group that is the new function
+owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the
+function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of
+the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the
+**USE_SCHEMA** privilege on the function's parent schema.
+
+:param name: str
+  The fully-qualified name of the function (of the form
+  __catalog_name__.__schema_name__.__function_name__).
+:param owner: str (optional)
+  Username of current owner of function.
+
+:returns: :class:`FunctionInfo`
diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst
index 8def7ff83..20c63fc27 100644
--- a/docs/workspace/catalog/grants.rst
+++ b/docs/workspace/catalog/grants.rst
@@ -5,14 +5,14 @@
 .. py:class:: GrantsAPI
 
     In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore.
-    Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or
-    schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are
-    inherited downward.
-    
-    Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that
-    granting a privilege on the catalog automatically grants the privilege to all current and future objects
-    within the catalog. Similarly, privileges granted on a schema are inherited by all current and future
-    objects within that schema.
+Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or
+schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are
+inherited downward.
+
+Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that
+granting a privilege on the catalog automatically grants the privilege to all current and future objects
+within the catalog. Similarly, privileges granted on a schema are inherited by all current and future
+objects within that schema.
 
     .. py:method:: get(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> PermissionsList
 
@@ -52,18 +52,18 @@
             w.tables.delete(full_name=table_full_name)
 
         Get permissions.
-        
-        Gets the permissions for a securable.
-        
-        :param securable_type: :class:`SecurableType`
-          Type of securable.
-        :param full_name: str
-          Full name of securable.
-        :param principal: str (optional)
-          If provided, only the permissions for the specified principal (user or group) are returned.
-        
-        :returns: :class:`PermissionsList`
-        
+
+Gets the permissions for a securable.
+
+:param securable_type: :class:`SecurableType`
+  Type of securable.
+:param full_name: str
+  Full name of securable.
+:param principal: str (optional)
+  If provided, only the permissions for the specified principal (user or group) are returned.
+
+:returns: :class:`PermissionsList`
+
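
A minimal sketch of reading grants on a table, following the signature above (the table name is a placeholder; `SecurableType` comes from the catalog service module):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import SecurableType

    w = WorkspaceClient()

    grants = w.grants.get(securable_type=SecurableType.TABLE,
                          full_name="main.default.my_table")
    for pa in grants.privilege_assignments or []:
        print(pa.principal, pa.privileges)
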
 
     .. py:method:: get_effective(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> EffectivePermissionsList
 
@@ -103,19 +103,19 @@
             w.tables.delete(full_name=table_full_name)
 
         Get effective permissions.
-        
-        Gets the effective permissions for a securable.
-        
-        :param securable_type: :class:`SecurableType`
-          Type of securable.
-        :param full_name: str
-          Full name of securable.
-        :param principal: str (optional)
-          If provided, only the effective permissions for the specified principal (user or group) are
-          returned.
-        
-        :returns: :class:`EffectivePermissionsList`
-        
+
+Gets the effective permissions for a securable.
+
+:param securable_type: :class:`SecurableType`
+  Type of securable.
+:param full_name: str
+  Full name of securable.
+:param principal: str (optional)
+  If provided, only the effective permissions for the specified principal (user or group) are
+  returned.
+
+:returns: :class:`EffectivePermissionsList`
+
 
     .. py:method:: update(securable_type: SecurableType, full_name: str [, changes: Optional[List[PermissionsChange]]]) -> PermissionsList
 
@@ -162,15 +162,14 @@
             w.tables.delete(full_name=table_full_name)
 
         Update permissions.
-        
-        Updates the permissions for a securable.
-        
-        :param securable_type: :class:`SecurableType`
-          Type of securable.
-        :param full_name: str
-          Full name of securable.
-        :param changes: List[:class:`PermissionsChange`] (optional)
-          Array of permissions change objects.
-        
-        :returns: :class:`PermissionsList`
-        
\ No newline at end of file
+
+Updates the permissions for a securable.
+
+:param securable_type: :class:`SecurableType`
+  Type of securable.
+:param full_name: str
+  Full name of securable.
+:param changes: List[:class:`PermissionsChange`] (optional)
+  Array of permissions change objects.
+
+:returns: :class:`PermissionsList`
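
To round out the section, a sketch of granting privileges via `update` (the group name `data-engineers` and the schema name are placeholders; recall from the class description that schema-level grants are inherited by current and future objects in the schema):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import (PermissionsChange, Privilege,
                                                SecurableType)

    w = WorkspaceClient()

    w.grants.update(securable_type=SecurableType.SCHEMA,
                    full_name="main.default",
                    changes=[
                        PermissionsChange(principal="data-engineers",
                                          add=[Privilege.USE_SCHEMA, Privilege.SELECT]),
                    ])
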
diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst
index 01a936e0b..f1ab5ff61 100644
--- a/docs/workspace/catalog/metastores.rst
+++ b/docs/workspace/catalog/metastores.rst
@@ -5,16 +5,16 @@
 .. py:class:: MetastoresAPI
 
     A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and
-    views) and the permissions that govern access to them. Databricks account admins can create metastores and
-    assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use
-    Unity Catalog, it must have a Unity Catalog metastore attached.
-    
-    Each metastore is configured with a root storage location in a cloud storage account. This storage
-    location is used for metadata and managed tables data.
-    
-    NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity
-    Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is
-    available in a catalog named hive_metastore.
+views) and the permissions that govern access to them. Databricks account admins can create metastores and
+assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use
+Unity Catalog, it must have a Unity Catalog metastore attached.
+
+Each metastore is configured with a root storage location in a cloud storage account. This storage
+location is used for metadata and managed tables data.
+
+NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity
+Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is
+available in a catalog named hive_metastore.
 
     .. py:method:: assign(workspace_id: int, metastore_id: str, default_catalog_name: str)
 
@@ -42,21 +42,21 @@
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Create an assignment.
-        
-        Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
-        overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
-        admin.
-        
-        :param workspace_id: int
-          A workspace ID.
-        :param metastore_id: str
-          The unique ID of the metastore.
-        :param default_catalog_name: str
-          The name of the default catalog in the metastore. This field is depracted. Please use "Default
-          Namespace API" to configure the default catalog for a Databricks workspace.
-        
-        
-        
+
+Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
+overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
+admin.
+
+:param workspace_id: int
+  A workspace ID.
+:param metastore_id: str
+  The unique ID of the metastore.
+:param default_catalog_name: str
+  The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+  Namespace API" to configure the default catalog for a Databricks workspace.
+
+
+
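
A sketch of the assignment call (both IDs are placeholders; note from the description that re-assigning the same `workspace_id` overwrites the existing assignment):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    w.metastores.assign(workspace_id=1234567890,
                        metastore_id="12a345b6-7890-1cd2-3456-e789f0a12b34",
                        default_catalog_name="main")
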
 
     .. py:method:: create(name: str [, region: Optional[str], storage_root: Optional[str]]) -> MetastoreInfo
 
@@ -80,23 +80,23 @@
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Create a metastore.
-        
-        Creates a new metastore based on a provided name and optional storage root path. By default (if the
-        __owner__ field is not set), the owner of the new metastore is the user calling the
-        __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is
-        assigned to the System User instead.
-        
-        :param name: str
-          The user-specified name of the metastore.
-        :param region: str (optional)
-          Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in
-          the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted,
-          the region of the workspace receiving the request will be used.
-        :param storage_root: str (optional)
-          The storage root URL for metastore
-        
-        :returns: :class:`MetastoreInfo`
-        
+
+Creates a new metastore based on a provided name and optional storage root path. By default (if the
+__owner__ field is not set), the owner of the new metastore is the user calling the
+__createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is
+assigned to the System User instead.
+
+:param name: str
+  The user-specified name of the metastore.
+:param region: str (optional)
+  Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in
+  the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted,
+  the region of the workspace receiving the request will be used.
+:param storage_root: str (optional)
+  The storage root URL for metastore
+
+:returns: :class:`MetastoreInfo`
+
 
     .. py:method:: current() -> MetastoreAssignment
 
@@ -112,25 +112,25 @@
             current_metastore = w.metastores.current()
 
         Get metastore assignment for workspace.
-        
-        Gets the metastore assignment for the workspace being accessed.
-        
-        :returns: :class:`MetastoreAssignment`
-        
+
+Gets the metastore assignment for the workspace being accessed.
+
+:returns: :class:`MetastoreAssignment`
+
 
     .. py:method:: delete(id: str [, force: Optional[bool]])
 
         Delete a metastore.
-        
-        Deletes a metastore. The caller must be a metastore admin.
-        
-        :param id: str
-          Unique ID of the metastore.
-        :param force: bool (optional)
-          Force deletion even if the metastore is not empty. Default is false.
-        
-        
-        
+
+Deletes a metastore. The caller must be a metastore admin.
+
+:param id: str
+  Unique ID of the metastore.
+:param force: bool (optional)
+  Force deletion even if the metastore is not empty. Default is false.
+
+
+
 
     .. py:method:: get(id: str) -> MetastoreInfo
 
@@ -156,15 +156,15 @@
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Get a metastore.
-        
-        Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this
-        info.
-        
-        :param id: str
-          Unique ID of the metastore.
-        
-        :returns: :class:`MetastoreInfo`
-        
+
+Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this
+info.
+
+:param id: str
+  Unique ID of the metastore.
+
+:returns: :class:`MetastoreInfo`
+
 
     .. py:method:: list() -> Iterator[MetastoreInfo]
 
@@ -180,12 +180,12 @@
             all = w.metastores.list()
 
         List metastores.
-        
-        Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin
-        to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.
-        
-        :returns: Iterator over :class:`MetastoreInfo`
-        
+
+Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin
+to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.
+
+:returns: Iterator over :class:`MetastoreInfo`
+
 
     .. py:method:: summary() -> GetMetastoreSummaryResponse
 
@@ -201,12 +201,12 @@
             summary = w.metastores.summary()
 
         Get a metastore summary.
-        
-        Gets information about a metastore. This summary includes the storage credential, the cloud vendor,
-        the cloud region, and the global metastore ID.
-        
-        :returns: :class:`GetMetastoreSummaryResponse`
-        
+
+Gets information about a metastore. This summary includes the storage credential, the cloud vendor,
+the cloud region, and the global metastore ID.
+
+:returns: :class:`GetMetastoreSummaryResponse`
+
 
     .. py:method:: unassign(workspace_id: int, metastore_id: str)
 
@@ -234,16 +234,16 @@
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Delete an assignment.
-        
-        Deletes a metastore assignment. The caller must be an account administrator.
-        
-        :param workspace_id: int
-          A workspace ID.
-        :param metastore_id: str
-          Query for the ID of the metastore to delete.
-        
-        
-        
+
+Deletes a metastore assignment. The caller must be an account administrator.
+
+:param workspace_id: int
+  A workspace ID.
+:param metastore_id: str
+  Query for the ID of the metastore to delete.
+
+
+
 
     .. py:method:: update(id: str [, delta_sharing_organization_name: Optional[str], delta_sharing_recipient_token_lifetime_in_seconds: Optional[int], delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope], new_name: Optional[str], owner: Optional[str], privilege_model_version: Optional[str], storage_root_credential_id: Optional[str]]) -> MetastoreInfo
 
@@ -269,47 +269,46 @@
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Update a metastore.
-        
-        Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__
-        field is set to the empty string (**""**), the ownership is updated to the System User.
-        
-        :param id: str
-          Unique ID of the metastore.
-        :param delta_sharing_organization_name: str (optional)
-          The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta
-          Sharing as the official name.
-        :param delta_sharing_recipient_token_lifetime_in_seconds: int (optional)
-          The lifetime of delta sharing recipient token in seconds.
-        :param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional)
-          The scope of Delta Sharing enabled for the metastore.
-        :param new_name: str (optional)
-          New name for the metastore.
-        :param owner: str (optional)
-          The owner of the metastore.
-        :param privilege_model_version: str (optional)
-          Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).
-        :param storage_root_credential_id: str (optional)
-          UUID of storage credential to access the metastore storage_root.
-        
-        :returns: :class:`MetastoreInfo`
-        
+
+Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__
+field is set to the empty string (**""**), the ownership is updated to the System User.
+
+:param id: str
+  Unique ID of the metastore.
+:param delta_sharing_organization_name: str (optional)
+  The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta
+  Sharing as the official name.
+:param delta_sharing_recipient_token_lifetime_in_seconds: int (optional)
+  The lifetime of delta sharing recipient token in seconds.
+:param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional)
+  The scope of Delta Sharing enabled for the metastore.
+:param new_name: str (optional)
+  New name for the metastore.
+:param owner: str (optional)
+  The owner of the metastore.
+:param privilege_model_version: str (optional)
+  Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).
+:param storage_root_credential_id: str (optional)
+  UUID of storage credential to access the metastore storage_root.
+
+:returns: :class:`MetastoreInfo`
+
 
     .. py:method:: update_assignment(workspace_id: int [, default_catalog_name: Optional[str], metastore_id: Optional[str]])
 
         Update an assignment.
-        
-        Updates a metastore assignment. This operation can be used to update __metastore_id__ or
-        __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore.
-        The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a
-        Workspace admin.
-        
-        :param workspace_id: int
-          A workspace ID.
-        :param default_catalog_name: str (optional)
-          The name of the default catalog in the metastore. This field is depracted. Please use "Default
-          Namespace API" to configure the default catalog for a Databricks workspace.
-        :param metastore_id: str (optional)
-          The unique ID of the metastore.
-        
-        
-        
\ No newline at end of file
+
+Updates a metastore assignment. This operation can be used to update __metastore_id__ or
+__default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore.
+The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a
+Workspace admin.
+
+:param workspace_id: int
+  A workspace ID.
+:param default_catalog_name: str (optional)
+  The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+  Namespace API" to configure the default catalog for a Databricks workspace.
+:param metastore_id: str (optional)
+  The unique ID of the metastore.
+
+
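
A sketch of switching a workspace's default catalog without touching the metastore binding (placeholder IDs; only the fields you pass are changed):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Updating metastore_id as well would require an account admin, per the notes above.
    w.metastores.update_assignment(workspace_id=1234567890,
                                   default_catalog_name="main")
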
diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst
index bae6f25f8..018379273 100644
--- a/docs/workspace/catalog/model_versions.rst
+++ b/docs/workspace/catalog/model_versions.rst
@@ -5,125 +5,124 @@
 .. py:class:: ModelVersionsAPI
 
     Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
-    provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
-    workspaces.
-    
-    This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more
-    details, see the [registered models API docs](/api/workspace/registeredmodels).
+provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
+workspaces.
+
+This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more
+details, see the [registered models API docs](/api/workspace/registeredmodels).
 
     .. py:method:: delete(full_name: str, version: int)
 
         Delete a Model Version.
-        
-        Deletes a model version from the specified registered model. Any aliases assigned to the model version
-        will also be deleted.
-        
-        The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
-        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the model version
-        :param version: int
-          The integer version number of the model version
-        
-        
-        
+
+Deletes a model version from the specified registered model. Any aliases assigned to the model version
+will also be deleted.
+
+The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
+the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the model version
+:param version: int
+  The integer version number of the model version
+
+
+
 
     .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo
 
         Get a Model Version.
-        
-        Get a model version.
-        
-        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent
-        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the model version
-        :param version: int
-          The integer version number of the model version
-        :param include_aliases: bool (optional)
-          Whether to include aliases associated with the model version in the response
-        :param include_browse: bool (optional)
-          Whether to include model versions in the response for which the principal can only access selective
-          metadata for
-        
-        :returns: :class:`ModelVersionInfo`
-        
+
+Get a model version.
+
+The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent
+registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the model version
+:param version: int
+  The integer version number of the model version
+:param include_aliases: bool (optional)
+  Whether to include aliases associated with the model version in the response
+:param include_browse: bool (optional)
+  Whether to include model versions in the response for which the principal can only access selective
+  metadata.
+
+:returns: :class:`ModelVersionInfo`
+
 
     .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo
 
         Get Model Version By Alias.
-        
-        Get a model version by alias.
-        
-        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
-        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the registered model
-        :param alias: str
-          The name of the alias
-        :param include_aliases: bool (optional)
-          Whether to include aliases associated with the model version in the response
-        
-        :returns: :class:`ModelVersionInfo`
-        
+
+Get a model version by alias.
+
+The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
+registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the registered model
+:param alias: str
+  The name of the alias
+:param include_aliases: bool (optional)
+  Whether to include aliases associated with the model version in the response
+
+:returns: :class:`ModelVersionInfo`
+
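
A minimal sketch of resolving an alias to a concrete version (the model and alias names are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    mv = w.model_versions.get_by_alias(full_name="main.default.my_model",
                                       alias="champion")
    print(mv.version, mv.status)
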
 
     .. py:method:: list(full_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ModelVersionInfo]
 
         List Model Versions.
-        
-        List model versions. You can list model versions under a particular schema, or list all model versions
-        in the current metastore.
-        
-        The returned models are filtered based on the privileges of the calling user. For example, the
-        metastore admin is able to list all the model versions. A regular user needs to be the owner or have
-        the **EXECUTE** privilege on the parent registered model to recieve the model versions in the
-        response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        There is no guarantee of a specific ordering of the elements in the response. The elements in the
-        response will not contain any aliases or tags.
-        
-        :param full_name: str
-          The full three-level name of the registered model under which to list model versions
-        :param include_browse: bool (optional)
-          Whether to include model versions in the response for which the principal can only access selective
-          metadata for
-        :param max_results: int (optional)
-          Maximum number of model versions to return. If not set, the page length is set to a server
-          configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length is the
-          minimum of this value and a server configured value(1000, as of 1/3/2024); - when set to 0, the page
-          length is set to a server configured value (100, as of 1/3/2024) (recommended); - when set to a
-          value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ModelVersionInfo`
-        
+
+List model versions. You can list model versions under a particular schema, or list all model versions
+in the current metastore.
+
+The returned models are filtered based on the privileges of the calling user. For example, the
+metastore admin is able to list all the model versions. A regular user needs to be the owner or have
+the **EXECUTE** privilege on the parent registered model to receive the model versions in the
+response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+There is no guarantee of a specific ordering of the elements in the response. The elements in the
+response will not contain any aliases or tags.
+
+:param full_name: str
+  The full three-level name of the registered model under which to list model versions
+:param include_browse: bool (optional)
+  Whether to include model versions in the response for which the principal can only access selective
+  metadata.
+:param max_results: int (optional)
+  Maximum number of model versions to return. If not set, the page length is set to a server
+  configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length is the
+  minimum of this value and a server configured value (1000, as of 1/3/2024); - when set to 0, the page
+  length is set to a server configured value (100, as of 1/3/2024) (recommended); - when set to a
+  value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ModelVersionInfo`
+
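
A sketch of iterating model versions (placeholder model name); since list responses omit aliases and tags, fetch a single version with `get` when you need full detail:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # max_results=0 defers the page size to the server (recommended above).
    for mv in w.model_versions.list(full_name="main.default.my_model", max_results=0):
        print(mv.version)
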
 
     .. py:method:: update(full_name: str, version: int [, comment: Optional[str]]) -> ModelVersionInfo
 
         Update a Model Version.
-        
-        Updates the specified model version.
-        
-        The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
-        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        Currently only the comment of the model version can be updated.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the model version
-        :param version: int
-          The integer version number of the model version
-        :param comment: str (optional)
-          The comment attached to the model version
-        
-        :returns: :class:`ModelVersionInfo`
-        
\ No newline at end of file
+
+Updates the specified model version.
+
+The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
+the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+Currently only the comment of the model version can be updated.
+
+:param full_name: str
+  The three-level (fully qualified) name of the model version
+:param version: int
+  The integer version number of the model version
+:param comment: str (optional)
+  The comment attached to the model version
+
+:returns: :class:`ModelVersionInfo`
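
Since only the comment is currently updatable, a minimal sketch looks like this (placeholder names):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    mv = w.model_versions.update(full_name="main.default.my_model",
                                 version=1,
                                 comment="Validated against the holdout set")
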
diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst
index d0119657f..fe4cee905 100644
--- a/docs/workspace/catalog/online_tables.rst
+++ b/docs/workspace/catalog/online_tables.rst
@@ -9,16 +9,16 @@
     .. py:method:: create( [, table: Optional[OnlineTable]]) -> Wait[OnlineTable]
 
         Create an Online Table.
-        
-        Create a new Online Table.
-        
-        :param table: :class:`OnlineTable` (optional)
-          Online Table information.
-        
-        :returns:
-          Long-running operation waiter for :class:`OnlineTable`.
-          See :method:wait_get_online_table_active for more details.
-        
+
+Create a new Online Table.
+
+:param table: :class:`OnlineTable` (optional)
+  Online Table information.
+
+:returns:
+  Long-running operation waiter for :class:`OnlineTable`.
+  See :method:wait_get_online_table_active for more details.
+
 
     .. py:method:: create_and_wait( [, table: Optional[OnlineTable], timeout: datetime.timedelta = 0:20:00]) -> OnlineTable
 
@@ -26,27 +26,27 @@
     .. py:method:: delete(name: str)
 
         Delete an Online Table.
-        
-        Delete an online table. Warning: This will delete all the data in the online table. If the source
-        Delta table was deleted or modified since this Online Table was created, this will lose the data
-        forever!
-        
-        :param name: str
-          Full three-part (catalog, schema, table) name of the table.
-        
-        
-        
+
+Delete an online table. Warning: This will delete all the data in the online table. If the source
+Delta table was deleted or modified since this Online Table was created, this will lose the data
+forever!
+
+:param name: str
+  Full three-part (catalog, schema, table) name of the table.
+
+
+
 
     .. py:method:: get(name: str) -> OnlineTable
 
         Get an Online Table.
-        
-        Get information about an existing online table and its status.
-        
-        :param name: str
-          Full three-part (catalog, schema, table) name of the table.
-        
-        :returns: :class:`OnlineTable`
-        
+
+Get information about an existing online table and its status.
+
+:param name: str
+  Full three-part (catalog, schema, table) name of the table.
+
+:returns: :class:`OnlineTable`
+
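
A sketch of inspecting an online table (placeholder name; the `status` field on the returned `OnlineTable` is assumed here to carry the provisioning state):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    ot = w.online_tables.get(name="main.default.my_online_table")
    print(ot.name, ot.status)
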
 
     .. py:method:: wait_get_online_table_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[OnlineTable], None]]) -> OnlineTable
diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst
index 93f05b69a..8e71050f2 100644
--- a/docs/workspace/catalog/quality_monitors.rst
+++ b/docs/workspace/catalog/quality_monitors.rst
@@ -5,255 +5,254 @@
 .. py:class:: QualityMonitorsAPI
 
     A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics
-    tables and a dashboard that you can use to monitor table health and set alerts.
-    
-    Most write operations require the user to be the owner of the table (or its parent schema or parent
-    catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have
-    **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).
+tables and a dashboard that you can use to monitor table health and set alerts.
+
+Most write operations require the user to be the owner of the table (or its parent schema or parent
+catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have
+**SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).
 
     .. py:method:: cancel_refresh(table_name: str, refresh_id: str)
 
         Cancel refresh.
-        
-        Cancel an active monitor refresh for the given refresh ID.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-        owner of the table
-        
-        Additionally, the call must be made from the workspace where the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        :param refresh_id: str
-          ID of the refresh.
-        
-        
-        
+
+Cancel an active monitor refresh for the given refresh ID.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+owner of the table
+
+Additionally, the call must be made from the workspace where the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+:param refresh_id: str
+  ID of the refresh.
+
+
+
 
     .. py:method:: create(table_name: str, assets_dir: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], skip_builtin_dashboard: Optional[bool], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries], warehouse_id: Optional[str]]) -> MonitorInfo
 
         Create a table monitor.
-        
-        Creates a new monitor for the specified table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the
-        table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's
-        parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3.
-        have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
-        the table's parent schema - be an owner of the table.
-        
-        Workspace assets, such as the dashboard, will be created in the workspace where this call was made.
-        
-        :param table_name: str
-          Full name of the table.
-        :param assets_dir: str
-          The directory to store monitoring assets (e.g. dashboard, metric tables).
-        :param output_schema_name: str
-          Schema where output metric tables are created.
-        :param baseline_table_name: str (optional)
-          Name of the baseline table from which drift metrics are computed from. Columns in the monitored
-          table should also be present in the baseline table.
-        :param custom_metrics: List[:class:`MonitorMetric`] (optional)
-          Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
-          (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
-        :param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
-          The data classification config for the monitor.
-        :param inference_log: :class:`MonitorInferenceLog` (optional)
-          Configuration for monitoring inference logs.
-        :param notifications: :class:`MonitorNotifications` (optional)
-          The notification settings for the monitor.
-        :param schedule: :class:`MonitorCronSchedule` (optional)
-          The schedule for automatically updating and refreshing metric tables.
-        :param skip_builtin_dashboard: bool (optional)
-          Whether to skip creating a default dashboard summarizing data quality metrics.
-        :param slicing_exprs: List[str] (optional)
-          List of column expressions to slice data with for targeted analysis. The data is grouped by each
-          expression independently, resulting in a separate slice for each predicate and its complements. For
-          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot: :class:`MonitorSnapshot` (optional)
-          Configuration for monitoring snapshot tables.
-        :param time_series: :class:`MonitorTimeSeries` (optional)
-          Configuration for monitoring time series tables.
-        :param warehouse_id: str (optional)
-          Optional argument to specify the warehouse for dashboard creation. If not specified, the first
-          running warehouse will be used.
-        
-        :returns: :class:`MonitorInfo`
-        
+
+Creates a new monitor for the specified table.
+
+The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the
+table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's
+parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3.
+have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
+the table's parent schema - be an owner of the table.
+
+Workspace assets, such as the dashboard, will be created in the workspace where this call was made.
+
+:param table_name: str
+  Full name of the table.
+:param assets_dir: str
+  The directory to store monitoring assets (e.g. dashboard, metric tables).
+:param output_schema_name: str
+  Schema where output metric tables are created.
+:param baseline_table_name: str (optional)
+  Name of the baseline table from which drift metrics are computed. Columns in the monitored
+  table should also be present in the baseline table.
+:param custom_metrics: List[:class:`MonitorMetric`] (optional)
+  Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
+  (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
+:param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
+  The data classification config for the monitor.
+:param inference_log: :class:`MonitorInferenceLog` (optional)
+  Configuration for monitoring inference logs.
+:param notifications: :class:`MonitorNotifications` (optional)
+  The notification settings for the monitor.
+:param schedule: :class:`MonitorCronSchedule` (optional)
+  The schedule for automatically updating and refreshing metric tables.
+:param skip_builtin_dashboard: bool (optional)
+  Whether to skip creating a default dashboard summarizing data quality metrics.
+:param slicing_exprs: List[str] (optional)
+  List of column expressions to slice data with for targeted analysis. The data is grouped by each
+  expression independently, resulting in a separate slice for each predicate and its complements. For
+  high-cardinality columns, only the top 100 unique values by frequency will generate slices.
+:param snapshot: :class:`MonitorSnapshot` (optional)
+  Configuration for monitoring snapshot tables.
+:param time_series: :class:`MonitorTimeSeries` (optional)
+  Configuration for monitoring time series tables.
+:param warehouse_id: str (optional)
+  Optional argument to specify the warehouse for dashboard creation. If not specified, the first
+  running warehouse will be used.
+
+:returns: :class:`MonitorInfo`
+
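
A minimal sketch of creating a snapshot-profile monitor (the table, output schema, and assets directory are placeholders, and `MonitorSnapshot()` is assumed to take no arguments for a plain snapshot profile):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import MonitorSnapshot

    w = WorkspaceClient()

    info = w.quality_monitors.create(table_name="main.default.my_table",
                                     assets_dir="/Workspace/Users/me@example.com/monitoring",
                                     output_schema_name="main.monitoring",
                                     snapshot=MonitorSnapshot())
    print(info.status)
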
 
     .. py:method:: delete(table_name: str)
 
         Delete a table monitor.
-        
-        Deletes a monitor for the specified table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-        owner of the table.
-        
-        Additionally, the call must be made from the workspace where the monitor was created.
-        
-        Note that the metric tables and dashboard will not be deleted as part of this call; those assets must
-        be manually cleaned up (if desired).
-        
-        :param table_name: str
-          Full name of the table.
-        
-        
-        
+
+Deletes a monitor for the specified table.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+owner of the table.
+
+Additionally, the call must be made from the workspace where the monitor was created.
+
+Note that the metric tables and dashboard will not be deleted as part of this call; those assets must
+be manually cleaned up (if desired).
+
+:param table_name: str
+  Full name of the table.
+
+
+
 
     .. py:method:: get(table_name: str) -> MonitorInfo
 
         Get a table monitor.
-        
-        Gets a monitor for the specified table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema. 3. have the following
-        permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent
-        schema - **SELECT** privilege on the table.
-        
-        The returned information includes configuration values, as well as information on assets created by
-        the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different
-        workspace than where the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        
-        :returns: :class:`MonitorInfo`
-        
+
+Gets a monitor for the specified table.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema. 3. have the following
+permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent
+schema - **SELECT** privilege on the table.
+
+The returned information includes configuration values, as well as information on assets created by
+the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different
+workspace than where the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+
+:returns: :class:`MonitorInfo`
+
 
     .. py:method:: get_refresh(table_name: str, refresh_id: str) -> MonitorRefreshInfo
 
         Get refresh.
-        
-        Gets info about a specific monitor refresh using the given refresh ID.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
-        **SELECT** privilege on the table.
-        
-        Additionally, the call must be made from the workspace where the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        :param refresh_id: str
-          ID of the refresh.
-        
-        :returns: :class:`MonitorRefreshInfo`
-        
+
+Gets info about a specific monitor refresh using the given refresh ID.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+**SELECT** privilege on the table.
+
+Additionally, the call must be made from the workspace where the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+:param refresh_id: str
+  ID of the refresh.
+
+:returns: :class:`MonitorRefreshInfo`
+
 
     .. py:method:: list_refreshes(table_name: str) -> MonitorRefreshListResponse
 
         List refreshes.
-        
-        Gets an array containing the history of the most recent refreshes (up to 25) for this table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
-        **SELECT** privilege on the table.
-        
-        Additionally, the call must be made from the workspace where the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        
-        :returns: :class:`MonitorRefreshListResponse`
-        
+
+Gets an array containing the history of the most recent refreshes (up to 25) for this table.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+**SELECT** privilege on the table.
+
+Additionally, the call must be made from the workspace where the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+
+:returns: :class:`MonitorRefreshListResponse`
+
 
     .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse
 
         Regenerate a monitoring dashboard.
-        
-        Regenerates the monitoring dashboard for the specified table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-        owner of the table
-        
-        The call must be made from the workspace where the monitor was created. The dashboard will be
-        regenerated in the assets directory that was specified when the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        :param warehouse_id: str (optional)
-          Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
-          running warehouse will be used.
-        
-        :returns: :class:`RegenerateDashboardResponse`
-        
+
+Regenerates the monitoring dashboard for the specified table.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+owner of the table
+
+The call must be made from the workspace where the monitor was created. The dashboard will be
+regenerated in the assets directory that was specified when the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+:param warehouse_id: str (optional)
+  Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
+  running warehouse will be used.
+
+:returns: :class:`RegenerateDashboardResponse`
+
 
     .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo
 
         Queue a metric refresh for a monitor.
-        
-        Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
-        background.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-        owner of the table
-        
-        Additionally, the call must be made from the workspace where the monitor was created.
-        
-        :param table_name: str
-          Full name of the table.
-        
-        :returns: :class:`MonitorRefreshInfo`
-        
+
+Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
+background.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+owner of the table
+
+Additionally, the call must be made from the workspace where the monitor was created.
+
+:param table_name: str
+  Full name of the table.
+
+:returns: :class:`MonitorRefreshInfo`
+
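+A minimal sketch (same assumptions as above):
+
+.. code-block:: python
+
+    # Queue a metric refresh; it executes in the background.
+    refresh = w.quality_monitors.run_refresh(table_name="main.sales.transactions")
+    print(refresh.refresh_id, refresh.state)
+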
 
     .. py:method:: update(table_name: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], dashboard_id: Optional[str], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries]]) -> MonitorInfo
 
         Update a table monitor.
-        
-        Updates a monitor for the specified table.
-        
-        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-        owner of the table.
-        
-        Additionally, the call must be made from the workspace where the monitor was created, and the caller
-        must be the original creator of the monitor.
-        
-        Certain configuration fields, such as output asset identifiers, cannot be updated.
-        
-        :param table_name: str
-          Full name of the table.
-        :param output_schema_name: str
-          Schema where output metric tables are created.
-        :param baseline_table_name: str (optional)
-          Name of the baseline table from which drift metrics are computed from. Columns in the monitored
-          table should also be present in the baseline table.
-        :param custom_metrics: List[:class:`MonitorMetric`] (optional)
-          Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
-          (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
-        :param dashboard_id: str (optional)
-          Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in PENDING
-          state.
-        :param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
-          The data classification config for the monitor.
-        :param inference_log: :class:`MonitorInferenceLog` (optional)
-          Configuration for monitoring inference logs.
-        :param notifications: :class:`MonitorNotifications` (optional)
-          The notification settings for the monitor.
-        :param schedule: :class:`MonitorCronSchedule` (optional)
-          The schedule for automatically updating and refreshing metric tables.
-        :param slicing_exprs: List[str] (optional)
-          List of column expressions to slice data with for targeted analysis. The data is grouped by each
-          expression independently, resulting in a separate slice for each predicate and its complements. For
-          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-        :param snapshot: :class:`MonitorSnapshot` (optional)
-          Configuration for monitoring snapshot tables.
-        :param time_series: :class:`MonitorTimeSeries` (optional)
-          Configuration for monitoring time series tables.
-        
-        :returns: :class:`MonitorInfo`
-        
\ No newline at end of file
+
+Updates a monitor for the specified table.
+
+The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+owner of the table.
+
+Additionally, the call must be made from the workspace where the monitor was created, and the caller
+must be the original creator of the monitor.
+
+Certain configuration fields, such as output asset identifiers, cannot be updated.
+
+:param table_name: str
+  Full name of the table.
+:param output_schema_name: str
+  Schema where output metric tables are created.
+:param baseline_table_name: str (optional)
+  Name of the baseline table from which drift metrics are computed. Columns in the monitored
+  table should also be present in the baseline table.
+:param custom_metrics: List[:class:`MonitorMetric`] (optional)
+  Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
+  (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
+:param dashboard_id: str (optional)
+  ID of the dashboard that visualizes the computed metrics. This can be empty if the monitor is in
+  the PENDING state.
+:param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
+  The data classification config for the monitor.
+:param inference_log: :class:`MonitorInferenceLog` (optional)
+  Configuration for monitoring inference logs.
+:param notifications: :class:`MonitorNotifications` (optional)
+  The notification settings for the monitor.
+:param schedule: :class:`MonitorCronSchedule` (optional)
+  The schedule for automatically updating and refreshing metric tables.
+:param slicing_exprs: List[str] (optional)
+  List of column expressions to slice data with for targeted analysis. The data is grouped by each
+  expression independently, resulting in a separate slice for each predicate and its complements. For
+  high-cardinality columns, only the top 100 unique values by frequency will generate slices.
+:param snapshot: :class:`MonitorSnapshot` (optional)
+  Configuration for monitoring snapshot tables.
+:param time_series: :class:`MonitorTimeSeries` (optional)
+  Configuration for monitoring time series tables.
+
+:returns: :class:`MonitorInfo`
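+
+A minimal sketch of updating a monitor's schedule (assuming ``w.quality_monitors``, and that
+:class:`MonitorCronSchedule` exposes ``quartz_cron_expression`` and ``timezone_id``; the table and
+schema names are illustrative):
+
+.. code-block:: python
+
+    from databricks.sdk.service.catalog import MonitorCronSchedule
+
+    w.quality_monitors.update(
+        table_name="main.sales.transactions",
+        output_schema_name="main.monitoring",
+        schedule=MonitorCronSchedule(
+            quartz_cron_expression="0 0 12 * * ?",  # daily at noon
+            timezone_id="UTC",
+        ),
+    )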
diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst
index b05a702b5..cba3cbc96 100644
--- a/docs/workspace/catalog/registered_models.rst
+++ b/docs/workspace/catalog/registered_models.rst
@@ -5,197 +5,196 @@
 .. py:class:: RegisteredModelsAPI
 
     Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
-    provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
-    workspaces.
-    
-    An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace.
-    Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating
-    new model versions currently requires use of the MLflow Python client. Once model versions are created,
-    you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time
-    serving using Databricks Model Serving.
-    
-    All operations on registered models and model versions require USE_CATALOG permissions on the enclosing
-    catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional
-    privileges are required for various operations:
-    
-    * To create a registered model, users must additionally have the CREATE_MODEL permission on the target
-    schema. * To view registered model or model version metadata, model version data files, or invoke a model
-    version, users must additionally have the EXECUTE permission on the registered model * To update
-    registered model or model version tags, users must additionally have APPLY TAG permissions on the
-    registered model * To update other registered model or model version metadata (comments, aliases) create a
-    new model version, or update permissions on the registered model, users must be owners of the registered
-    model.
-    
-    Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that
-    specify a securable type, use "FUNCTION" as the securable type.
+provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
+workspaces.
+
+An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace.
+Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating
+new model versions currently requires use of the MLflow Python client. Once model versions are created,
+you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time
+serving using Databricks Model Serving.
+
+All operations on registered models and model versions require USE_CATALOG permissions on the enclosing
+catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional
+privileges are required for various operations:
+
+* To create a registered model, users must additionally have the CREATE_MODEL permission on the target
+  schema.
+* To view registered model or model version metadata, model version data files, or invoke a model
+  version, users must additionally have the EXECUTE permission on the registered model.
+* To update registered model or model version tags, users must additionally have APPLY TAG permissions
+  on the registered model.
+* To update other registered model or model version metadata (comments, aliases), create a new model
+  version, or update permissions on the registered model, users must be owners of the registered model.
+
+Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that
+specify a securable type, use "FUNCTION" as the securable type.
 
     .. py:method:: create(catalog_name: str, schema_name: str, name: str [, comment: Optional[str], storage_location: Optional[str]]) -> RegisteredModelInfo
 
         Create a Registered Model.
-        
-        Creates a new registered model in Unity Catalog.
-        
-        File storage for model versions in the registered model will be located in the default location which
-        is specified by the parent schema, or the parent catalog, or the Metastore.
-        
-        For registered model creation to succeed, the user must satisfy the following conditions: - The caller
-        must be a metastore admin, or be the owner of the parent catalog and schema, or have the
-        **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema.
-        
-        :param catalog_name: str
-          The name of the catalog where the schema and the registered model reside
-        :param schema_name: str
-          The name of the schema where the registered model resides
-        :param name: str
-          The name of the registered model
-        :param comment: str (optional)
-          The comment attached to the registered model
-        :param storage_location: str (optional)
-          The storage location on the cloud under which model version data files are stored
-        
-        :returns: :class:`RegisteredModelInfo`
-        
+
+Creates a new registered model in Unity Catalog.
+
+File storage for model versions in the registered model will be located in the default location, which
+is specified by the parent schema, or the parent catalog, or the metastore.
+
+For registered model creation to succeed, the user must satisfy the following conditions: - The caller
+must be a metastore admin, or be the owner of the parent catalog and schema, or have the
+**USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+- The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema.
+
+:param catalog_name: str
+  The name of the catalog where the schema and the registered model reside
+:param schema_name: str
+  The name of the schema where the registered model resides
+:param name: str
+  The name of the registered model
+:param comment: str (optional)
+  The comment attached to the registered model
+:param storage_location: str (optional)
+  The storage location on the cloud under which model version data files are stored
+
+:returns: :class:`RegisteredModelInfo`
+
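+A minimal sketch (assuming ``w.registered_models`` on a configured :class:`WorkspaceClient`; the
+catalog, schema, and model names are illustrative):
+
+.. code-block:: python
+
+    created = w.registered_models.create(
+        catalog_name="main",
+        schema_name="default",
+        name="revenue_forecaster",
+        comment="Forecasts quarterly revenue",
+    )
+    print(created.full_name)
+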
 
     .. py:method:: delete(full_name: str)
 
         Delete a Registered Model.
-        
-        Deletes a registered model and all its model versions from the specified parent catalog and schema.
-        
-        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the registered model
-        
-        
-        
+
+Deletes a registered model and all its model versions from the specified parent catalog and schema.
+
+The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the registered model
+
+
+
 
     .. py:method:: delete_alias(full_name: str, alias: str)
 
         Delete a Registered Model Alias.
-        
-        Deletes a registered model alias.
-        
-        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the registered model
-        :param alias: str
-          The name of the alias
-        
-        
-        
+
+Deletes a registered model alias.
+
+The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the registered model
+:param alias: str
+  The name of the alias
+
+
+
 
     .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo
 
         Get a Registered Model.
-        
-        Get a registered model.
-        
-        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
-        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the registered model
-        :param include_aliases: bool (optional)
-          Whether to include registered model aliases in the response
-        :param include_browse: bool (optional)
-          Whether to include registered models in the response for which the principal can only access
-          selective metadata for
-        
-        :returns: :class:`RegisteredModelInfo`
-        
+
+Get a registered model.
+
+The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
+registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  The three-level (fully qualified) name of the registered model
+:param include_aliases: bool (optional)
+  Whether to include registered model aliases in the response
+:param include_browse: bool (optional)
+  Whether to include registered models in the response for which the principal can only access
+  selective metadata
+
+:returns: :class:`RegisteredModelInfo`
+
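+A minimal sketch (same assumptions as above):
+
+.. code-block:: python
+
+    model = w.registered_models.get(
+        full_name="main.default.revenue_forecaster",
+        include_aliases=True,
+    )
+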
 
     .. py:method:: list( [, catalog_name: Optional[str], include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name: Optional[str]]) -> Iterator[RegisteredModelInfo]
 
         List Registered Models.
-        
-        List registered models. You can list registered models under a particular schema, or list all
-        registered models in the current metastore.
-        
-        The returned models are filtered based on the privileges of the calling user. For example, the
-        metastore admin is able to list all the registered models. A regular user needs to be the owner or
-        have the **EXECUTE** privilege on the registered model to recieve the registered models in the
-        response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        There is no guarantee of a specific ordering of the elements in the response.
-        
-        :param catalog_name: str (optional)
-          The identifier of the catalog under which to list registered models. If specified, schema_name must
-          be specified.
-        :param include_browse: bool (optional)
-          Whether to include registered models in the response for which the principal can only access
-          selective metadata for
-        :param max_results: int (optional)
-          Max number of registered models to return.
-          
-          If both catalog and schema are specified: - when max_results is not specified, the page length is
-          set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the
-          page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); -
-          when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when
-          set to a value less than 0, an invalid parameter error is returned;
-          
-          If neither schema nor catalog is specified: - when max_results is not specified, the page length is
-          set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the
-          page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); -
-          when set to 0, the page length is set to a server configured value (100, as of 4/2/2024); - when set
-          to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque token to send for the next page of results (pagination).
-        :param schema_name: str (optional)
-          The identifier of the schema under which to list registered models. If specified, catalog_name must
-          be specified.
-        
-        :returns: Iterator over :class:`RegisteredModelInfo`
-        
+
+List registered models. You can list registered models under a particular schema, or list all
+registered models in the current metastore.
+
+The returned models are filtered based on the privileges of the calling user. For example, the
+metastore admin is able to list all the registered models. A regular user needs to be the owner or
+have the **EXECUTE** privilege on the registered model to receive the registered models in the
+response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+There is no guarantee of a specific ordering of the elements in the response.
+
+:param catalog_name: str (optional)
+  The identifier of the catalog under which to list registered models. If specified, schema_name must
+  be specified.
+:param include_browse: bool (optional)
+  Whether to include registered models in the response for which the principal can only access
+  selective metadata
+:param max_results: int (optional)
+  Max number of registered models to return.
+  
+  If both catalog and schema are specified: - when max_results is not specified, the page length is
+  set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the
+  page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); -
+  when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when
+  set to a value less than 0, an invalid parameter error is returned;
+  
+  If neither schema nor catalog is specified: - when max_results is not specified, the page length is
+  set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the
+  page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); -
+  when set to 0, the page length is set to a server configured value (100, as of 4/2/2024); - when set
+  to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque token to send for the next page of results (pagination).
+:param schema_name: str (optional)
+  The identifier of the schema under which to list registered models. If specified, catalog_name must
+  be specified.
+
+:returns: Iterator over :class:`RegisteredModelInfo`
+
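+A minimal sketch of listing the models in one schema (same assumptions as above; the returned
+iterator handles pagination, so ``page_token`` does not need to be managed directly):
+
+.. code-block:: python
+
+    for model in w.registered_models.list(catalog_name="main", schema_name="default"):
+        print(model.full_name)
+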
 
     .. py:method:: set_alias(full_name: str, alias: str, version_num: int) -> RegisteredModelAlias
 
         Set a Registered Model Alias.
-        
-        Set an alias on the specified registered model.
-        
-        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          Full name of the registered model
-        :param alias: str
-          The name of the alias
-        :param version_num: int
-          The version number of the model version to which the alias points
-        
-        :returns: :class:`RegisteredModelAlias`
-        
+
+Set an alias on the specified registered model.
+
+The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  Full name of the registered model
+:param alias: str
+  The name of the alias
+:param version_num: int
+  The version number of the model version to which the alias points
+
+:returns: :class:`RegisteredModelAlias`
+
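+A minimal sketch (same assumptions as above; the alias and version number are illustrative):
+
+.. code-block:: python
+
+    alias = w.registered_models.set_alias(
+        full_name="main.default.revenue_forecaster",
+        alias="champion",
+        version_num=1,
+    )
+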
 
     .. py:method:: update(full_name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> RegisteredModelInfo
 
         Update a Registered Model.
-        
-        Updates the specified registered model.
-        
-        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        Currently only the name, the owner or the comment of the registered model can be updated.
-        
-        :param full_name: str
-          The three-level (fully qualified) name of the registered model
-        :param comment: str (optional)
-          The comment attached to the registered model
-        :param new_name: str (optional)
-          New name for the registered model.
-        :param owner: str (optional)
-          The identifier of the user who owns the registered model
-        
-        :returns: :class:`RegisteredModelInfo`
-        
\ No newline at end of file
+
+Updates the specified registered model.
+
+The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+Currently, only the name, the owner, or the comment of the registered model can be updated.
+
+:param full_name: str
+  The three-level (fully qualified) name of the registered model
+:param comment: str (optional)
+  The comment attached to the registered model
+:param new_name: str (optional)
+  New name for the registered model.
+:param owner: str (optional)
+  The identifier of the user who owns the registered model
+
+:returns: :class:`RegisteredModelInfo`
diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst
index 3396011f0..22f50f0f1 100644
--- a/docs/workspace/catalog/resource_quotas.rst
+++ b/docs/workspace/catalog/resource_quotas.rst
@@ -5,41 +5,40 @@
 .. py:class:: ResourceQuotasAPI
 
     Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that
-    can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
-    metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
-    limits. For more information on resource quotas see the [Unity Catalog documentation].
-    
-    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
+can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
+metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
+limits. For more information on resource quotas see the [Unity Catalog documentation].
+
+[Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
 
     .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse
 
         Get information for a single resource quota.
-        
-        The GetQuota API returns usage information for a single resource quota, defined as a child-parent
-        pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
-        asynchronously. The updated count might not be returned in the first call.
-        
-        :param parent_securable_type: str
-          Securable type of the quota parent.
-        :param parent_full_name: str
-          Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
-        :param quota_name: str
-          Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
-        
-        :returns: :class:`GetQuotaResponse`
-        
+
+The GetQuota API returns usage information for a single resource quota, defined as a child-parent
+pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
+asynchronously. The updated count might not be returned in the first call.
+
+:param parent_securable_type: str
+  Securable type of the quota parent.
+:param parent_full_name: str
+  Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
+:param quota_name: str
+  Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
+
+:returns: :class:`GetQuotaResponse`
+
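+A minimal sketch (assuming ``w.resource_quotas`` on a configured :class:`WorkspaceClient`; the parent
+name and quota name are illustrative):
+
+.. code-block:: python
+
+    quota = w.resource_quotas.get_quota(
+        parent_securable_type="catalog",
+        parent_full_name="main",
+        quota_name="schema-quota",
+    )
+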
 
     .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo]
 
         List all resource quotas under a metastore.
-        
-        ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
-        counts returned. This API does not trigger a refresh of quota counts.
-        
-        :param max_results: int (optional)
-          The number of quotas to return.
-        :param page_token: str (optional)
-          Opaque token for the next page of results.
-        
-        :returns: Iterator over :class:`QuotaInfo`
-        
\ No newline at end of file
+
+ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
+counts returned. This API does not trigger a refresh of quota counts.
+
+:param max_results: int (optional)
+  The number of quotas to return.
+:param page_token: str (optional)
+  Opaque token for the next page of results.
+
+:returns: Iterator over :class:`QuotaInfo`
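+
+A minimal sketch (same assumptions as above; the :class:`QuotaInfo` field names are assumed):
+
+.. code-block:: python
+
+    for quota in w.resource_quotas.list_quotas():
+        print(quota.quota_name, quota.quota_count, quota.quota_limit)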
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index feaf7c7a0..a3c9d2096 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -5,9 +5,9 @@
 .. py:class:: SchemasAPI
 
     A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema
-    organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
-    the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT
-    permission on the table or view.
+organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
+the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT
+permission on the table or view.
 
     .. py:method:: create(name: str, catalog_name: str [, comment: Optional[str], properties: Optional[Dict[str, str]], storage_root: Optional[str]]) -> SchemaInfo
 
@@ -31,38 +31,38 @@
             w.schemas.delete(full_name=created_schema.full_name)
 
         Create a schema.
-        
-        Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the
-        **CREATE_SCHEMA** privilege in the parent catalog.
-        
-        :param name: str
-          Name of schema, relative to parent catalog.
-        :param catalog_name: str
-          Name of parent catalog.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param properties: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        :param storage_root: str (optional)
-          Storage root URL for managed tables within schema.
-        
-        :returns: :class:`SchemaInfo`
-        
+
+Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the
+**CREATE_SCHEMA** privilege in the parent catalog.
+
+:param name: str
+  Name of schema, relative to parent catalog.
+:param catalog_name: str
+  Name of parent catalog.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param properties: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+:param storage_root: str (optional)
+  Storage root URL for managed tables within schema.
+
+:returns: :class:`SchemaInfo`
+
 
     .. py:method:: delete(full_name: str [, force: Optional[bool]])
 
         Delete a schema.
-        
-        Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
-        owner of the parent catalog.
-        
-        :param full_name: str
-          Full name of the schema.
-        :param force: bool (optional)
-          Force deletion even if the schema is not empty.
-        
-        
-        
+
+Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
+owner of the parent catalog.
+
+:param full_name: str
+  Full name of the schema.
+:param force: bool (optional)
+  Force deletion even if the schema is not empty.
+
+
+
 
     .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> SchemaInfo
 
@@ -88,18 +88,18 @@
             w.schemas.delete(full_name=created.full_name)
 
         Get a schema.
-        
-        Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the
-        schema, or a user that has the **USE_SCHEMA** privilege on the schema.
-        
-        :param full_name: str
-          Full name of the schema.
-        :param include_browse: bool (optional)
-          Whether to include schemas in the response for which the principal can only access selective
-          metadata for
-        
-        :returns: :class:`SchemaInfo`
-        
+
+Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the
+schema, or a user that has the **USE_SCHEMA** privilege on the schema.
+
+:param full_name: str
+  Full name of the schema.
+:param include_browse: bool (optional)
+  Whether to include schemas in the response for which the principal can only access selective
+  metadata
+
+:returns: :class:`SchemaInfo`
+
 
     .. py:method:: list(catalog_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SchemaInfo]
 
@@ -122,27 +122,27 @@
             w.catalogs.delete(name=new_catalog.name, force=True)
 
         List schemas.
-        
-        Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the
-        owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas
-        owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved.
-        There is no guarantee of a specific ordering of the elements in the array.
-        
-        :param catalog_name: str
-          Parent catalog for schemas of interest.
-        :param include_browse: bool (optional)
-          Whether to include schemas in the response for which the principal can only access selective
-          metadata for
-        :param max_results: int (optional)
-          Maximum number of schemas to return. If not set, all the schemas are returned (not recommended). -
-          when set to a value greater than 0, the page length is the minimum of this value and a server
-          configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`SchemaInfo`
-        
+
+Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the
+owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas
+owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved.
+There is no guarantee of a specific ordering of the elements in the array.
+
+:param catalog_name: str
+  Parent catalog for schemas of interest.
+:param include_browse: bool (optional)
+  Whether to include schemas in the response for which the principal can only access selective
+  metadata
+:param max_results: int (optional)
+  Maximum number of schemas to return. If not set, all the schemas are returned (not recommended). -
+  when set to a value greater than 0, the page length is the minimum of this value and a server
+  configured value; - when set to 0, the page length is set to a server configured value
+  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`SchemaInfo`
+
 
     .. py:method:: update(full_name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], new_name: Optional[str], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> SchemaInfo
 
@@ -168,24 +168,23 @@
             w.schemas.delete(full_name=created.full_name)
 
         Update a schema.
-        
-        Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If
-        the caller is a metastore admin, only the __owner__ field can be changed in the update. If the
-        __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA**
-        privilege on the parent catalog.
-        
-        :param full_name: str
-          Full name of the schema.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
-          Whether predictive optimization should be enabled for this object and objects under it.
-        :param new_name: str (optional)
-          New name for the schema.
-        :param owner: str (optional)
-          Username of current owner of schema.
-        :param properties: Dict[str,str] (optional)
-          A map of key-value properties attached to the securable.
-        
-        :returns: :class:`SchemaInfo`
-        
\ No newline at end of file
+
+Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If
+the caller is a metastore admin, only the __owner__ field can be changed in the update. If the
+__name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA**
+privilege on the parent catalog.
+
+:param full_name: str
+  Full name of the schema.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
+  Whether predictive optimization should be enabled for this object and objects under it.
+:param new_name: str (optional)
+  New name for the schema.
+:param owner: str (optional)
+  Username of current owner of schema.
+:param properties: Dict[str,str] (optional)
+  A map of key-value properties attached to the securable.
+
+:returns: :class:`SchemaInfo`
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index cac70a944..80cee7900 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -5,15 +5,15 @@
 .. py:class:: StorageCredentialsAPI
 
     A storage credential represents an authentication and authorization mechanism for accessing data stored on
-    your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that
-    control which users and groups can access the credential. If a user does not have access to a storage
-    credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your
-    cloud tenant on the user’s behalf.
-    
-    Databricks recommends using external locations rather than using storage credentials directly.
-    
-    To create storage credentials, you must be a Databricks account admin. The account admin who creates the
-    storage credential can delegate ownership to another user or group to manage permissions on it.
+your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that
+control which users and groups can access the credential. If a user does not have access to a storage
+credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your
+cloud tenant on the user’s behalf.
+
+Databricks recommends using external locations rather than using storage credentials directly.
+
+To create storage credentials, you must be a Databricks account admin. The account admin who creates the
+storage credential can delegate ownership to another user or group to manage permissions on it.
 
     .. py:method:: create(name: str [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], read_only: Optional[bool], skip_validation: Optional[bool]]) -> StorageCredentialInfo
 
@@ -38,45 +38,45 @@
             w.storage_credentials.delete(delete=created.name)
 
         Create a storage credential.
-        
-        Creates a new storage credential.
-        
-        :param name: str
-          The credential name. The name must be unique within the metastore.
-        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-          The AWS IAM role configuration.
-        :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
-          The Azure managed identity configuration.
-        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration.
-        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-          The Cloudflare API token configuration.
-        :param comment: str (optional)
-          Comment associated with the credential.
-        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-          The Databricks managed GCP service account configuration.
-        :param read_only: bool (optional)
-          Whether the storage credential is only usable for read operations.
-        :param skip_validation: bool (optional)
-          Supplying true to this argument skips validation of the created credential.
-        
-        :returns: :class:`StorageCredentialInfo`
-        
+
+Creates a new storage credential.
+
+:param name: str
+  The credential name. The name must be unique within the metastore.
+:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+  The AWS IAM role configuration.
+:param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
+  The Azure managed identity configuration.
+:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+  The Azure service principal configuration.
+:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+  The Cloudflare API token configuration.
+:param comment: str (optional)
+  Comment associated with the credential.
+:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+  The Databricks managed GCP service account configuration.
+:param read_only: bool (optional)
+  Whether the storage credential is only usable for read operations.
+:param skip_validation: bool (optional)
+  Supplying true to this argument skips validation of the created credential.
+
+:returns: :class:`StorageCredentialInfo`
+
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a credential.
-        
-        Deletes a storage credential from the metastore. The caller must be an owner of the storage
-        credential.
-        
-        :param name: str
-          Name of the storage credential.
-        :param force: bool (optional)
-          Force deletion even if there are dependent external locations or external tables.
-        
-        
-        
+
+Deletes a storage credential from the metastore. The caller must be an owner of the storage
+credential.
+
+:param name: str
+  Name of the storage credential.
+:param force: bool (optional)
+  Force deletion even if there are dependent external locations or external tables.
+
+
+
 
     .. py:method:: get(name: str) -> StorageCredentialInfo
 
@@ -103,15 +103,15 @@
             w.storage_credentials.delete(name=created.name)
 
         Get a credential.
-        
-        Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
-        storage credential, or have some permission on the storage credential.
-        
-        :param name: str
-          Name of the storage credential.
-        
-        :returns: :class:`StorageCredentialInfo`
-        
+
+Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
+storage credential, or have some permission on the storage credential.
+
+:param name: str
+  Name of the storage credential.
+
+:returns: :class:`StorageCredentialInfo`
+
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[StorageCredentialInfo]
 
@@ -127,23 +127,23 @@
             all = w.storage_credentials.list()
 
         List credentials.
-        
-        Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
-        only those storage credentials the caller has permission to access. If the caller is a metastore
-        admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the
-        elements in the array.
-        
-        :param max_results: int (optional)
-          Maximum number of storage credentials to return. If not set, all the storage credentials are
-          returned (not recommended). - when set to a value greater than 0, the page length is the minimum of
-          this value and a server configured value; - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value less than 0, an invalid parameter error is
-          returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`StorageCredentialInfo`
-        
+
+Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
+only those storage credentials the caller has permission to access. If the caller is a metastore
+admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the
+elements in the array.
+
+:param max_results: int (optional)
+  Maximum number of storage credentials to return. If not set, all the storage credentials are
+  returned (not recommended). - when set to a value greater than 0, the page length is the minimum of
+  this value and a server configured value; - when set to 0, the page length is set to a server
+  configured value (recommended); - when set to a value less than 0, an invalid parameter error is
+  returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`StorageCredentialInfo`
+
 
     .. py:method:: update(name: str [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityResponse], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> StorageCredentialInfo
 
@@ -173,70 +173,69 @@
             w.storage_credentials.delete(delete=created.name)
 
         Update a credential.
-        
-        Updates a storage credential on the metastore.
-        
-        :param name: str
-          Name of the storage credential.
-        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-          The AWS IAM role configuration.
-        :param azure_managed_identity: :class:`AzureManagedIdentityResponse` (optional)
-          The Azure managed identity configuration.
-        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration.
-        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-          The Cloudflare API token configuration.
-        :param comment: str (optional)
-          Comment associated with the credential.
-        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-          The Databricks managed GCP service account configuration.
-        :param force: bool (optional)
-          Force update even if there are dependent external locations or external tables.
-        :param isolation_mode: :class:`IsolationMode` (optional)
-        :param new_name: str (optional)
-          New name for the storage credential.
-        :param owner: str (optional)
-          Username of current owner of credential.
-        :param read_only: bool (optional)
-          Whether the storage credential is only usable for read operations.
-        :param skip_validation: bool (optional)
-          Supplying true to this argument skips validation of the updated credential.
-        
-        :returns: :class:`StorageCredentialInfo`
-        
+
+Updates a storage credential on the metastore.
+
+:param name: str
+  Name of the storage credential.
+:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+  The AWS IAM role configuration.
+:param azure_managed_identity: :class:`AzureManagedIdentityResponse` (optional)
+  The Azure managed identity configuration.
+:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+  The Azure service principal configuration.
+:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+  The Cloudflare API token configuration.
+:param comment: str (optional)
+  Comment associated with the credential.
+:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+  The Databricks managed GCP service account configuration.
+:param force: bool (optional)
+  Force update even if there are dependent external locations or external tables.
+:param isolation_mode: :class:`IsolationMode` (optional)
+:param new_name: str (optional)
+  New name for the storage credential.
+:param owner: str (optional)
+  Username of current owner of credential.
+:param read_only: bool (optional)
+  Whether the storage credential is only usable for read operations.
+:param skip_validation: bool (optional)
+  Supplying true to this argument skips validation of the updated credential.
+
+:returns: :class:`StorageCredentialInfo`
+
 
     .. py:method:: validate( [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], external_location_name: Optional[str], read_only: Optional[bool], storage_credential_name: Optional[str], url: Optional[str]]) -> ValidateStorageCredentialResponse
 
         Validate a storage credential.
-        
-        Validates a storage credential. At least one of __external_location_name__ and __url__ need to be
-        provided. If only one of them is provided, it will be used for validation. And if both are provided,
-        the __url__ will be used for validation, and __external_location_name__ will be ignored when checking
-        overlapping urls.
-        
-        Either the __storage_credential_name__ or the cloud-specific credential must be provided.
-        
-        The caller must be a metastore admin or the storage credential owner or have the
-        **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential.
-        
-        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-          The AWS IAM role configuration.
-        :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
-          The Azure managed identity configuration.
-        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-          The Azure service principal configuration.
-        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-          The Cloudflare API token configuration.
-        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-          The Databricks created GCP service account configuration.
-        :param external_location_name: str (optional)
-          The name of an existing external location to validate.
-        :param read_only: bool (optional)
-          Whether the storage credential is only usable for read operations.
-        :param storage_credential_name: str (optional)
-          The name of the storage credential to validate.
-        :param url: str (optional)
-          The external location url to validate.
-        
-        :returns: :class:`ValidateStorageCredentialResponse`
-        
\ No newline at end of file
+
+Validates a storage credential. At least one of __external_location_name__ and __url__ needs to be
+provided. If only one of them is provided, it will be used for validation. If both are provided,
+the __url__ will be used for validation, and __external_location_name__ will be ignored when checking
+for overlapping URLs.
+
+Either the __storage_credential_name__ or the cloud-specific credential must be provided.
+
+The caller must be a metastore admin or the storage credential owner or have the
+**CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential.
+
+:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+  The AWS IAM role configuration.
+:param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
+  The Azure managed identity configuration.
+:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+  The Azure service principal configuration.
+:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+  The Cloudflare API token configuration.
+:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+  The Databricks created GCP service account configuration.
+:param external_location_name: str (optional)
+  The name of an existing external location to validate.
+:param read_only: bool (optional)
+  Whether the storage credential is only usable for read operations.
+:param storage_credential_name: str (optional)
+  The name of the storage credential to validate.
+:param url: str (optional)
+  The external location url to validate.
+
+:returns: :class:`ValidateStorageCredentialResponse`
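+
+A minimal sketch of validating an existing credential against a URL (assuming
+``w.storage_credentials`` on a configured :class:`WorkspaceClient`; the credential name and URL are
+illustrative):
+
+.. code-block:: python
+
+    result = w.storage_credentials.validate(
+        storage_credential_name="my_credential",
+        url="abfss://container@myaccount.dfs.core.windows.net/data",
+    )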
diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst
index 2028a3623..91e82ca57 100644
--- a/docs/workspace/catalog/system_schemas.rst
+++ b/docs/workspace/catalog/system_schemas.rst
@@ -5,54 +5,53 @@
 .. py:class:: SystemSchemasAPI
 
     A system schema is a schema that lives within the system catalog. A system schema may contain information
-    about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.
+about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.
 
     .. py:method:: disable(metastore_id: str, schema_name: str)
 
         Disable a system schema.
-        
-        Disables the system schema and removes it from the system catalog. The caller must be an account admin
-        or a metastore admin.
-        
-        :param metastore_id: str
-          The metastore ID under which the system schema lives.
-        :param schema_name: str
-          Full name of the system schema.
-        
-        
-        
+
+Disables the system schema and removes it from the system catalog. The caller must be an account admin
+or a metastore admin.
+
+:param metastore_id: str
+  The metastore ID under which the system schema lives.
+:param schema_name: str
+  Full name of the system schema.
+
+
+
 
     .. py:method:: enable(metastore_id: str, schema_name: str)
 
         Enable a system schema.
-        
-        Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
-        metastore admin.
-        
-        :param metastore_id: str
-          The metastore ID under which the system schema lives.
-        :param schema_name: str
-          Full name of the system schema.
-        
-        
-        
+
+Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
+metastore admin.
+
+:param metastore_id: str
+  The metastore ID under which the system schema lives.
+:param schema_name: str
+  Full name of the system schema.
+
+
+
 
     .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo]
 
         List system schemas.
-        
-        Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
-        admin.
-        
-        :param metastore_id: str
-          The ID for the metastore in which the system schema resides.
-        :param max_results: int (optional)
-          Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
-          value (recommended); - When set to a value greater than 0, the page length is the minimum of this
-          value and a server configured value; - When set to a value less than 0, an invalid parameter error
-          is returned; - If not set, all the schemas are returned (not recommended).
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`SystemSchemaInfo`
-        
\ No newline at end of file
+
+Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
+admin.
+
+:param metastore_id: str
+  The ID for the metastore in which the system schema resides.
+:param max_results: int (optional)
+  Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
+  value (recommended); - When set to a value greater than 0, the page length is the minimum of this
+  value and a server configured value; - When set to a value less than 0, an invalid parameter error
+  is returned; - If not set, all the schemas are returned (not recommended).
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`SystemSchemaInfo`
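+
+A minimal usage sketch (placeholder metastore ID; the returned iterator handles pagination):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for system_schema in w.system_schemas.list(metastore_id="11111111-2222-3333-4444-555555555555"):
+        print(system_schema)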
diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst
index dd46c42f3..0b631408f 100644
--- a/docs/workspace/catalog/table_constraints.rst
+++ b/docs/workspace/catalog/table_constraints.rst
@@ -5,58 +5,57 @@
 .. py:class:: TableConstraintsAPI
 
     Primary key and foreign key constraints encode relationships between fields in tables.
-    
-    Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a
-    primary key in another table. This primary key is the parent constraint of the foreign key and the table
-    this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child
-    constraint of its referenced primary key; the table of the foreign key is the child table of the primary
-    key.
-    
-    You can declare primary keys and foreign keys as part of the table specification during table creation.
-    You can also add or drop constraints on existing tables.
+
+Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a
+primary key in another table. This primary key is the parent constraint of the foreign key and the table
+this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child
+constraint of its referenced primary key; the table of the foreign key is the child table of the primary
+key.
+
+You can declare primary keys and foreign keys as part of the table specification during table creation.
+You can also add or drop constraints on existing tables.
 
     .. py:method:: create(full_name_arg: str, constraint: TableConstraint) -> TableConstraint
 
         Create a table constraint.
-        
-        Creates a new table constraint.
-        
-        For the table constraint creation to succeed, the user must satisfy both of these conditions: - the
-        user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
-        privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a
-        __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent
-        table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the
-        owner of the referenced parent table.
-        
-        :param full_name_arg: str
-          The full name of the table referenced by the constraint.
-        :param constraint: :class:`TableConstraint`
-          A table constraint, as defined by *one* of the following fields being set:
-          __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
-        
-        :returns: :class:`TableConstraint`
-        
+
+Creates a new table constraint.
+
+For the table constraint creation to succeed, the user must satisfy both of these conditions: - the
+user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
+privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a
+__ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent
+table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the
+owner of the referenced parent table.
+
+:param full_name_arg: str
+  The full name of the table referenced by the constraint.
+:param constraint: :class:`TableConstraint`
+  A table constraint, as defined by *one* of the following fields being set:
+  __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
+
+:returns: :class:`TableConstraint`
+
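+A minimal usage sketch (assuming the ``TableConstraint`` and ``PrimaryKeyConstraint`` dataclasses
+from ``databricks.sdk.service.catalog``; the table and column names are made up):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.catalog import PrimaryKeyConstraint, TableConstraint
+
+    w = WorkspaceClient()
+    # Declare a primary key on an existing table; exactly one constraint field may be set.
+    created = w.table_constraints.create(
+        full_name_arg="main.default.orders",
+        constraint=TableConstraint(primary_key_constraint=PrimaryKeyConstraint(
+            name="orders_pk", child_columns=["order_id"])))
+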
 
     .. py:method:: delete(full_name: str, constraint_name: str, cascade: bool)
 
         Delete a table constraint.
-        
-        Deletes a table constraint.
-        
-        For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the
-        user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
-        privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is
-        **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG**
-        privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner
-        of the table.
-        
-        :param full_name: str
-          Full name of the table referenced by the constraint.
-        :param constraint_name: str
-          The name of the constraint to delete.
-        :param cascade: bool
-          If true, try deleting all child constraints of the current constraint. If false, reject this
-          operation if the current constraint has any child constraints.
-        
-        
-        
\ No newline at end of file
+
+Deletes a table constraint.
+
+For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the
+user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
+privilege on the table's parent schema, and be the owner of the table. - if the __cascade__ argument is
+**true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG**
+privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner
+of the table.
+
+:param full_name: str
+  Full name of the table referenced by the constraint.
+:param constraint_name: str
+  The name of the constraint to delete.
+:param cascade: bool
+  If true, try deleting all child constraints of the current constraint. If false, reject this
+  operation if the current constraint has any child constraints.
+
+
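+A minimal usage sketch (constraint and table names are made up):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Drop the constraint, rejecting the operation if child constraints exist.
+    w.table_constraints.delete(full_name="main.default.orders",
+                               constraint_name="orders_pk",
+                               cascade=False)
+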
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 15cfb1cac..3fa411272 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -5,45 +5,45 @@
 .. py:class:: TablesAPI
 
     A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data.
-    To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must
-    have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT
-    permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the
-    USE_SCHEMA permission on its parent schema.
-    
-    A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table
-    (rather than a managed or external table).
+To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must
+have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT
+permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the
+USE_SCHEMA permission on its parent schema.
+
+A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table
+(rather than a managed or external table).
 
     .. py:method:: delete(full_name: str)
 
         Delete a table.
-        
-        Deletes a table from the specified parent catalog and schema. The caller must be the owner of the
-        parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the
-        parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent
-        catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        :param full_name: str
-          Full name of the table.
-        
-        
-        
+
+Deletes a table from the specified parent catalog and schema. The caller must be the owner of the
+parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the
+parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent
+catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+:param full_name: str
+  Full name of the table.
+
+
+
 
     .. py:method:: exists(full_name: str) -> TableExistsResponse
 
         Get boolean reflecting if table exists.
-        
-        Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one
-        of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the
-        owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the
-        **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema,
-        and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on
-        the parent catalog * Have BROWSE privilege on the parent schema.
-        
-        :param full_name: str
-          Full name of the table.
-        
-        :returns: :class:`TableExistsResponse`
-        
+
+Checks whether a table exists in the metastore for a specific catalog and schema. The caller must satisfy one
+of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the
+owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the
+**USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema,
+and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on
+the parent catalog * Have BROWSE privilege on the parent schema.
+
+:param full_name: str
+  Full name of the table.
+
+:returns: :class:`TableExistsResponse`
+
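+A minimal usage sketch (the table name is a placeholder and the ``table_exists`` field name on the
+response is assumed):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    response = w.tables.exists(full_name="main.default.orders")
+    print(response.table_exists)
+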
 
     .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool]]) -> TableInfo
 
@@ -80,25 +80,25 @@
             w.tables.delete(full_name=table_full_name)
 
         Get a table.
-        
-        Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
-        following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of
-        the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG**
-        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be
-        the table owner or have the SELECT privilege on the table.
-        
-        :param full_name: str
-          Full name of the table.
-        :param include_browse: bool (optional)
-          Whether to include tables in the response for which the principal can only access selective metadata
-          for
-        :param include_delta_metadata: bool (optional)
-          Whether delta metadata should be included in the response.
-        :param include_manifest_capabilities: bool (optional)
-          Whether to include a manifest containing capabilities the table has.
-        
-        :returns: :class:`TableInfo`
-        
+
+Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
+following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of
+the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG**
+privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be
+the table owner or have the SELECT privilege on the table.
+
+:param full_name: str
+  Full name of the table.
+:param include_browse: bool (optional)
+  Whether to include tables in the response for which the principal can only access selective metadata
+:param include_delta_metadata: bool (optional)
+  Whether delta metadata should be included in the response.
+:param include_manifest_capabilities: bool (optional)
+  Whether to include a manifest containing capabilities the table has.
+
+:returns: :class:`TableInfo`
+
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
@@ -124,41 +124,41 @@
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List tables.
-        
-        Gets an array of all tables for the current metastore under the parent catalog and schema. The caller
-        must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the
-        latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
-        catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific
-        ordering of the elements in the array.
-        
-        :param catalog_name: str
-          Name of parent catalog for tables of interest.
-        :param schema_name: str
-          Parent schema of tables.
-        :param include_browse: bool (optional)
-          Whether to include tables in the response for which the principal can only access selective metadata
-          for
-        :param include_delta_metadata: bool (optional)
-          Whether delta metadata should be included in the response.
-        :param include_manifest_capabilities: bool (optional)
-          Whether to include a manifest containing capabilities the table has.
-        :param max_results: int (optional)
-          Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
-          when set to a value greater than 0, the page length is the minimum of this value and a server
-          configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param omit_columns: bool (optional)
-          Whether to omit the columns of the table from the response or not.
-        :param omit_properties: bool (optional)
-          Whether to omit the properties of the table from the response or not.
-        :param omit_username: bool (optional)
-          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
-          not.
-        :param page_token: str (optional)
-          Opaque token to send for the next page of results (pagination).
-        
-        :returns: Iterator over :class:`TableInfo`
-        
+
+Gets an array of all tables for the current metastore under the parent catalog and schema. The caller
+must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the
+latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
+catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific
+ordering of the elements in the array.
+
+:param catalog_name: str
+  Name of parent catalog for tables of interest.
+:param schema_name: str
+  Parent schema of tables.
+:param include_browse: bool (optional)
+  Whether to include tables in the response for which the principal can only access selective metadata
+:param include_delta_metadata: bool (optional)
+  Whether delta metadata should be included in the response.
+:param include_manifest_capabilities: bool (optional)
+  Whether to include a manifest containing capabilities the table has.
+:param max_results: int (optional)
+  Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
+  when set to a value greater than 0, the page length is the minimum of this value and a server
+  configured value; - when set to 0, the page length is set to a server configured value
+  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+:param omit_columns: bool (optional)
+  Whether to omit the columns of the table from the response or not.
+:param omit_properties: bool (optional)
+  Whether to omit the properties of the table from the response or not.
+:param omit_username: bool (optional)
+  Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+  not.
+:param page_token: str (optional)
+  Opaque token to send for the next page of results (pagination).
+
+:returns: Iterator over :class:`TableInfo`
+
 
     .. py:method:: list_summaries(catalog_name: str [, include_manifest_capabilities: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
 
@@ -185,50 +185,49 @@
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List table summaries.
-        
-        Gets an array of summaries for tables for a schema and catalog within the metastore. The table
-        summaries returned are either:
-        
-        * summaries for tables (within the current metastore and parent catalog and schema), when the user is
-        a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent
-        catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or
-        **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the
-        **USE_CATALOG** privilege on the parent catalog.
-        
-        There is no guarantee of a specific ordering of the elements in the array.
-        
-        :param catalog_name: str
-          Name of parent catalog for tables of interest.
-        :param include_manifest_capabilities: bool (optional)
-          Whether to include a manifest containing capabilities the table has.
-        :param max_results: int (optional)
-          Maximum number of summaries for tables to return. If not set, the page length is set to a server
-          configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
-          the minimum of this value and a server configured value (10000, as of 1/5/2024); - when set to 0,
-          the page length is set to a server configured value (10000, as of 1/5/2024) (recommended); - when
-          set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        :param schema_name_pattern: str (optional)
-          A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty.
-        :param table_name_pattern: str (optional)
-          A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty.
-        
-        :returns: Iterator over :class:`TableSummary`
-        
+
+Gets an array of summaries for tables for a schema and catalog within the metastore. The table
+summaries returned are either:
+
+* summaries for tables (within the current metastore and parent catalog and schema), when the user is
+a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent
+catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or
+**USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the
+**USE_CATALOG** privilege on the parent catalog.
+
+There is no guarantee of a specific ordering of the elements in the array.
+
+:param catalog_name: str
+  Name of parent catalog for tables of interest.
+:param include_manifest_capabilities: bool (optional)
+  Whether to include a manifest containing capabilities the table has.
+:param max_results: int (optional)
+  Maximum number of summaries for tables to return. If not set, the page length is set to a server
+  configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
+  the minimum of this value and a server configured value (10000, as of 1/5/2024); - when set to 0,
+  the page length is set to a server configured value (10000, as of 1/5/2024) (recommended); - when
+  set to a value less than 0, an invalid parameter error is returned;
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+:param schema_name_pattern: str (optional)
+  A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty.
+:param table_name_pattern: str (optional)
+  A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty.
+
+:returns: Iterator over :class:`TableSummary`
+
 
     .. py:method:: update(full_name: str [, owner: Optional[str]])
 
         Update a table owner.
-        
-        Change the owner of the table. The caller must be the owner of the parent catalog, have the
-        **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner
-        of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-        privilege on the parent schema.
-        
-        :param full_name: str
-          Full name of the table.
-        :param owner: str (optional)
-        
-        
-        
\ No newline at end of file
+
+Change the owner of the table. The caller must be the owner of the parent catalog, have the
+**USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner
+of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+privilege on the parent schema.
+
+:param full_name: str
+  Full name of the table.
+:param owner: str (optional)
+
+
diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst
index 1acd462b7..9898af1ba 100644
--- a/docs/workspace/catalog/temporary_table_credentials.rst
+++ b/docs/workspace/catalog/temporary_table_credentials.rst
@@ -5,32 +5,31 @@
 .. py:class:: TemporaryTableCredentialsAPI
 
     Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
-    locationswhere table data is stored in Databricks. These credentials are employed to provide secure and
-    time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
-    has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
-    Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports
-    temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in
-    scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table
-    credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
-    the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
-    by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
-    catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
-    security reason.
+locations where table data is stored in Databricks. These credentials are employed to provide secure and
+time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
+has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
+Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud supports
+temporary credentials through OAuth 2.0. Temporary table credentials ensure that data access is limited in
+scope and duration, reducing the risk of unauthorized access or misuse. To use the temporary table
+credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
+the metastore level, and the user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+level by a catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission that can only be
+granted by a catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the
+schema for security reasons.
 
     .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse
 
         Generate a temporary table credential.
-        
-        Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
-        must have external_access_enabled flag set to true (default false). The caller must have
-        EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
-        owners.
-        
-        :param operation: :class:`TableOperation` (optional)
-          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
-          specified, the credentials returned will have write permissions, otherwise, it will be read only.
-        :param table_id: str (optional)
-          UUID of the table to read or write.
-        
-        :returns: :class:`GenerateTemporaryTableCredentialResponse`
-        
\ No newline at end of file
+
+Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
+must have the external_access_enabled flag set to true (default false). The caller must have the
+EXTERNAL_USE_SCHEMA privilege on the parent schema; this privilege can only be granted by catalog
+owners.
+
+:param operation: :class:`TableOperation` (optional)
+  The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+  specified, the credentials returned will have write permissions, otherwise, it will be read only.
+:param table_id: str (optional)
+  UUID of the table to read or write.
+
+:returns: :class:`GenerateTemporaryTableCredentialResponse`
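+
+A minimal usage sketch (assuming ``TableOperation`` from ``databricks.sdk.service.catalog`` with a
+``READ`` member; the table UUID is a placeholder):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.catalog import TableOperation
+
+    w = WorkspaceClient()
+    # Request read-only, short-lived credentials for one table.
+    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
+        operation=TableOperation.READ,
+        table_id="aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee")
+    print(creds)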
diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst
index 76e7c6c33..62d23b88b 100644
--- a/docs/workspace/catalog/volumes.rst
+++ b/docs/workspace/catalog/volumes.rst
@@ -5,11 +5,11 @@
 .. py:class:: VolumesAPI
 
     Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing
-    files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
-    files, organizing data sets during the data exploration stages in data science, working with libraries
-    that require access to the local file system on cluster machines, storing library and config files of
-    arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or
-    transforming and querying non-tabular data files in ETL.
+files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
+files, organizing data sets during the data exploration stages in data science, working with libraries
+that require access to the local file system on cluster machines, storing library and config files of
+arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or
+transforming and querying non-tabular data files in ETL.
 
     .. py:method:: create(catalog_name: str, schema_name: str, name: str, volume_type: VolumeType [, comment: Optional[str], storage_location: Optional[str]]) -> VolumeInfo
 
@@ -55,53 +55,53 @@
             w.volumes.delete(name=created_volume.full_name)
 
         Create a Volume.
-        
-        Creates a new volume.
-        
-        The user could create either an external volume or a managed volume. An external volume will be
-        created in the specified external location, while a managed volume will be located in the default
-        location which is specified by the parent schema, or the parent catalog, or the Metastore.
-        
-        For the volume creation to succeed, the user must satisfy following conditions: - The caller must be a
-        metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG**
-        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller
-        must have **CREATE VOLUME** privilege on the parent schema.
-        
-        For an external volume, following conditions also need to satisfy - The caller must have **CREATE
-        EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes
-        existing in the specified storage location. - The specified storage location is not under the location
-        of other tables, nor volumes, or catalogs or schemas.
-        
-        :param catalog_name: str
-          The name of the catalog where the schema and the volume are
-        :param schema_name: str
-          The name of the schema where the volume is
-        :param name: str
-          The name of the volume
-        :param volume_type: :class:`VolumeType`
-        :param comment: str (optional)
-          The comment attached to the volume
-        :param storage_location: str (optional)
-          The storage location on the cloud
-        
-        :returns: :class:`VolumeInfo`
-        
+
+Creates a new volume.
+
+The user can create either an external volume or a managed volume. An external volume will be
+created in the specified external location, while a managed volume will be located in the default
+location which is specified by the parent schema, or the parent catalog, or the Metastore.
+
+For the volume creation to succeed, the user must satisfy the following conditions: - The caller must
+be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG**
+privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller
+must have the **CREATE VOLUME** privilege on the parent schema.
+
+For an external volume, the following conditions must also be satisfied: - The caller must have the
+**CREATE EXTERNAL VOLUME** privilege on the external location. - There must be no other tables or
+volumes in the specified storage location. - The specified storage location must not be under the
+location of other tables, volumes, catalogs, or schemas.
+
+:param catalog_name: str
+  The name of the catalog where the schema and the volume are
+:param schema_name: str
+  The name of the schema where the volume is
+:param name: str
+  The name of the volume
+:param volume_type: :class:`VolumeType`
+:param comment: str (optional)
+  The comment attached to the volume
+:param storage_location: str (optional)
+  The storage location on the cloud
+
+:returns: :class:`VolumeInfo`
+
 
     .. py:method:: delete(name: str)
 
         Delete a Volume.
-        
-        Deletes a volume from the specified parent catalog and schema.
-        
-        The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
-        also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-        privilege on the parent schema.
-        
-        :param name: str
-          The three-level (fully qualified) name of the volume
-        
-        
-        
+
+Deletes a volume from the specified parent catalog and schema.
+
+The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
+also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+privilege on the parent schema.
+
+:param name: str
+  The three-level (fully qualified) name of the volume
+
+
+
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[VolumeInfo]
 
@@ -127,42 +127,42 @@
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List Volumes.
-        
-        Gets an array of volumes for the current metastore under the parent catalog and schema.
-        
-        The returned volumes are filtered based on the privileges of the calling user. For example, the
-        metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
-        **READ VOLUME** privilege on the volume to recieve the volumes in the response. For the latter case,
-        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-        **USE_SCHEMA** privilege on the parent schema.
-        
-        There is no guarantee of a specific ordering of the elements in the array.
-        
-        :param catalog_name: str
-          The identifier of the catalog
-        :param schema_name: str
-          The identifier of the schema
-        :param include_browse: bool (optional)
-          Whether to include volumes in the response for which the principal can only access selective
-          metadata for
-        :param max_results: int (optional)
-          Maximum number of volumes to return (page length).
-          
-          If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
-          to a value greater than 0, the page length is the minimum of this value and a server configured
-          value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
-          (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
-          error is returned;
-          
-          Note: this parameter controls only the maximum number of volumes to return. The actual number of
-          volumes returned in a page may be smaller than this value, including 0, even if there are more
-          pages.
-        :param page_token: str (optional)
-          Opaque token returned by a previous request. It must be included in the request to retrieve the next
-          page of results (pagination).
-        
-        :returns: Iterator over :class:`VolumeInfo`
-        
+
+Gets an array of volumes for the current metastore under the parent catalog and schema.
+
+The returned volumes are filtered based on the privileges of the calling user. For example, the
+metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
+**READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case,
+the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+**USE_SCHEMA** privilege on the parent schema.
+
+There is no guarantee of a specific ordering of the elements in the array.
+
+:param catalog_name: str
+  The identifier of the catalog
+:param schema_name: str
+  The identifier of the schema
+:param include_browse: bool (optional)
+  Whether to include volumes in the response for which the principal can only access selective
+  metadata
+:param max_results: int (optional)
+  Maximum number of volumes to return (page length).
+  
+  If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
+  to a value greater than 0, the page length is the minimum of this value and a server configured
+  value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
+  (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
+  error is returned;
+  
+  Note: this parameter controls only the maximum number of volumes to return. The actual number of
+  volumes returned in a page may be smaller than this value, including 0, even if there are more
+  pages.
+:param page_token: str (optional)
+  Opaque token returned by a previous request. It must be included in the request to retrieve the next
+  page of results (pagination).
+
+:returns: Iterator over :class:`VolumeInfo`
+
 
     .. py:method:: read(name: str [, include_browse: Optional[bool]]) -> VolumeInfo
 
@@ -210,21 +210,21 @@
             w.volumes.delete(name=created_volume.full_name)
 
         Get a Volume.
-        
-        Gets a volume from the metastore for a specific catalog and schema.
-        
-        The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the
-        volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-        
-        :param name: str
-          The three-level (fully qualified) name of the volume
-        :param include_browse: bool (optional)
-          Whether to include volumes in the response for which the principal can only access selective
-          metadata for
-        
-        :returns: :class:`VolumeInfo`
-        
+
+Gets a volume from the metastore for a specific catalog and schema.
+
+The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the
+volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+
+:param name: str
+  The three-level (fully qualified) name of the volume
+:param include_browse: bool (optional)
+  Whether to include volumes in the response for which the principal can only access selective
+  metadata
+
+:returns: :class:`VolumeInfo`
+
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> VolumeInfo
 
@@ -274,23 +274,22 @@
             w.volumes.delete(name=created_volume.full_name)
 
         Update a Volume.
-        
-        Updates the specified volume under the specified parent catalog and schema.
-        
-        The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
-        also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-        privilege on the parent schema.
-        
-        Currently only the name, the owner or the comment of the volume could be updated.
-        
-        :param name: str
-          The three-level (fully qualified) name of the volume
-        :param comment: str (optional)
-          The comment attached to the volume
-        :param new_name: str (optional)
-          New name for the volume.
-        :param owner: str (optional)
-          The identifier of the user who owns the volume
-        
-        :returns: :class:`VolumeInfo`
-        
\ No newline at end of file
+
+Updates the specified volume under the specified parent catalog and schema.
+
+The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
+also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+privilege on the parent schema.
+
+Currently, only the name, the owner, or the comment of the volume can be updated.
+
+:param name: str
+  The three-level (fully qualified) name of the volume
+:param comment: str (optional)
+  The comment attached to the volume
+:param new_name: str (optional)
+  New name for the volume.
+:param owner: str (optional)
+  The identifier of the user who owns the volume
+
+:returns: :class:`VolumeInfo`
diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst
index 08a74b29e..32f218d00 100644
--- a/docs/workspace/catalog/workspace_bindings.rst
+++ b/docs/workspace/catalog/workspace_bindings.rst
@@ -5,19 +5,19 @@
 .. py:class:: WorkspaceBindingsAPI
 
     A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be
-    accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list
-    of workspaces. This API allows you to configure (bind) securables to workspaces.
-    
-    NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
-    workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
-    
-    A securable's workspace bindings can be configured by a metastore admin or the owner of the securable.
-    
-    The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use
-    the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
-    ability to bind a securable in READ_ONLY mode (catalogs only).
-    
-    Securable types that support binding: - catalog - storage_credential - external_location
+accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list
+of workspaces. This API allows you to configure (bind) securables to workspaces.
+
+NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
+workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
+
+A securable's workspace bindings can be configured by a metastore admin or the owner of the securable.
+
+The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use
+the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
+ability to bind a securable in READ_ONLY mode (catalogs only).
+
+Securable types that support binding: - catalog - storage_credential - external_location
 
     .. py:method:: get(name: str) -> CurrentWorkspaceBindings
 
@@ -40,37 +40,37 @@
             w.catalogs.delete(name=created.name, force=True)
 
         Get catalog workspace bindings.
-        
-        Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
-        catalog.
-        
-        :param name: str
-          The name of the catalog.
-        
-        :returns: :class:`CurrentWorkspaceBindings`
-        
+
+Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+catalog.
+
+:param name: str
+  The name of the catalog.
+
+:returns: :class:`CurrentWorkspaceBindings`
+
 
     .. py:method:: get_bindings(securable_type: GetBindingsSecurableType, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding]
 
         Get securable workspace bindings.
-        
-        Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
-        securable.
-        
-        :param securable_type: :class:`GetBindingsSecurableType`
-          The type of the securable to bind to a workspace.
-        :param securable_name: str
-          The name of the securable.
-        :param max_results: int (optional)
-          Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
-          configured value (recommended); - When set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - When set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all the workspace bindings are returned (not recommended).
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`WorkspaceBinding`
-        
+
+Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
+securable.
+
+:param securable_type: :class:`GetBindingsSecurableType`
+  The type of the securable to bind to a workspace.
+:param securable_name: str
+  The name of the securable.
+:param max_results: int (optional)
+  Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
+  configured value (recommended); - When set to a value greater than 0, the page length is the minimum
+  of this value and a server configured value; - When set to a value less than 0, an invalid parameter
+  error is returned; - If not set, all the workspace bindings are returned (not recommended).
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`WorkspaceBinding`
+
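+A minimal usage sketch (assuming ``GetBindingsSecurableType`` from ``databricks.sdk.service.catalog``
+with a ``CATALOG`` member; the catalog name is made up):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.catalog import GetBindingsSecurableType
+
+    w = WorkspaceClient()
+    # Enumerate the workspaces a catalog is bound to.
+    for binding in w.workspace_bindings.get_bindings(
+            securable_type=GetBindingsSecurableType.CATALOG,
+            securable_name="my_catalog"):
+        print(binding)
+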
 
     .. py:method:: update(name: str [, assign_workspaces: Optional[List[int]], unassign_workspaces: Optional[List[int]]]) -> CurrentWorkspaceBindings
 
@@ -96,35 +96,34 @@
             w.catalogs.delete(name=created.name, force=True)
 
         Update catalog workspace bindings.
-        
-        Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
-        catalog.
-        
-        :param name: str
-          The name of the catalog.
-        :param assign_workspaces: List[int] (optional)
-          A list of workspace IDs.
-        :param unassign_workspaces: List[int] (optional)
-          A list of workspace IDs.
-        
-        :returns: :class:`CurrentWorkspaceBindings`
-        
+
+Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+catalog.
+
+:param name: str
+  The name of the catalog.
+:param assign_workspaces: List[int] (optional)
+  A list of workspace IDs.
+:param unassign_workspaces: List[int] (optional)
+  A list of workspace IDs.
+
+:returns: :class:`CurrentWorkspaceBindings`
+
 
     .. py:method:: update_bindings(securable_type: UpdateBindingsSecurableType, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse
 
         Update securable workspace bindings.
-        
-        Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
-        securable.
-        
-        :param securable_type: :class:`UpdateBindingsSecurableType`
-          The type of the securable to bind to a workspace.
-        :param securable_name: str
-          The name of the securable.
-        :param add: List[:class:`WorkspaceBinding`] (optional)
-          List of workspace bindings
-        :param remove: List[:class:`WorkspaceBinding`] (optional)
-          List of workspace bindings
-        
-        :returns: :class:`WorkspaceBindingsResponse`
-        
\ No newline at end of file
+
+Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
+securable.
+
+:param securable_type: :class:`UpdateBindingsSecurableType`
+  The type of the securable to bind to a workspace.
+:param securable_name: str
+  The name of the securable.
+:param add: List[:class:`WorkspaceBinding`] (optional)
+  List of workspace bindings
+:param remove: List[:class:`WorkspaceBinding`] (optional)
+  List of workspace bindings
+
+:returns: :class:`WorkspaceBindingsResponse`
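+
+A minimal usage sketch (assuming ``UpdateBindingsSecurableType`` and ``WorkspaceBinding`` from
+``databricks.sdk.service.catalog``, with ``workspace_id`` as a ``WorkspaceBinding`` field; the IDs and
+names are placeholders):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.catalog import (UpdateBindingsSecurableType,
+                                                WorkspaceBinding)
+
+    w = WorkspaceClient()
+    # Bind one additional workspace to an isolated catalog.
+    w.workspace_bindings.update_bindings(
+        securable_type=UpdateBindingsSecurableType.CATALOG,
+        securable_name="my_catalog",
+        add=[WorkspaceBinding(workspace_id=1234567890)])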
diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst
index fe282543a..e58981eb9 100644
--- a/docs/workspace/cleanrooms/clean_room_assets.rst
+++ b/docs/workspace/cleanrooms/clean_room_assets.rst
@@ -5,90 +5,89 @@
 .. py:class:: CleanRoomAssetsAPI
 
     Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the
-    clean room.
+clean room.
 
     .. py:method:: create(clean_room_name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
 
         Create an asset.
-        
-        Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC
-        asset that is added through this method, the clean room owner must also have enough privilege on the
-        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
-        access the asset. Typically, you should use a group as the clean room owner.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param asset: :class:`CleanRoomAsset` (optional)
-          Metadata of the clean room asset
-        
-        :returns: :class:`CleanRoomAsset`
-        
+
+Create a clean room asset: share an asset like a notebook or table into the clean room. For each UC
+asset that is added through this method, the clean room owner must also have enough privilege on the
+asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+access the asset. Typically, you should use a group as the clean room owner.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param asset: :class:`CleanRoomAsset` (optional)
+  Metadata of the clean room asset
+
+:returns: :class:`CleanRoomAsset`
+
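+A minimal usage sketch (assuming ``CleanRoomAsset`` and ``CleanRoomAssetAssetType`` from
+``databricks.sdk.service.cleanrooms``, with a ``TABLE`` member; the clean room and asset names are
+made up):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
+                                                   CleanRoomAssetAssetType)
+
+    w = WorkspaceClient()
+    # Share an existing UC table into the clean room.
+    asset = w.clean_room_assets.create(
+        clean_room_name="my_clean_room",
+        asset=CleanRoomAsset(name="shared_catalog.shared_schema.my_table",
+                             asset_type=CleanRoomAssetAssetType.TABLE))
+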
 
     .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str)
 
         Delete an asset.
-        
-        Delete a clean room asset - unshare/remove the asset from the clean room
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param asset_type: :class:`CleanRoomAssetAssetType`
-          The type of the asset.
-        :param asset_full_name: str
-          The fully qualified name of the asset, it is same as the name field in CleanRoomAsset.
-        
-        
-        
+
+Delete a clean room asset, i.e. unshare/remove the asset from the clean room.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param asset_type: :class:`CleanRoomAssetAssetType`
+  The type of the asset.
+:param asset_full_name: str
+  The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+
+
+
 
     .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset
 
         Get an asset.
-        
-        Get the details of a clean room asset by its type and full name.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param asset_type: :class:`CleanRoomAssetAssetType`
-          The type of the asset.
-        :param asset_full_name: str
-          The fully qualified name of the asset, it is same as the name field in CleanRoomAsset.
-        
-        :returns: :class:`CleanRoomAsset`
-        
+
+Get the details of a clean room asset by its type and full name.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param asset_type: :class:`CleanRoomAssetAssetType`
+  The type of the asset.
+:param asset_full_name: str
+  The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+
+:returns: :class:`CleanRoomAsset`
+
 
     .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset]
 
         List assets.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CleanRoomAsset`
-        
+
+:param clean_room_name: str
+  Name of the clean room.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`CleanRoomAsset`
+
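+A minimal usage sketch (placeholder clean room name; the iterator handles pagination):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for asset in w.clean_room_assets.list(clean_room_name="my_clean_room"):
+        print(asset.name)
+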
 
     .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
 
         Update an asset.
-        
-        Update a clean room asset. For example, updating the content of a notebook; changing the shared
-        partitions of a table; etc.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param asset_type: :class:`CleanRoomAssetAssetType`
-          The type of the asset.
-        :param name: str
-          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
-          name displayed in the clean room UI.
-          
-          For UC securable assets (tables, volumes, etc.), the format is
-          *shared_catalog*.*shared_schema*.*asset_name*
-          
-          For notebooks, the name is the notebook file name.
-        :param asset: :class:`CleanRoomAsset` (optional)
-          Metadata of the clean room asset
-        
-        :returns: :class:`CleanRoomAsset`
-        
\ No newline at end of file
+
+Update a clean room asset. For example, update the content of a notebook or change the shared
+partitions of a table.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param asset_type: :class:`CleanRoomAssetAssetType`
+  The type of the asset.
+:param name: str
+  A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+  name displayed in the clean room UI.
+  
+  For UC securable assets (tables, volumes, etc.), the format is
+  *shared_catalog*.*shared_schema*.*asset_name*
+  
+  For notebooks, the name is the notebook file name.
+:param asset: :class:`CleanRoomAsset` (optional)
+  Metadata of the clean room asset
+
+:returns: :class:`CleanRoomAsset`
diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst
index dcf59037c..f8c421231 100644
--- a/docs/workspace/cleanrooms/clean_room_task_runs.rst
+++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst
@@ -9,17 +9,16 @@
     .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun]
 
         List notebook task runs.
-        
-        List all the historical notebook task runs in a clean room.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param notebook_name: str (optional)
-          Notebook name
-        :param page_size: int (optional)
-          The maximum number of task runs to return
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
-        
\ No newline at end of file
+
+List all the historical notebook task runs in a clean room.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param notebook_name: str (optional)
+  Notebook name
+:param page_size: int (optional)
+  The maximum number of task runs to return
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`CleanRoomNotebookTaskRun`
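+
+A minimal usage sketch (the clean room name is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for run in w.clean_room_task_runs.list(clean_room_name="demo-clean-room", page_size=10):
+        print(run)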
diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst
index 8ef5d8827..4a56740d9 100644
--- a/docs/workspace/cleanrooms/clean_rooms.rst
+++ b/docs/workspace/cleanrooms/clean_rooms.rst
@@ -5,91 +5,90 @@
 .. py:class:: CleanRoomsAPI
 
     A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
-    environment where multiple parties can work together on sensitive enterprise data without direct access to
-    each other’s data.
+environment where multiple parties can work together on sensitive enterprise data without direct access to
+each other’s data.
 
     .. py:method:: create( [, clean_room: Optional[CleanRoom]]) -> CleanRoom
 
         Create a clean room.
-        
-        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
-        name field inside the clean_room field can be used to poll the clean room status, using the
-        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
-        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
-        once it enters an ACTIVE state.
-        
-        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
-        
-        :param clean_room: :class:`CleanRoom` (optional)
-        
-        :returns: :class:`CleanRoom`
-        
+
+Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+name field inside the clean_room field can be used to poll the clean room status, using the
+:method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+once it enters an ACTIVE state.
+
+The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+
+:param clean_room: :class:`CleanRoom` (optional)
+
+:returns: :class:`CleanRoom`
+
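+A minimal creation sketch (a real clean room typically needs collaborator details as well; only
+`name` is shown here, and the `databricks.sdk.service.cleanrooms` module path is assumed):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import cleanrooms
+
+    w = WorkspaceClient()
+    room = w.clean_rooms.create(clean_room=cleanrooms.CleanRoom(name="demo-clean-room"))
+    print(room.status)  # PROVISIONING until the clean room becomes ACTIVE
+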
 
     .. py:method:: create_output_catalog(clean_room_name: str [, output_catalog: Optional[CleanRoomOutputCatalog]]) -> CreateCleanRoomOutputCatalogResponse
 
         Create an output catalog.
-        
-        Create the output catalog of the clean room.
-        
-        :param clean_room_name: str
-          Name of the clean room.
-        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
-        
-        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
-        
+
+Create the output catalog of the clean room.
+
+:param clean_room_name: str
+  Name of the clean room.
+:param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+
+:returns: :class:`CreateCleanRoomOutputCatalogResponse`
+
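+A minimal sketch (untested; the `catalog_name` field and both names are assumptions for
+illustration):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import cleanrooms
+
+    w = WorkspaceClient()
+    resp = w.clean_rooms.create_output_catalog(
+        clean_room_name="demo-clean-room",
+        output_catalog=cleanrooms.CleanRoomOutputCatalog(catalog_name="demo_output_catalog"),
+    )
+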
 
     .. py:method:: delete(name: str)
 
         Delete a clean room.
-        
-        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
-        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
-        but it will be in a DELETED state and no operations other than deletion can be performed on it.
-        
-        :param name: str
-          Name of the clean room.
-        
-        
-        
+
+Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+but it will be in a DELETED state and no operations other than deletion can be performed on it.
+
+:param name: str
+  Name of the clean room.
+
+
+
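+A minimal usage sketch (the name is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.clean_rooms.delete(name="demo-clean-room")
+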
 
     .. py:method:: get(name: str) -> CleanRoom
 
         Get a clean room.
-        
-        Get the details of a clean room given its name.
-        
-        :param name: str
-        
-        :returns: :class:`CleanRoom`
-        
+
+Get the details of a clean room given its name.
+
+:param name: str
+
+:returns: :class:`CleanRoom`
+
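+A minimal usage sketch (the name is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    room = w.clean_rooms.get(name="demo-clean-room")
+    print(room.status)
+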
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom]
 
         List clean rooms.
-        
-        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
-        returned.
-        
-        :param page_size: int (optional)
-          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CleanRoom`
-        
+
+Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+returned.
+
+:param page_size: int (optional)
+  Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+:param page_token: str (optional)
+  Opaque pagination token to go to the next page based on the previous query.
+
+:returns: Iterator over :class:`CleanRoom`
+
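+A minimal usage sketch:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for room in w.clean_rooms.list(page_size=50):
+        print(room.name)
+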
 
     .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom
 
         Update a clean room.
-        
-        Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
-        privilege, or be metastore admin.
-        
-        When the caller is a metastore admin, only the __owner__ field can be updated.
-        
-        :param name: str
-          Name of the clean room.
-        :param clean_room: :class:`CleanRoom` (optional)
-        
-        :returns: :class:`CleanRoom`
-        
\ No newline at end of file
+
+Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+privilege, or be metastore admin.
+
+When the caller is a metastore admin, only the __owner__ field can be updated.
+
+:param name: str
+  Name of the clean room.
+:param clean_room: :class:`CleanRoom` (optional)
+
+:returns: :class:`CleanRoom`
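+
+A minimal sketch of an owner-style update (untested; the `comment` field is used purely for
+illustration):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import cleanrooms
+
+    w = WorkspaceClient()
+    updated = w.clean_rooms.update(
+        name="demo-clean-room",
+        clean_room=cleanrooms.CleanRoom(comment="Refreshed collaboration terms"),
+    )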
diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst
index 65066964c..0ea0dc1fa 100644
--- a/docs/workspace/compute/cluster_policies.rst
+++ b/docs/workspace/compute/cluster_policies.rst
@@ -5,22 +5,22 @@
 .. py:class:: ClusterPoliciesAPI
 
     You can use cluster policies to control users' ability to configure clusters based on a set of rules.
-    These rules specify which attributes or attribute values can be used during cluster creation. Cluster
-    policies have ACLs that limit their use to specific users and groups.
-    
-    With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
-    the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
-    settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some
-    fields. - Manage costs by setting limits on attributes that impact the hourly rate.
-    
-    Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
-    creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
-    policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
-    permission and access to cluster policies can select the Unrestricted policy and policies they have access
-    to. - A user that has access to only cluster policies, can select the policies they have access to.
-    
-    If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
-    edit, and delete policies. Admin users also have access to all policies.
+These rules specify which attributes or attribute values can be used during cluster creation. Cluster
+policies have ACLs that limit their use to specific users and groups.
+
+With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
+the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
+settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some
+fields. - Manage costs by setting limits on attributes that impact the hourly rate.
+
+Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
+creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
+policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
+permission and access to cluster policies can select the Unrestricted policy and policies they have access
+to. - A user who has access only to cluster policies can select the policies they have access to.
+
+If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
+edit, and delete policies. Admin users also have access to all policies.
 
     .. py:method:: create( [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse
 
@@ -48,53 +48,53 @@
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Create a new policy.
-        
-        Creates a new policy with prescribed settings.
-        
-        :param definition: str (optional)
-          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-          
-          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-        :param description: str (optional)
-          Additional human-readable description of the cluster policy.
-        :param libraries: List[:class:`Library`] (optional)
-          A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
-          number of libraries is 500.
-        :param max_clusters_per_user: int (optional)
-          Max number of clusters per user that can be active using this policy. If not present, there is no
-          max limit.
-        :param name: str (optional)
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
-        :param policy_family_definition_overrides: str (optional)
-          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
-          document must be passed as a string and cannot be embedded in the requests.
-          
-          You can use this to customize the policy definition inherited from the policy family. Policy rules
-          specified here are merged into the inherited policy definition.
-          
-          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-        :param policy_family_id: str (optional)
-          ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
-          definition.
-          
-          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
-          policy definition.
-        
-        :returns: :class:`CreatePolicyResponse`
-        
+
+Creates a new policy with prescribed settings.
+
+:param definition: str (optional)
+  Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+  
+  [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+:param description: str (optional)
+  Additional human-readable description of the cluster policy.
+:param libraries: List[:class:`Library`] (optional)
+  A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
+  number of libraries is 500.
+:param max_clusters_per_user: int (optional)
+  Max number of clusters per user that can be active using this policy. If not present, there is no
+  max limit.
+:param name: str (optional)
+  Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+  characters.
+:param policy_family_definition_overrides: str (optional)
+  Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
+  document must be passed as a string and cannot be embedded in the requests.
+  
+  You can use this to customize the policy definition inherited from the policy family. Policy rules
+  specified here are merged into the inherited policy definition.
+  
+  [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+:param policy_family_id: str (optional)
+  ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
+  definition.
+  
+  Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
+  policy definition.
+
+:returns: :class:`CreatePolicyResponse`
+
 
     .. py:method:: delete(policy_id: str)
 
         Delete a cluster policy.
-        
-        Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
-        
-        :param policy_id: str
-          The ID of the policy to delete.
-        
-        
-        
+
+Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
+
+:param policy_id: str
+  The ID of the policy to delete.
+
+
+
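+A minimal usage sketch (the policy ID is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.cluster_policies.delete(policy_id="ABC100000000000")
+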
 
     .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]])
 
@@ -134,44 +134,44 @@
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Update a cluster policy.
-        
-        Update an existing policy for cluster. This operation may make some clusters governed by the previous
-        policy invalid.
-        
-        :param policy_id: str
-          The ID of the policy to update.
-        :param definition: str (optional)
-          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-          
-          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-        :param description: str (optional)
-          Additional human-readable description of the cluster policy.
-        :param libraries: List[:class:`Library`] (optional)
-          A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
-          number of libraries is 500.
-        :param max_clusters_per_user: int (optional)
-          Max number of clusters per user that can be active using this policy. If not present, there is no
-          max limit.
-        :param name: str (optional)
-          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-          characters.
-        :param policy_family_definition_overrides: str (optional)
-          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
-          document must be passed as a string and cannot be embedded in the requests.
-          
-          You can use this to customize the policy definition inherited from the policy family. Policy rules
-          specified here are merged into the inherited policy definition.
-          
-          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-        :param policy_family_id: str (optional)
-          ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
-          definition.
-          
-          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
-          policy definition.
-        
-        
-        
+
+Update an existing policy for a cluster. This operation may make some clusters governed by the previous
+policy invalid.
+
+:param policy_id: str
+  The ID of the policy to update.
+:param definition: str (optional)
+  Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+  
+  [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+:param description: str (optional)
+  Additional human-readable description of the cluster policy.
+:param libraries: List[:class:`Library`] (optional)
+  A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
+  number of libraries is 500.
+:param max_clusters_per_user: int (optional)
+  Max number of clusters per user that can be active using this policy. If not present, there is no
+  max limit.
+:param name: str (optional)
+  Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+  characters.
+:param policy_family_definition_overrides: str (optional)
+  Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
+  document must be passed as a string and cannot be embedded in the requests.
+  
+  You can use this to customize the policy definition inherited from the policy family. Policy rules
+  specified here are merged into the inherited policy definition.
+  
+  [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+:param policy_family_id: str (optional)
+  ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
+  definition.
+  
+  Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
+  policy definition.
+
+
+
 
     .. py:method:: get(policy_id: str) -> Policy
 
@@ -201,39 +201,39 @@
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Get a cluster policy.
-        
-        Get a cluster policy entity. Creation and editing is available to admins only.
-        
-        :param policy_id: str
-          Canonical unique identifier for the Cluster Policy.
-        
-        :returns: :class:`Policy`
-        
+
+Get a cluster policy entity. Creation and editing is available to admins only.
+
+:param policy_id: str
+  Canonical unique identifier for the Cluster Policy.
+
+:returns: :class:`Policy`
+
 
     .. py:method:: get_permission_levels(cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse
 
         Get cluster policy permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param cluster_policy_id: str
-          The cluster policy for which to get or manage permissions.
-        
-        :returns: :class:`GetClusterPolicyPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param cluster_policy_id: str
+  The cluster policy for which to get or manage permissions.
+
+:returns: :class:`GetClusterPolicyPermissionLevelsResponse`
+
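+A minimal usage sketch (the policy ID is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    levels = w.cluster_policies.get_permission_levels(cluster_policy_id="ABC100000000000")
+    print(levels)
+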
 
     .. py:method:: get_permissions(cluster_policy_id: str) -> ClusterPolicyPermissions
 
         Get cluster policy permissions.
-        
-        Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
-        object.
-        
-        :param cluster_policy_id: str
-          The cluster policy for which to get or manage permissions.
-        
-        :returns: :class:`ClusterPolicyPermissions`
-        
+
+Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
+object.
+
+:param cluster_policy_id: str
+  The cluster policy for which to get or manage permissions.
+
+:returns: :class:`ClusterPolicyPermissions`
+
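+A minimal usage sketch (the policy ID is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    perms = w.cluster_policies.get_permissions(cluster_policy_id="ABC100000000000")
+    print(perms)
+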
 
     .. py:method:: list( [, sort_column: Optional[ListSortColumn], sort_order: Optional[ListSortOrder]]) -> Iterator[Policy]
 
@@ -250,43 +250,42 @@
             all = w.cluster_policies.list(compute.ListClusterPoliciesRequest())
 
         List cluster policies.
-        
-        Returns a list of policies accessible by the requesting user.
-        
-        :param sort_column: :class:`ListSortColumn` (optional)
-          The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
-          creation time. * `POLICY_NAME` - Sort result list by policy name.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC`
-          - Sort result list in ascending order.
-        
-        :returns: Iterator over :class:`Policy`
-        
+
+Returns a list of policies accessible by the requesting user.
+
+:param sort_column: :class:`ListSortColumn` (optional)
+  The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
+  creation time. * `POLICY_NAME` - Sort result list by policy name.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC`
+  - Sort result list in ascending order.
+
+:returns: Iterator over :class:`Policy`
+
 
     .. py:method:: set_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions
 
         Set cluster policy permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param cluster_policy_id: str
-          The cluster policy for which to get or manage permissions.
-        :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-        
-        :returns: :class:`ClusterPolicyPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param cluster_policy_id: str
+  The cluster policy for which to get or manage permissions.
+:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
+
+:returns: :class:`ClusterPolicyPermissions`
+
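+A minimal sketch (untested; the user name, policy ID, and the `CAN_USE` permission level are
+assumptions for illustration):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    w.cluster_policies.set_permissions(
+        cluster_policy_id="ABC100000000000",
+        access_control_list=[
+            compute.ClusterPolicyAccessControlRequest(
+                user_name="someone@example.com",
+                permission_level=compute.ClusterPolicyPermissionLevel.CAN_USE,
+            )
+        ],
+    )
+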
 
     .. py:method:: update_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions
 
         Update cluster policy permissions.
-        
-        Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
-        object.
-        
-        :param cluster_policy_id: str
-          The cluster policy for which to get or manage permissions.
-        :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-        
-        :returns: :class:`ClusterPolicyPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
+object.
+
+:param cluster_policy_id: str
+  The cluster policy for which to get or manage permissions.
+:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
+
+:returns: :class:`ClusterPolicyPermissions`
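+
+A minimal sketch mirroring the `set_permissions` example, but applying an incremental update
+(the group name, policy ID, and `CAN_USE` level are illustrative assumptions):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    w.cluster_policies.update_permissions(
+        cluster_policy_id="ABC100000000000",
+        access_control_list=[
+            compute.ClusterPolicyAccessControlRequest(
+                group_name="data-engineers",
+                permission_level=compute.ClusterPolicyPermissionLevel.CAN_USE,
+            )
+        ],
+    )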
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index c51a61602..aef271caa 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -5,25 +5,25 @@
 .. py:class:: ClustersExt
 
     The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
-    
-    Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing
-    page for a list of the supported instance types and their corresponding DBUs.
-    
-    A Databricks cluster is a set of computation resources and configurations on which you run data
-    engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming
-    analytics, ad-hoc analytics, and machine learning.
-    
-    You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a
-    distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data
-    collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs.
-    
-    You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and
-    restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
-    analysis.
-    
-    IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
-    keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
-    administrator can pin a cluster to the cluster list.
+
+Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing
+page for a list of the supported instance types and their corresponding DBUs.
+
+A Databricks cluster is a set of computation resources and configurations on which you run data
+engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming
+analytics, ad-hoc analytics, and machine learning.
+
+You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a
+distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data
+collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs.
+
+You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and
+restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
+analysis.
+
+IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
+keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
+administrator can pin a cluster to the cluster list.
 
     .. py:method:: change_owner(cluster_id: str, owner_username: str)
 
@@ -58,18 +58,18 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Change cluster owner.
-        
-        Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform
-        this operation. The service principal application ID can be supplied as an argument to
-        `owner_username`.
-        
-        :param cluster_id: str
-          
-        :param owner_username: str
-          New owner of the cluster_id after this RPC.
-        
-        
-        
+
+Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform
+this operation. The service principal application ID can be supplied as an argument to
+`owner_username`.
+
+:param cluster_id: str
+  
+:param owner_username: str
+  New owner of the cluster after this RPC.
+
+
+
 
     .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
@@ -99,171 +99,171 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Create new cluster.
-        
-        Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
-        necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud
-        provider limitations (account limits, spot price, etc.) or transient network issues.
-        
-        If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
-        Otherwise the cluster will terminate with an informative error message.
-        
-        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
-        the [create compute UI] and then copying the generated JSON definition from the UI.
-        
-        [create compute UI]: https://docs.databricks.com/compute/configure.html
-        
-        :param spark_version: str
-          The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
-          retrieved by using the :method:clusters/sparkVersions API call.
-        :param apply_policy_default_values: bool (optional)
-          When set to true, fixed and default values from the policy will be used for fields that are omitted.
-          When set to false, only fixed values from the policy will be applied.
-        :param autoscale: :class:`AutoScale` (optional)
-          Parameters needed in order to automatically scale clusters up and down based on load. Note:
-          autoscaling works best with DB runtime versions 3.0 or later.
-        :param autotermination_minutes: int (optional)
-          Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
-          cluster will not be automatically terminated. If specified, the threshold must be between 10 and
-          10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
-        :param aws_attributes: :class:`AwsAttributes` (optional)
-          Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
-          a set of default values will be used.
-        :param azure_attributes: :class:`AzureAttributes` (optional)
-          Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
-          set of default values will be used.
-        :param clone_from: :class:`CloneCluster` (optional)
-          When specified, this clones libraries from a source cluster during the creation of a new cluster.
-        :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
-        :param cluster_name: str (optional)
-          Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
-          the cluster name will be an empty string.
-        :param custom_tags: Dict[str,str] (optional)
-          Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
-          instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
-          - Currently, Databricks allows at most 45 custom tags
-          
-          - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
-        :param data_security_mode: :class:`DataSecurityMode` (optional)
-          Data security mode decides what data governance model to use when accessing data from a cluster.
-          
-          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
-          choose the most appropriate access mode depending on your compute configuration. *
-          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
-          for `SINGLE_USER`.
-          
-          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
-          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
-          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
-          Most programming languages, cluster features and data governance features are available in this
-          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
-          fully isolated so that they cannot see each other's data and credentials. Most data governance
-          features are supported in this mode. But programming languages and cluster features might be
-          limited.
-          
-          The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
-          future Databricks Runtime versions:
-          
-          * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
-          `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
-          clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
-          nor passthrough enabled.
-        :param docker_image: :class:`DockerImage` (optional)
-        :param driver_instance_pool_id: str (optional)
-          The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses
-          the instance pool with id (instance_pool_id) if the driver pool is not assigned.
-        :param driver_node_type_id: str (optional)
-          The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
-          will be set as the same value as `node_type_id` defined above.
-        :param enable_elastic_disk: bool (optional)
-          Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-          when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-          to function correctly - refer to the User Guide for more details.
-        :param enable_local_disk_encryption: bool (optional)
-          Whether to enable LUKS on cluster VMs' local disks
-        :param gcp_attributes: :class:`GcpAttributes` (optional)
-          Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
-          creation, a set of default values will be used.
-        :param init_scripts: List[:class:`InitScriptInfo`] (optional)
-          The configuration for storing init scripts. Any number of destinations can be specified. The scripts
-          are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
-          logs are sent to `//init_scripts`.
-        :param instance_pool_id: str (optional)
-          The optional ID of the instance pool to which the cluster belongs.
-        :param is_single_node: bool (optional)
-          This field can only be used with `kind`.
-          
-          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
-          and `num_workers`
-        :param kind: :class:`Kind` (optional)
-          The kind of compute described by this compute specification.
-          
-          Depending on `kind`, different validations and default values will be applied.
-          
-          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
-        :param node_type_id: str (optional)
-          This field encodes, through a single value, the resources available to each of the Spark nodes in
-          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-          intensive workloads. A list of available node types can be retrieved by using the
-          :method:clusters/listNodeTypes API call.
-        :param num_workers: int (optional)
-          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
-          Note: When reading the properties of a cluster, this field reflects the desired number of workers
-          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-          provisioned.
-        :param policy_id: str (optional)
-          The ID of the cluster policy used to create the cluster if applicable.
-        :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Determines the cluster's runtime engine, either standard or Photon.
-          
-          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
-          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-          
-          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
-          -photon-, in which case Photon will be used.
-        :param single_user_name: str (optional)
-          Single user name if data_security_mode is `SINGLE_USER`
-        :param spark_conf: Dict[str,str] (optional)
-          An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
-          can also pass in a string of extra JVM options to the driver and the executors via
-          `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
-        :param spark_env_vars: Dict[str,str] (optional)
-          An object containing a set of optional, user-specified environment variable key-value pairs. Please
-          note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
-          launching the driver and workers.
-          
-          In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
-          `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks
-          managed environmental variables are included as well.
-          
-          Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
-          "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
-          -Dspark.shuffle.service.enabled=true"}`
-        :param ssh_public_keys: List[str] (optional)
-          SSH public key contents that will be added to each Spark node in this cluster. The corresponding
-          private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
-          specified.
-        :param use_ml_runtime: bool (optional)
-          This field can only be used with `kind`.
-          
-          `effective_spark_version` is determined by `spark_version` (DBR release), this field
-          `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
-        :param workload_type: :class:`WorkloadType` (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
+necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud
+provider limitations (account limits, spot price, etc.) or transient network issues.
+
+If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
+Otherwise the cluster will terminate with an informative error message.
+
+Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+the [create compute UI] and then copying the generated JSON definition from the UI.
+
+[create compute UI]: https://docs.databricks.com/compute/configure.html
+
+:param spark_version: str
+  The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
+  retrieved by using the :method:clusters/sparkVersions API call.
+:param apply_policy_default_values: bool (optional)
+  When set to true, fixed and default values from the policy will be used for fields that are omitted.
+  When set to false, only fixed values from the policy will be applied.
+:param autoscale: :class:`AutoScale` (optional)
+  Parameters needed in order to automatically scale clusters up and down based on load. Note:
+  autoscaling works best with DB runtime versions 3.0 or later.
+:param autotermination_minutes: int (optional)
+  Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
+  cluster will not be automatically terminated. If specified, the threshold must be between 10 and
+  10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
+:param aws_attributes: :class:`AwsAttributes` (optional)
+  Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
+  a set of default values will be used.
+:param azure_attributes: :class:`AzureAttributes` (optional)
+  Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
+  set of default values will be used.
+:param clone_from: :class:`CloneCluster` (optional)
+  When specified, this clones libraries from a source cluster during the creation of a new cluster.
+:param cluster_log_conf: :class:`ClusterLogConf` (optional)
+  The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+  destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+  specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+  `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+  of executor logs is `$destination/$clusterId/executor`.
+:param cluster_name: str (optional)
+  Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
+  the cluster name will be an empty string.
+:param custom_tags: Dict[str,str] (optional)
+  Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+  instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
+  
+  - Currently, Databricks allows at most 45 custom tags
+  
+  - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
+:param data_security_mode: :class:`DataSecurityMode` (optional)
+  Data security mode decides what data governance model to use when accessing data from a cluster.
+  
+  The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+  choose the most appropriate access mode depending on your compute configuration. *
+  `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+  for `SINGLE_USER`.
+  
+  The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+  users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+  A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+  Most programming languages, cluster features and data governance features are available in this
+  mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+  fully isolated so that they cannot see each other's data and credentials. Most data governance
+  features are supported in this mode. But programming languages and cluster features might be
+  limited.
+  
+  The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
+  future Databricks Runtime versions:
+  
+  * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
+  `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
+  clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
+  standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that has neither UC
+  nor passthrough enabled.
+:param docker_image: :class:`DockerImage` (optional)
+:param driver_instance_pool_id: str (optional)
+  The optional ID of the instance pool to which the cluster's driver belongs. If the driver pool is
+  not assigned, the cluster uses the instance pool with id (instance_pool_id).
+:param driver_node_type_id: str (optional)
+  The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
+  will be set as the same value as `node_type_id` defined above.
+:param enable_elastic_disk: bool (optional)
+  Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
+  when its Spark workers are running low on disk space. This feature requires specific AWS permissions
+  to function correctly - refer to the User Guide for more details.
+:param enable_local_disk_encryption: bool (optional)
+  Whether to enable LUKS on cluster VMs' local disks
+:param gcp_attributes: :class:`GcpAttributes` (optional)
+  Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
+  creation, a set of default values will be used.
+:param init_scripts: List[:class:`InitScriptInfo`] (optional)
+  The configuration for storing init scripts. Any number of destinations can be specified. The scripts
+  are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
+  logs are sent to `//init_scripts`.
+:param instance_pool_id: str (optional)
+  The optional ID of the instance pool to which the cluster belongs.
+:param is_single_node: bool (optional)
+  This field can only be used with `kind`.
+  
+  When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+  and `num_workers`
+:param kind: :class:`Kind` (optional)
+  The kind of compute described by this compute specification.
+  
+  Depending on `kind`, different validations and default values will be applied.
+  
+  The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+:param node_type_id: str (optional)
+  This field encodes, through a single value, the resources available to each of the Spark nodes in
+  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+  intensive workloads. A list of available node types can be retrieved by using the
+  :method:clusters/listNodeTypes API call.
+:param num_workers: int (optional)
+  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+  
+  Note: When reading the properties of a cluster, this field reflects the desired number of workers
+  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+  provisioned.
+:param policy_id: str (optional)
+  The ID of the cluster policy used to create the cluster if applicable.
+:param runtime_engine: :class:`RuntimeEngine` (optional)
+  Determines the cluster's runtime engine, either standard or Photon.
+  
+  This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+  `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+  
+  If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+  `-photon-`, in which case Photon will be used.
+:param single_user_name: str (optional)
+  Single user name if data_security_mode is `SINGLE_USER`
+:param spark_conf: Dict[str,str] (optional)
+  An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
+  can also pass in a string of extra JVM options to the driver and the executors via
+  `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
+:param spark_env_vars: Dict[str,str] (optional)
+  An object containing a set of optional, user-specified environment variable key-value pairs. Please
+  note that a key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
+  launching the driver and workers.
+  
+  In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
+  `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+  Databricks-managed environment variables are included as well.
+  
+  Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+  "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+  -Dspark.shuffle.service.enabled=true"}`
+:param ssh_public_keys: List[str] (optional)
+  SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+  private keys can be used to log in with the user name `ubuntu` on port `2200`. Up to 10 keys can be
+  specified.
+:param use_ml_runtime: bool (optional)
+  This field can only be used with `kind`.
+  
+  `effective_spark_version` is determined by `spark_version` (DBR release), this field
+  `use_ml_runtime`, and whether `node_type_id` is a GPU node.
+:param workload_type: :class:`WorkloadType` (optional)
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
 
     .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -298,18 +298,18 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Terminate cluster.
-        
-        Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the
-        termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a
-        `TERMINATING` or `TERMINATED` state, nothing will happen.
-        
-        :param cluster_id: str
-          The cluster to be terminated.
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_terminated for more details.
-        
+
+Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the
+termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a
+`TERMINATING` or `TERMINATED` state, nothing will happen.
+
+:param cluster_id: str
+  The cluster to be terminated.
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_terminated for more details.
+
 
     .. py:method:: delete_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
@@ -349,171 +349,171 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Update cluster configuration.
-        
-        Updates the configuration of a cluster to match the provided attributes and size. A cluster can be
-        updated if it is in a `RUNNING` or `TERMINATED` state.
-        
-        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
-        can take effect.
-        
-        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it
-        is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update
-        a cluster in any other state will be rejected with an `INVALID_STATE` error code.
-        
-        Clusters created by the Databricks Jobs service cannot be edited.
-        
-        :param cluster_id: str
-          ID of the cluster
-        :param spark_version: str
-          The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
-          retrieved by using the :method:clusters/sparkVersions API call.
-        :param apply_policy_default_values: bool (optional)
-          When set to true, fixed and default values from the policy will be used for fields that are omitted.
-          When set to false, only fixed values from the policy will be applied.
-        :param autoscale: :class:`AutoScale` (optional)
-          Parameters needed in order to automatically scale clusters up and down based on load. Note:
-          autoscaling works best with DB runtime versions 3.0 or later.
-        :param autotermination_minutes: int (optional)
-          Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
-          cluster will not be automatically terminated. If specified, the threshold must be between 10 and
-          10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
-        :param aws_attributes: :class:`AwsAttributes` (optional)
-          Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
-          a set of default values will be used.
-        :param azure_attributes: :class:`AzureAttributes` (optional)
-          Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
-          set of default values will be used.
-        :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
-        :param cluster_name: str (optional)
-          Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
-          the cluster name will be an empty string.
-        :param custom_tags: Dict[str,str] (optional)
-          Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
-          instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
-          - Currently, Databricks allows at most 45 custom tags
-          
-          - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
-        :param data_security_mode: :class:`DataSecurityMode` (optional)
-          Data security mode decides what data governance model to use when accessing data from a cluster.
-          
-          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
-          choose the most appropriate access mode depending on your compute configuration. *
-          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
-          for `SINGLE_USER`.
-          
-          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
-          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
-          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
-          Most programming languages, cluster features and data governance features are available in this
-          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
-          fully isolated so that they cannot see each other's data and credentials. Most data governance
-          features are supported in this mode. But programming languages and cluster features might be
-          limited.
-          
-          The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
-          future Databricks Runtime versions:
-          
-          * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
-          `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
-          clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
-          nor passthrough enabled.
-        :param docker_image: :class:`DockerImage` (optional)
-        :param driver_instance_pool_id: str (optional)
-          The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses
-          the instance pool with id (instance_pool_id) if the driver pool is not assigned.
-        :param driver_node_type_id: str (optional)
-          The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
-          will be set as the same value as `node_type_id` defined above.
-        :param enable_elastic_disk: bool (optional)
-          Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-          when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-          to function correctly - refer to the User Guide for more details.
-        :param enable_local_disk_encryption: bool (optional)
-          Whether to enable LUKS on cluster VMs' local disks
-        :param gcp_attributes: :class:`GcpAttributes` (optional)
-          Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
-          creation, a set of default values will be used.
-        :param init_scripts: List[:class:`InitScriptInfo`] (optional)
-          The configuration for storing init scripts. Any number of destinations can be specified. The scripts
-          are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
-          logs are sent to `//init_scripts`.
-        :param instance_pool_id: str (optional)
-          The optional ID of the instance pool to which the cluster belongs.
-        :param is_single_node: bool (optional)
-          This field can only be used with `kind`.
-          
-          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
-          and `num_workers`
-        :param kind: :class:`Kind` (optional)
-          The kind of compute described by this compute specification.
-          
-          Depending on `kind`, different validations and default values will be applied.
-          
-          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
-        :param node_type_id: str (optional)
-          This field encodes, through a single value, the resources available to each of the Spark nodes in
-          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-          intensive workloads. A list of available node types can be retrieved by using the
-          :method:clusters/listNodeTypes API call.
-        :param num_workers: int (optional)
-          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
-          Note: When reading the properties of a cluster, this field reflects the desired number of workers
-          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-          provisioned.
-        :param policy_id: str (optional)
-          The ID of the cluster policy used to create the cluster if applicable.
-        :param runtime_engine: :class:`RuntimeEngine` (optional)
-          Determines the cluster's runtime engine, either standard or Photon.
-          
-          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
-          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-          
-          If left unspecified, the runtime engine defaults to standard unless the spark_version contains
-          -photon-, in which case Photon will be used.
-        :param single_user_name: str (optional)
-          Single user name if data_security_mode is `SINGLE_USER`
-        :param spark_conf: Dict[str,str] (optional)
-          An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
-          can also pass in a string of extra JVM options to the driver and the executors via
-          `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
-        :param spark_env_vars: Dict[str,str] (optional)
-          An object containing a set of optional, user-specified environment variable key-value pairs. Please
-          note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
-          launching the driver and workers.
-          
-          In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
-          `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks
-          managed environmental variables are included as well.
-          
-          Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
-          "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
-          -Dspark.shuffle.service.enabled=true"}`
-        :param ssh_public_keys: List[str] (optional)
-          SSH public key contents that will be added to each Spark node in this cluster. The corresponding
-          private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
-          specified.
-        :param use_ml_runtime: bool (optional)
-          This field can only be used with `kind`.
-          
-          `effective_spark_version` is determined by `spark_version` (DBR release), this field
-          `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
-        :param workload_type: :class:`WorkloadType` (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Updates the configuration of a cluster to match the provided attributes and size. A cluster can be
+updated if it is in a `RUNNING` or `TERMINATED` state.
+
+If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+can take effect.
+
+If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it
+is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update
+a cluster in any other state will be rejected with an `INVALID_STATE` error code.
+
+Clusters created by the Databricks Jobs service cannot be edited.
+
+:param cluster_id: str
+  ID of the cluster
+:param spark_version: str
+  The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
+  retrieved by using the :method:clusters/sparkVersions API call.
+:param apply_policy_default_values: bool (optional)
+  When set to true, fixed and default values from the policy will be used for fields that are omitted.
+  When set to false, only fixed values from the policy will be applied.
+:param autoscale: :class:`AutoScale` (optional)
+  Parameters needed in order to automatically scale clusters up and down based on load. Note:
+  autoscaling works best with DB runtime versions 3.0 or later.
+:param autotermination_minutes: int (optional)
+  Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
+  cluster will not be automatically terminated. If specified, the threshold must be between 10 and
+  10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
+:param aws_attributes: :class:`AwsAttributes` (optional)
+  Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
+  a set of default values will be used.
+:param azure_attributes: :class:`AzureAttributes` (optional)
+  Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
+  set of default values will be used.
+:param cluster_log_conf: :class:`ClusterLogConf` (optional)
+  The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+  destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+  specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+  `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+  of executor logs is `$destination/$clusterId/executor`.
+:param cluster_name: str (optional)
+  Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
+  the cluster name will be an empty string.
+:param custom_tags: Dict[str,str] (optional)
+  Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+  instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
+  
+  - Currently, Databricks allows at most 45 custom tags
+  
+  - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
+:param data_security_mode: :class:`DataSecurityMode` (optional)
+  Data security mode decides what data governance model to use when accessing data from a cluster.
+  
+  The following modes can only be used with `kind`.
+  
+  * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on
+    your compute configuration.
+  * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`.
+  * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
+  
+  The following modes can be used regardless of `kind`.
+  
+  * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
+    are not available in this mode.
+  * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features are
+    available in this mode.
+  * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully
+    isolated so that they cannot see each other's data and credentials. Most data governance features
+    are supported in this mode, but programming languages and cluster features might be limited.
+  
+  The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed in
+  future Databricks Runtime versions:
+  
+  * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters.
+  * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high
+    concurrency clusters.
+  * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard
+    clusters.
+  * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way to run clusters with neither UC nor
+    passthrough enabled.
+:param docker_image: :class:`DockerImage` (optional)
+:param driver_instance_pool_id: str (optional)
+  The optional ID of the instance pool to which the cluster's driver belongs. If the driver pool is
+  not assigned, the driver uses the instance pool with ID `instance_pool_id`.
+:param driver_node_type_id: str (optional)
+  The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
+  will be set as the same value as `node_type_id` defined above.
+:param enable_elastic_disk: bool (optional)
+  Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
+  when its Spark workers are running low on disk space. This feature requires specific AWS permissions
+  to function correctly - refer to the User Guide for more details.
+:param enable_local_disk_encryption: bool (optional)
+  Whether to enable LUKS on cluster VMs' local disks
+:param gcp_attributes: :class:`GcpAttributes` (optional)
+  Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
+  creation, a set of default values will be used.
+:param init_scripts: List[:class:`InitScriptInfo`] (optional)
+  The configuration for storing init scripts. Any number of destinations can be specified. The scripts
+  are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
+  logs are sent to `//init_scripts`.
+:param instance_pool_id: str (optional)
+  The optional ID of the instance pool to which the cluster belongs.
+:param is_single_node: bool (optional)
+  This field can only be used with `kind`.
+  
+  When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+  and `num_workers`
+:param kind: :class:`Kind` (optional)
+  The kind of compute described by this compute specification.
+  
+  Depending on `kind`, different validations and default values will be applied.
+  
+  The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+:param node_type_id: str (optional)
+  This field encodes, through a single value, the resources available to each of the Spark nodes in
+  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+  intensive workloads. A list of available node types can be retrieved by using the
+  :method:clusters/listNodeTypes API call.
+:param num_workers: int (optional)
+  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+  
+  Note: When reading the properties of a cluster, this field reflects the desired number of workers
+  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+  provisioned.
+:param policy_id: str (optional)
+  The ID of the cluster policy used to create the cluster if applicable.
+:param runtime_engine: :class:`RuntimeEngine` (optional)
+  Determines the cluster's runtime engine, either standard or Photon.
+  
+  This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+  `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+  
+  If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+  `-photon-`, in which case Photon will be used.
+:param single_user_name: str (optional)
+  Single user name if data_security_mode is `SINGLE_USER`
+:param spark_conf: Dict[str,str] (optional)
+  An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
+  can also pass in a string of extra JVM options to the driver and the executors via
+  `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
+:param spark_env_vars: Dict[str,str] (optional)
+  An object containing a set of optional, user-specified environment variable key-value pairs. Please
+  note that a key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
+  launching the driver and workers.
+  
+  In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
+  `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+  Databricks-managed environment variables are included as well.
+  
+  Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+  "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+  -Dspark.shuffle.service.enabled=true"}`
+:param ssh_public_keys: List[str] (optional)
+  SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+  private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
+  specified.
+:param use_ml_runtime: bool (optional)
+  This field can only be used with `kind`.
+  
+  `effective_spark_version` is determined by `spark_version` (DBR release), this field
+  `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.
+:param workload_type: :class:`WorkloadType` (optional)
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
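+A hedged sketch of editing a cluster and waiting for it to come back up; the cluster ID and sizing
+values are placeholders:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.clusters.edit_and_wait(
+        cluster_id="1234-567890-abcde123",
+        spark_version=w.clusters.select_spark_version(latest=True),
+        node_type_id=w.clusters.select_node_type(local_disk=True),
+        autotermination_minutes=30,
+        num_workers=2,
+    )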
 
     .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -573,30 +573,30 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         List cluster activity events.
-        
-        Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more
-        events to read, the response includes all the nparameters necessary to request the next page of
-        events.
-        
-        :param cluster_id: str
-          The ID of the cluster to retrieve events about.
-        :param end_time: int (optional)
-          The end time in epoch milliseconds. If empty, returns events up to the current time.
-        :param event_types: List[:class:`EventType`] (optional)
-          An optional set of event types to filter on. If empty, all event types are returned.
-        :param limit: int (optional)
-          The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed
-          value is 500.
-        :param offset: int (optional)
-          The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results
-          are requested in descending order, the end_time field is required.
-        :param order: :class:`GetEventsOrder` (optional)
-          The order to list events in; either "ASC" or "DESC". Defaults to "DESC".
-        :param start_time: int (optional)
-          The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.
-        
-        :returns: Iterator over :class:`ClusterEvent`
-        
+
+Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more
+events to read, the response includes all the parameters necessary to request the next page of
+events.
+
+:param cluster_id: str
+  The ID of the cluster to retrieve events about.
+:param end_time: int (optional)
+  The end time in epoch milliseconds. If empty, returns events up to the current time.
+:param event_types: List[:class:`EventType`] (optional)
+  An optional set of event types to filter on. If empty, all event types are returned.
+:param limit: int (optional)
+  The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed
+  value is 500.
+:param offset: int (optional)
+  The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results
+  are requested in descending order, the end_time field is required.
+:param order: :class:`GetEventsOrder` (optional)
+  The order to list events in; either "ASC" or "DESC". Defaults to "DESC".
+:param start_time: int (optional)
+  The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.
+
+:returns: Iterator over :class:`ClusterEvent`
+
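+A minimal sketch of paging through events; the cluster ID is a placeholder, and the iterator fetches
+further pages transparently:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    for event in w.clusters.events(cluster_id="1234-567890-abcde123",
+                                   order=compute.GetEventsOrder.DESC,
+                                   limit=50):
+        print(event.timestamp, event.type)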
 
     .. py:method:: get(cluster_id: str) -> ClusterDetails
 
@@ -628,39 +628,39 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Get cluster info.
-        
-        Retrieves the information for a cluster given its identifier. Clusters can be described while they are
-        running, or up to 60 days after they are terminated.
-        
-        :param cluster_id: str
-          The cluster about which to retrieve information.
-        
-        :returns: :class:`ClusterDetails`
-        
+
+Retrieves the information for a cluster given its identifier. Clusters can be described while they are
+running, or up to 60 days after they are terminated.
+
+:param cluster_id: str
+  The cluster about which to retrieve information.
+
+:returns: :class:`ClusterDetails`
+
 
     .. py:method:: get_permission_levels(cluster_id: str) -> GetClusterPermissionLevelsResponse
 
         Get cluster permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param cluster_id: str
-          The cluster for which to get or manage permissions.
-        
-        :returns: :class:`GetClusterPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param cluster_id: str
+  The cluster for which to get or manage permissions.
+
+:returns: :class:`GetClusterPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(cluster_id: str) -> ClusterPermissions
 
         Get cluster permissions.
-        
-        Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
-        
-        :param cluster_id: str
-          The cluster for which to get or manage permissions.
-        
-        :returns: :class:`ClusterPermissions`
-        
+
+Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
+
+:param cluster_id: str
+  The cluster for which to get or manage permissions.
+
+:returns: :class:`ClusterPermissions`
+
 
     .. py:method:: list( [, filter_by: Optional[ListClustersFilterBy], page_size: Optional[int], page_token: Optional[str], sort_by: Optional[ListClustersSortBy]]) -> Iterator[ClusterDetails]
 
@@ -677,23 +677,23 @@
             all = w.clusters.list(compute.ListClustersRequest())
 
         List clusters.
-        
-        Return information about all pinned and active clusters, and all clusters terminated within the last
-        30 days. Clusters terminated prior to this period are not included.
-        
-        :param filter_by: :class:`ListClustersFilterBy` (optional)
-          Filters to apply to the list of clusters.
-        :param page_size: int (optional)
-          Use this field to specify the maximum number of results to be returned by the server. The server may
-          further constrain the maximum number of results returned in a single page.
-        :param page_token: str (optional)
-          Use next_page_token or prev_page_token returned from the previous request to list the next or
-          previous page of clusters respectively.
-        :param sort_by: :class:`ListClustersSortBy` (optional)
-          Sort the list of clusters by a specific criteria.
-        
-        :returns: Iterator over :class:`ClusterDetails`
-        
+
+Returns information about all pinned and active clusters, and all clusters terminated within the last
+30 days. Clusters terminated prior to this period are not included.
+
+:param filter_by: :class:`ListClustersFilterBy` (optional)
+  Filters to apply to the list of clusters.
+:param page_size: int (optional)
+  Use this field to specify the maximum number of results to be returned by the server. The server may
+  further constrain the maximum number of results returned in a single page.
+:param page_token: str (optional)
+  Use next_page_token or prev_page_token returned from the previous request to list the next or
+  previous page of clusters respectively.
+:param sort_by: :class:`ListClustersSortBy` (optional)
+  Sort the list of clusters by a specific criterion.
+
+:returns: Iterator over :class:`ClusterDetails`
+
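+A minimal sketch of listing clusters page by page; the page size is illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for c in w.clusters.list(page_size=50):
+        print(c.cluster_id, c.cluster_name, c.state)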
 
     .. py:method:: list_node_types() -> ListNodeTypesResponse
 
@@ -709,37 +709,37 @@
             nodes = w.clusters.list_node_types()
 
         List node types.
-        
-        Returns a list of supported Spark node types. These node types can be used to launch a cluster.
-        
-        :returns: :class:`ListNodeTypesResponse`
-        
+
+Returns a list of supported Spark node types. These node types can be used to launch a cluster.
+
+:returns: :class:`ListNodeTypesResponse`
+
 
     .. py:method:: list_zones() -> ListAvailableZonesResponse
 
         List availability zones.
-        
-        Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These
-        zones can be used to launch a cluster.
-        
-        :returns: :class:`ListAvailableZonesResponse`
-        
+
+Returns a list of availability zones where clusters can be created (for example, us-west-2a). These
+zones can be used to launch a cluster.
+
+:returns: :class:`ListAvailableZonesResponse`
+
 
     .. py:method:: permanent_delete(cluster_id: str)
 
         Permanently delete cluster.
-        
-        Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously
-        removed.
-        
-        In addition, users will no longer see permanently deleted clusters in the cluster list, and API users
-        can no longer perform any action on permanently deleted clusters.
-        
-        :param cluster_id: str
-          The cluster to be deleted.
-        
-        
-        
+
+Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously
+removed.
+
+In addition, users will no longer see permanently deleted clusters in the cluster list, and API users
+can no longer perform any action on permanently deleted clusters.
+
+:param cluster_id: str
+  The cluster to be deleted.
+
+
+
 
     .. py:method:: pin(cluster_id: str)
 
@@ -771,15 +771,15 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Pin cluster.
-        
-        Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a
-        cluster that is already pinned will have no effect. This API can only be called by workspace admins.
-        
-        :param cluster_id: str
-          
-        
-        
-        
+
+Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a
+cluster that is already pinned will have no effect. This API can only be called by workspace admins.
+
+:param cluster_id: str
+  
+
+
+
 
     .. py:method:: resize(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int]]) -> Wait[ClusterDetails]
 
@@ -811,29 +811,29 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Resize cluster.
-        
-        Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a
-        `RUNNING` state.
-        
-        :param cluster_id: str
-          The cluster to be resized.
-        :param autoscale: :class:`AutoScale` (optional)
-          Parameters needed in order to automatically scale clusters up and down based on load. Note:
-          autoscaling works best with DB runtime versions 3.0 or later.
-        :param num_workers: int (optional)
-          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-          
-          Note: When reading the properties of a cluster, this field reflects the desired number of workers
-          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-          provisioned.
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a
+`RUNNING` state.
+
+:param cluster_id: str
+  The cluster to be resized.
+:param autoscale: :class:`AutoScale` (optional)
+  Parameters needed in order to automatically scale clusters up and down based on load. Note:
+  autoscaling works best with DB runtime versions 3.0 or later.
+:param num_workers: int (optional)
+  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+  
+  Note: When reading the properties of a cluster, this field reflects the desired number of workers
+  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+  provisioned.
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
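+A minimal sketch of resizing a running cluster to a fixed size; the cluster ID and worker count are
+placeholders:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    details = w.clusters.resize_and_wait(cluster_id="1234-567890-abcde123", num_workers=4)
+    print(details.num_workers)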
 
     .. py:method:: resize_and_wait(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -868,19 +868,19 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Restart cluster.
-        
-        Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state,
-        nothing will happen.
-        
-        :param cluster_id: str
-          The cluster to be started.
-        :param restart_user: str (optional)
-          
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state,
+nothing will happen.
+
+:param cluster_id: str
+  The cluster to be restarted.
+:param restart_user: str (optional)
+  
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
 
     .. py:method:: restart_and_wait(cluster_id: str [, restart_user: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -900,22 +900,22 @@
 
         Selects smallest available node type given the conditions.
 
-        :param min_memory_gb: int
-        :param gb_per_core: int
-        :param min_cores: int
-        :param min_gpus: int
-        :param local_disk: bool
-        :param local_disk_min_size: bool
-        :param category: bool
-        :param photon_worker_capable: bool
-        :param photon_driver_capable: bool
-        :param graviton: bool
-        :param is_io_cache_enabled: bool
-        :param support_port_forwarding: bool
-        :param fleet: bool
-
-        :returns: `node_type` compatible string
-        
+:param min_memory_gb: int
+:param gb_per_core: int
+:param min_cores: int
+:param min_gpus: int
+:param local_disk: bool
+:param local_disk_min_size: int
+:param category: str
+:param photon_worker_capable: bool
+:param photon_driver_capable: bool
+:param graviton: bool
+:param is_io_cache_enabled: bool
+:param support_port_forwarding: bool
+:param fleet: bool
+
+:returns: `node_type` compatible string
+
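+A minimal sketch; the filter values are illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # smallest node type with a local disk and at least 16 GB of memory
+    node_type_id = w.clusters.select_node_type(local_disk=True, min_memory_gb=16)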
 
     .. py:method:: select_spark_version(long_term_support: bool = False, beta: bool = False, latest: bool = True, ml: bool = False, genomics: bool = False, gpu: bool = False, scala: str = 2.12, spark_version: str, photon: bool = False, graviton: bool = False) -> str
 
@@ -932,42 +932,42 @@
 
         Selects the latest Databricks Runtime Version.
 
-        :param long_term_support: bool
-        :param beta: bool
-        :param latest: bool
-        :param ml: bool
-        :param genomics: bool
-        :param gpu: bool
-        :param scala: str
-        :param spark_version: str
-        :param photon: bool
-        :param graviton: bool
+:param long_term_support: bool
+:param beta: bool
+:param latest: bool
+:param ml: bool
+:param genomics: bool
+:param gpu: bool
+:param scala: str
+:param spark_version: str
+:param photon: bool
+:param graviton: bool
+
+:returns: `spark_version` compatible string
 
-        :returns: `spark_version` compatible string
-        
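+A minimal sketch of picking a runtime string:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # latest long-term-support (LTS) Databricks Runtime
+    spark_version = w.clusters.select_spark_version(latest=True, long_term_support=True)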
 
     .. py:method:: set_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions
 
         Set cluster permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param cluster_id: str
-          The cluster for which to get or manage permissions.
-        :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-        
-        :returns: :class:`ClusterPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param cluster_id: str
+  The cluster for which to get or manage permissions.
+:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
+
+:returns: :class:`ClusterPermissions`
+
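+A hedged sketch; the principal is a placeholder, and note that this call replaces all direct
+permissions on the cluster:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    w.clusters.set_permissions(
+        cluster_id="1234-567890-abcde123",
+        access_control_list=[
+            compute.ClusterAccessControlRequest(
+                user_name="someone@example.com",
+                permission_level=compute.ClusterPermissionLevel.CAN_RESTART,
+            )
+        ],
+    )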
 
     .. py:method:: spark_versions() -> GetSparkVersionsResponse
 
         List available Spark versions.
-        
-        Returns the list of available Spark versions. These versions can be used to launch a cluster.
-        
-        :returns: :class:`GetSparkVersionsResponse`
-        
+
+Returns the list of available Spark versions. These versions can be used to launch a cluster.
+
+:returns: :class:`GetSparkVersionsResponse`
+
 
     .. py:method:: start(cluster_id: str) -> Wait[ClusterDetails]
 
@@ -999,21 +999,21 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Start terminated cluster.
-        
-        Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except:
-        
-        * The previous cluster id and attributes are preserved. * The cluster starts with the last specified
-        cluster size. * If the previous cluster was an autoscaling cluster, the current cluster starts with
-        the minimum number of nodes. * If the cluster is not currently in a `TERMINATED` state, nothing will
-        happen. * Clusters launched to run a job cannot be started.
-        
-        :param cluster_id: str
-          The cluster to be started.
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Starts a terminated Spark cluster with the supplied ID. This works similarly to `createCluster`,
+except:
+
+* The previous cluster id and attributes are preserved.
+* The cluster starts with the last specified cluster size.
+* If the previous cluster was an autoscaling cluster, the current cluster starts with the minimum
+  number of nodes.
+* If the cluster is not currently in a `TERMINATED` state, nothing will happen.
+* Clusters launched to run a job cannot be started.
+
+:param cluster_id: str
+  The cluster to be started.
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
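+A minimal sketch; the cluster ID is a placeholder:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # blocks until the cluster reaches RUNNING
+    details = w.clusters.start_and_wait(cluster_id="1234-567890-abcde123")
+    print(details.state)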
 
     .. py:method:: start_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
@@ -1048,44 +1048,44 @@
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Unpin cluster.
-        
-        Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
-        Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace
-        admins.
-        
-        :param cluster_id: str
-          
-        
-        
-        
+
+Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
+Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace
+admins.
+
+:param cluster_id: str
+  
+
+
+
 
     .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails]
 
         Update cluster configuration (partial).
-        
-        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
-        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
-        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
-        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
-        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
-        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
-        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
-        updated.
-        
-        :param cluster_id: str
-          ID of the cluster.
-        :param update_mask: str
-          Specifies which fields of the cluster will be updated. This is required in the POST request. The
-          update mask should be supplied as a single string. To specify multiple fields, separate them with
-          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
-          string but omit it from the `cluster` object.
-        :param cluster: :class:`UpdateClusterResource` (optional)
-          The cluster to be updated.
-        
-        :returns:
-          Long-running operation waiter for :class:`ClusterDetails`.
-          See :method:wait_get_cluster_running for more details.
-        
+
+Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+updated.
+
+:param cluster_id: str
+  ID of the cluster.
+:param update_mask: str
+  Specifies which fields of the cluster will be updated. This is required in the POST request. The
+  update mask should be supplied as a single string. To specify multiple fields, separate them with
+  commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+  string but omit it from the `cluster` object.
+:param cluster: :class:`UpdateClusterResource` (optional)
+  The cluster to be updated.
+
+:returns:
+  Long-running operation waiter for :class:`ClusterDetails`.
+  See :method:wait_get_cluster_running for more details.
+
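+A hedged sketch of a partial update; only the field named in `update_mask` is changed, and the
+cluster ID is a placeholder:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    w.clusters.update_and_wait(
+        cluster_id="1234-567890-abcde123",
+        update_mask="autotermination_minutes",
+        cluster=compute.UpdateClusterResource(autotermination_minutes=60),
+    )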
 
     .. py:method:: update_and_wait(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -1093,15 +1093,15 @@
     .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions
 
         Update cluster permissions.
-        
-        Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
-        
-        :param cluster_id: str
-          The cluster for which to get or manage permissions.
-        :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-        
-        :returns: :class:`ClusterPermissions`
-        
+
+Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
+
+:param cluster_id: str
+  The cluster for which to get or manage permissions.
+:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
+
+:returns: :class:`ClusterPermissions`
+
 
     .. py:method:: wait_get_cluster_running(cluster_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[ClusterDetails], None]]) -> ClusterDetails
 
diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst
index 916a48ba5..1b6f7e9fb 100644
--- a/docs/workspace/compute/command_execution.rst
+++ b/docs/workspace/compute/command_execution.rst
@@ -5,24 +5,24 @@
 .. py:class:: CommandExecutionAPI
 
     This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
-    only supports (classic) all-purpose clusters. Serverless compute is not supported.
+only supports (classic) all-purpose clusters. Serverless compute is not supported.
 
     .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse]
 
         Cancel a command.
-        
-        Cancels a currently running command within an execution context.
-        
-        The command ID is obtained from a prior successful call to __execute__.
-        
-        :param cluster_id: str (optional)
-        :param command_id: str (optional)
-        :param context_id: str (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`CommandStatusResponse`.
-          See :method:wait_command_status_command_execution_cancelled for more details.
-        
+
+Cancels a currently running command within an execution context.
+
+The command ID is obtained from a prior successful call to __execute__.
+
+:param cluster_id: str (optional)
+:param command_id: str (optional)
+:param context_id: str (optional)
+
+:returns:
+  Long-running operation waiter for :class:`CommandStatusResponse`.
+  See :method:wait_command_status_command_execution_cancelled for more details.
+
 
     .. py:method:: cancel_and_wait( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> CommandStatusResponse
 
@@ -30,29 +30,29 @@
     .. py:method:: command_status(cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse
 
         Get command info.
-        
-        Gets the status of and, if available, the results from a currently executing command.
-        
-        The command ID is obtained from a prior successful call to __execute__.
-        
-        :param cluster_id: str
-        :param context_id: str
-        :param command_id: str
-        
-        :returns: :class:`CommandStatusResponse`
-        
+
+Gets the status of and, if available, the results from a currently executing command.
+
+The command ID is obtained from a prior successful call to __execute__.
+
+:param cluster_id: str
+:param context_id: str
+:param command_id: str
+
+:returns: :class:`CommandStatusResponse`
+
 
     .. py:method:: context_status(cluster_id: str, context_id: str) -> ContextStatusResponse
 
         Get status.
-        
-        Gets the status for an execution context.
-        
-        :param cluster_id: str
-        :param context_id: str
-        
-        :returns: :class:`ContextStatusResponse`
-        
+
+Gets the status for an execution context.
+
+:param cluster_id: str
+:param context_id: str
+
+:returns: :class:`ContextStatusResponse`
+
 
     .. py:method:: create( [, cluster_id: Optional[str], language: Optional[Language]]) -> Wait[ContextStatusResponse]
 
@@ -76,19 +76,19 @@
             w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
 
         Create an execution context.
-        
-        Creates an execution context for running cluster commands.
-        
-        If successful, this method returns the ID of the new execution context.
-        
-        :param cluster_id: str (optional)
-          Running cluster id
-        :param language: :class:`Language` (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`ContextStatusResponse`.
-          See :method:wait_context_status_command_execution_running for more details.
-        
+
+Creates an execution context for running cluster commands.
+
+If successful, this method returns the ID of the new execution context.
+
+:param cluster_id: str (optional)
+  Running cluster id
+:param language: :class:`Language` (optional)
+
+:returns:
+  Long-running operation waiter for :class:`ContextStatusResponse`.
+  See :method:wait_context_status_command_execution_running for more details.
+
 
     .. py:method:: create_and_wait( [, cluster_id: Optional[str], language: Optional[Language], timeout: datetime.timedelta = 0:20:00]) -> ContextStatusResponse
 
@@ -96,14 +96,14 @@
     .. py:method:: destroy(cluster_id: str, context_id: str)
 
         Delete an execution context.
-        
-        Deletes an execution context.
-        
-        :param cluster_id: str
-        :param context_id: str
-        
-        
-        
+
+Deletes an execution context.
+
+:param cluster_id: str
+:param context_id: str
+
+
+
 
     .. py:method:: execute( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language]]) -> Wait[CommandStatusResponse]
 
@@ -132,23 +132,23 @@
             w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
 
         Run a command.
-        
-        Runs a cluster command in the given execution context, using the provided language.
-        
-        If successful, it returns an ID for tracking the status of the command's execution.
-        
-        :param cluster_id: str (optional)
-          Running cluster id
-        :param command: str (optional)
-          Executable code
-        :param context_id: str (optional)
-          Running context id
-        :param language: :class:`Language` (optional)
-        
-        :returns:
-          Long-running operation waiter for :class:`CommandStatusResponse`.
-          See :method:wait_command_status_command_execution_finished_or_error for more details.
-        
+
+Runs a cluster command in the given execution context, using the provided language.
+
+If successful, it returns an ID for tracking the status of the command's execution.
+
+:param cluster_id: str (optional)
+  Running cluster id
+:param command: str (optional)
+  Executable code
+:param context_id: str (optional)
+  Running context id
+:param language: :class:`Language` (optional)
+
+:returns:
+  Long-running operation waiter for :class:`CommandStatusResponse`.
+  See :method:wait_command_status_command_execution_finished_or_error for more details.
+
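+A minimal end-to-end sketch: create a context, run a command, and clean up. The cluster ID is a
+placeholder and must refer to a running (classic) all-purpose cluster:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import compute
+
+    w = WorkspaceClient()
+    cluster_id = "1234-567890-abcde123"
+    ctx = w.command_execution.create_and_wait(cluster_id=cluster_id,
+                                              language=compute.Language.PYTHON)
+    result = w.command_execution.execute_and_wait(cluster_id=cluster_id,
+                                                  context_id=ctx.id,
+                                                  language=compute.Language.PYTHON,
+                                                  command="print(1 + 1)")
+    print(result.results.data)
+    w.command_execution.destroy(cluster_id=cluster_id, context_id=ctx.id)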
 
     .. py:method:: execute_and_wait( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language], timeout: datetime.timedelta = 0:20:00]) -> CommandStatusResponse
 
diff --git a/docs/workspace/compute/global_init_scripts.rst b/docs/workspace/compute/global_init_scripts.rst
index 9d2372a6d..62d5e16c4 100644
--- a/docs/workspace/compute/global_init_scripts.rst
+++ b/docs/workspace/compute/global_init_scripts.rst
@@ -5,12 +5,12 @@
 .. py:class:: GlobalInitScriptsAPI
 
     The Global Init Scripts API enables Workspace administrators to configure global initialization scripts
-    for their workspace. These scripts run on every node in every cluster in the workspace.
-    
-    **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts.
-    Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark
-    container fails to launch and init scripts with later position are skipped. If enough containers fail, the
-    entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.
+for their workspace. These scripts run on every node in every cluster in the workspace.
+
+**Important:** Existing clusters must be restarted to pick up any changes made to global init scripts.
+Global init scripts are run in order. If an init script returns with a bad exit code, the Apache Spark
+container fails to launch and init scripts with a later position are skipped. If enough containers fail, the
+entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.
 
     .. py:method:: create(name: str, script: str [, enabled: Optional[bool], position: Optional[int]]) -> CreateResponse
 
@@ -35,40 +35,40 @@
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Create init script.
-        
-        Creates a new global init script in this workspace.
-        
-        :param name: str
-          The name of the script
-        :param script: str
-          The Base64-encoded content of the script.
-        :param enabled: bool (optional)
-          Specifies whether the script is enabled. The script runs only if enabled.
-        :param position: int (optional)
-          The position of a global init script, where 0 represents the first script to run, 1 is the second
-          script to run, in ascending order.
-          
-          If you omit the numeric position for a new global init script, it defaults to last position. It will
-          run after all current scripts. Setting any value greater than the position of the last script is
-          equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2.
-          Any position of (3) or greater puts the script in the last position. If an explicit position value
-          conflicts with an existing script value, your request succeeds, but the original script at that
-          position and all later scripts have their positions incremented by 1.
-        
-        :returns: :class:`CreateResponse`
-        
+
+Creates a new global init script in this workspace.
+
+:param name: str
+  The name of the script
+:param script: str
+  The Base64-encoded content of the script.
+:param enabled: bool (optional)
+  Specifies whether the script is enabled. The script runs only if enabled.
+:param position: int (optional)
+  The position of a global init script, where 0 represents the first script to run, 1 is the second
+  script to run, in ascending order.
+  
+  If you omit the numeric position for a new global init script, it defaults to last position. It will
+  run after all current scripts. Setting any value greater than the position of the last script is
+  equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2.
+  Any position of 3 or greater puts the script in the last position. If an explicit position value
+  conflicts with an existing script value, your request succeeds, but the original script at that
+  position and all later scripts have their positions incremented by 1.
+
+:returns: :class:`CreateResponse`
+
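+A minimal sketch; the script name and contents are placeholders, and the script body must be
+Base64-encoded:
+
+.. code-block:: python
+
+    import base64
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    created = w.global_init_scripts.create(
+        name="my-init",
+        script=base64.b64encode(b"echo hello").decode(),
+        enabled=True,
+        position=10,
+    )
+    print(created.script_id)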
 
     .. py:method:: delete(script_id: str)
 
         Delete init script.
-        
-        Deletes a global init script.
-        
-        :param script_id: str
-          The ID of the global init script.
-        
-        
-        
+
+Deletes a global init script.
+
+:param script_id: str
+  The ID of the global init script.
+
+
+
 
     .. py:method:: get(script_id: str) -> GlobalInitScriptDetailsWithContent
 
@@ -95,14 +95,14 @@
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Get an init script.
-        
-        Gets all the details of a script, including its Base64-encoded contents.
-        
-        :param script_id: str
-          The ID of the global init script.
-        
-        :returns: :class:`GlobalInitScriptDetailsWithContent`
-        
+
+Gets all the details of a script, including its Base64-encoded contents.
+
+:param script_id: str
+  The ID of the global init script.
+
+:returns: :class:`GlobalInitScriptDetailsWithContent`
+
 
     .. py:method:: list() -> Iterator[GlobalInitScriptDetails]
 
@@ -118,13 +118,13 @@
             all = w.global_init_scripts.list()
 
         Get init scripts.
-        
-        Get a list of all global init scripts for this workspace. This returns all properties for each script
-        but **not** the script contents. To retrieve the contents of a script, use the [get a global init
-        script](:method:globalinitscripts/get) operation.
-        
-        :returns: Iterator over :class:`GlobalInitScriptDetails`
-        
+
+Get a list of all global init scripts for this workspace. This returns all properties for each script
+but **not** the script contents. To retrieve the contents of a script, use the [get a global init
+script](:method:globalinitscripts/get) operation.
+
+:returns: Iterator over :class:`GlobalInitScriptDetails`
+
 
     .. py:method:: update(script_id: str, name: str, script: str [, enabled: Optional[bool], position: Optional[int]])
 
@@ -153,28 +153,27 @@
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Update init script.
-        
-        Updates a global init script, specifying only the fields to change. All fields are optional.
-        Unspecified fields retain their current value.
-        
-        :param script_id: str
-          The ID of the global init script.
-        :param name: str
-          The name of the script
-        :param script: str
-          The Base64-encoded content of the script.
-        :param enabled: bool (optional)
-          Specifies whether the script is enabled. The script runs only if enabled.
-        :param position: int (optional)
-          The position of a script, where 0 represents the first script to run, 1 is the second script to run,
-          in ascending order. To move the script to run first, set its position to 0.
-          
-          To move the script to the end, set its position to any value greater or equal to the position of the
-          last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or
-          greater puts the script in the last position (2).
-          
-          If an explicit position value conflicts with an existing script, your request succeeds, but the
-          original script at that position and all later scripts have their positions incremented by 1.
-        
-        
-        
\ No newline at end of file
+
+Updates a global init script, specifying only the fields to change. All fields are optional.
+Unspecified fields retain their current value.
+
+:param script_id: str
+  The ID of the global init script.
+:param name: str
+  The name of the script
+:param script: str
+  The Base64-encoded content of the script.
+:param enabled: bool (optional)
+  Specifies whether the script is enabled. The script runs only if enabled.
+:param position: int (optional)
+  The position of a script, where 0 represents the first script to run, 1 is the second script to run,
+  in ascending order. To move the script to run first, set its position to 0.
+  
+  To move the script to the end, set its position to any value greater or equal to the position of the
+  last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or
+  greater puts the script in the last position (2).
+  
+  If an explicit position value conflicts with an existing script, your request succeeds, but the
+  original script at that position and all later scripts have their positions incremented by 1.
+
+
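+As a sketch of the re-ordering rule above, assuming ``w`` and an existing
+``script_id``, this moves a script to run first:
+
+.. code-block:: python
+
+    s = w.global_init_scripts.get(script_id=script_id)
+    # `update` resends all mutable fields, so reuse the current name and script.
+    w.global_init_scripts.update(script_id=script_id,
+                                 name=s.name,
+                                 script=s.script,
+                                 position=0)
+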
diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst
index 333c44938..61c55d0e0 100644
--- a/docs/workspace/compute/instance_pools.rst
+++ b/docs/workspace/compute/instance_pools.rst
@@ -5,19 +5,19 @@
 .. py:class:: InstancePoolsAPI
 
     Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud
-    instances which reduces a cluster start and auto-scaling times.
-    
-    Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
-    instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
-    instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
-    instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
-    returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
-    pool’s idle instances.
-    
-    You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
-    
-    Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
-    apply. See pricing.
+instances, which reduces cluster start and auto-scaling times.
+
+Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
+instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
+instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
+instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
+returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
+pool’s idle instances.
+
+You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
+
+Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
+apply. See pricing.
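+
+For instance, attaching a cluster to pools (a sketch assuming ``w`` plus existing
+``pool`` and ``driver_pool`` objects; the other parameters are illustrative):
+
+.. code-block:: python
+
+    c = w.clusters.create(cluster_name='pooled-cluster',
+                          spark_version=w.clusters.select_spark_version(latest=True),
+                          instance_pool_id=pool.instance_pool_id,
+                          driver_instance_pool_id=driver_pool.instance_pool_id,
+                          autotermination_minutes=30,
+                          num_workers=1).result()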
 
     .. py:method:: create(instance_pool_name: str, node_type_id: str [, aws_attributes: Optional[InstancePoolAwsAttributes], azure_attributes: Optional[InstancePoolAzureAttributes], custom_tags: Optional[Dict[str, str]], disk_spec: Optional[DiskSpec], enable_elastic_disk: Optional[bool], gcp_attributes: Optional[InstancePoolGcpAttributes], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], preloaded_docker_images: Optional[List[DockerImage]], preloaded_spark_versions: Optional[List[str]]]) -> CreateInstancePoolResponse
 
@@ -40,70 +40,70 @@
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Create a new instance pool.
-        
-        Creates a new instance pool using idle and ready-to-use cloud instances.
-        
-        :param instance_pool_name: str
-          Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
-          characters.
-        :param node_type_id: str
-          This field encodes, through a single value, the resources available to each of the Spark nodes in
-          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-          intensive workloads. A list of available node types can be retrieved by using the
-          :method:clusters/listNodeTypes API call.
-        :param aws_attributes: :class:`InstancePoolAwsAttributes` (optional)
-          Attributes related to instance pools running on Amazon Web Services. If not specified at pool
-          creation, a set of default values will be used.
-        :param azure_attributes: :class:`InstancePoolAzureAttributes` (optional)
-          Attributes related to instance pools running on Azure. If not specified at pool creation, a set of
-          default values will be used.
-        :param custom_tags: Dict[str,str] (optional)
-          Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
-          EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
-          - Currently, Databricks allows at most 45 custom tags
-        :param disk_spec: :class:`DiskSpec` (optional)
-          Defines the specification of the disks that will be attached to all spark containers.
-        :param enable_elastic_disk: bool (optional)
-          Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
-          additional disk space when its Spark workers are running low on disk space. In AWS, this feature
-          requires specific AWS permissions to function correctly - refer to the User Guide for more details.
-        :param gcp_attributes: :class:`InstancePoolGcpAttributes` (optional)
-          Attributes related to instance pools running on Google Cloud Platform. If not specified at pool
-          creation, a set of default values will be used.
-        :param idle_instance_autotermination_minutes: int (optional)
-          Automatically terminates the extra instances in the pool cache after they are inactive for this time
-          in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
-          will be automatically terminated after a default timeout. If specified, the threshold must be
-          between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
-          from the cache if min cache size could still hold.
-        :param max_capacity: int (optional)
-          Maximum number of outstanding instances to keep in the pool, including both instances used by
-          clusters and idle instances. Clusters that require further instance provisioning will fail during
-          upsize requests.
-        :param min_idle_instances: int (optional)
-          Minimum number of idle instances to keep in the instance pool
-        :param preloaded_docker_images: List[:class:`DockerImage`] (optional)
-          Custom Docker Image BYOC
-        :param preloaded_spark_versions: List[str] (optional)
-          A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
-          started with the preloaded Spark version will start faster. A list of available Spark versions can
-          be retrieved by using the :method:clusters/sparkVersions API call.
-        
-        :returns: :class:`CreateInstancePoolResponse`
-        
+
+Creates a new instance pool using idle and ready-to-use cloud instances.
+
+:param instance_pool_name: str
+  Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
+  characters.
+:param node_type_id: str
+  This field encodes, through a single value, the resources available to each of the Spark nodes in
+  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+  intensive workloads. A list of available node types can be retrieved by using the
+  :method:clusters/listNodeTypes API call.
+:param aws_attributes: :class:`InstancePoolAwsAttributes` (optional)
+  Attributes related to instance pools running on Amazon Web Services. If not specified at pool
+  creation, a set of default values will be used.
+:param azure_attributes: :class:`InstancePoolAzureAttributes` (optional)
+  Attributes related to instance pools running on Azure. If not specified at pool creation, a set of
+  default values will be used.
+:param custom_tags: Dict[str,str] (optional)
+  Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
+  EBS volumes) with these tags in addition to `default_tags`. Notes:
+  
+  - Currently, Databricks allows at most 45 custom tags
+:param disk_spec: :class:`DiskSpec` (optional)
+  Defines the specification of the disks that will be attached to all spark containers.
+:param enable_elastic_disk: bool (optional)
+  Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire
+  additional disk space when their Spark workers are running low on disk space. In AWS, this feature
+  requires specific AWS permissions to function correctly - refer to the User Guide for more details.
+:param gcp_attributes: :class:`InstancePoolGcpAttributes` (optional)
+  Attributes related to instance pools running on Google Cloud Platform. If not specified at pool
+  creation, a set of default values will be used.
+:param idle_instance_autotermination_minutes: int (optional)
+  Automatically terminates the extra instances in the pool cache after they are inactive for this time
+  in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
+  will be automatically terminated after a default timeout. If specified, the threshold must be
+  between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
+  from the cache if the minimum cache size can still be maintained.
+:param max_capacity: int (optional)
+  Maximum number of outstanding instances to keep in the pool, including both instances used by
+  clusters and idle instances. Clusters that require further instance provisioning will fail during
+  upsize requests.
+:param min_idle_instances: int (optional)
+  Minimum number of idle instances to keep in the instance pool
+:param preloaded_docker_images: List[:class:`DockerImage`] (optional)
+  Custom Docker Image BYOC
+:param preloaded_spark_versions: List[str] (optional)
+  A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
+  started with the preloaded Spark version will start faster. A list of available Spark versions can
+  be retrieved by using the :method:clusters/sparkVersions API call.
+
+:returns: :class:`CreateInstancePoolResponse`
+
 
     .. py:method:: delete(instance_pool_id: str)
 
         Delete an instance pool.
-        
-        Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.
-        
-        :param instance_pool_id: str
-          The instance pool to be terminated.
-        
-        
-        
+
+Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.
+
+:param instance_pool_id: str
+  The instance pool to be terminated.
+
+
+
 
     .. py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int]])
 
@@ -130,39 +130,39 @@
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Edit an existing instance pool.
-        
-        Modifies the configuration of an existing instance pool.
-        
-        :param instance_pool_id: str
-          Instance pool ID
-        :param instance_pool_name: str
-          Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
-          characters.
-        :param node_type_id: str
-          This field encodes, through a single value, the resources available to each of the Spark nodes in
-          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-          intensive workloads. A list of available node types can be retrieved by using the
-          :method:clusters/listNodeTypes API call.
-        :param custom_tags: Dict[str,str] (optional)
-          Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
-          EBS volumes) with these tags in addition to `default_tags`. Notes:
-          
-          - Currently, Databricks allows at most 45 custom tags
-        :param idle_instance_autotermination_minutes: int (optional)
-          Automatically terminates the extra instances in the pool cache after they are inactive for this time
-          in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
-          will be automatically terminated after a default timeout. If specified, the threshold must be
-          between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
-          from the cache if min cache size could still hold.
-        :param max_capacity: int (optional)
-          Maximum number of outstanding instances to keep in the pool, including both instances used by
-          clusters and idle instances. Clusters that require further instance provisioning will fail during
-          upsize requests.
-        :param min_idle_instances: int (optional)
-          Minimum number of idle instances to keep in the instance pool
-        
-        
-        
+
+Modifies the configuration of an existing instance pool.
+
+:param instance_pool_id: str
+  Instance pool ID
+:param instance_pool_name: str
+  Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
+  characters.
+:param node_type_id: str
+  This field encodes, through a single value, the resources available to each of the Spark nodes in
+  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+  intensive workloads. A list of available node types can be retrieved by using the
+  :method:clusters/listNodeTypes API call.
+:param custom_tags: Dict[str,str] (optional)
+  Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
+  EBS volumes) with these tags in addition to `default_tags`. Notes:
+  
+  - Currently, Databricks allows at most 45 custom tags
+:param idle_instance_autotermination_minutes: int (optional)
+  Automatically terminates the extra instances in the pool cache after they are inactive for this time
+  in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
+  will be automatically terminated after a default timeout. If specified, the threshold must be
+  between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
+  from the cache if the minimum cache size can still be maintained.
+:param max_capacity: int (optional)
+  Maximum number of outstanding instances to keep in the pool, including both instances used by
+  clusters and idle instances. Clusters that require further instance provisioning will fail during
+  upsize requests.
+:param min_idle_instances: int (optional)
+  Minimum number of idle instances to keep in the instance pool
+
+
+
 
     .. py:method:: get(instance_pool_id: str) -> GetInstancePool
 
@@ -187,39 +187,39 @@
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Get instance pool information.
-        
-        Retrieve the information for an instance pool based on its identifier.
-        
-        :param instance_pool_id: str
-          The canonical unique identifier for the instance pool.
-        
-        :returns: :class:`GetInstancePool`
-        
+
+Retrieve the information for an instance pool based on its identifier.
+
+:param instance_pool_id: str
+  The canonical unique identifier for the instance pool.
+
+:returns: :class:`GetInstancePool`
+
 
     .. py:method:: get_permission_levels(instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse
 
         Get instance pool permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param instance_pool_id: str
-          The instance pool for which to get or manage permissions.
-        
-        :returns: :class:`GetInstancePoolPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param instance_pool_id: str
+  The instance pool for which to get or manage permissions.
+
+:returns: :class:`GetInstancePoolPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(instance_pool_id: str) -> InstancePoolPermissions
 
         Get instance pool permissions.
-        
-        Gets the permissions of an instance pool. Instance pools can inherit permissions from their root
-        object.
-        
-        :param instance_pool_id: str
-          The instance pool for which to get or manage permissions.
-        
-        :returns: :class:`InstancePoolPermissions`
-        
+
+Gets the permissions of an instance pool. Instance pools can inherit permissions from their root
+object.
+
+:param instance_pool_id: str
+  The instance pool for which to get or manage permissions.
+
+:returns: :class:`InstancePoolPermissions`
+
 
     .. py:method:: list() -> Iterator[InstancePoolAndStats]
 
@@ -235,36 +235,35 @@
             all = w.instance_pools.list()
 
         List instance pool info.
-        
-        Gets a list of instance pools with their statistics.
-        
-        :returns: Iterator over :class:`InstancePoolAndStats`
-        
+
+Gets a list of instance pools with their statistics.
+
+:returns: Iterator over :class:`InstancePoolAndStats`
+
 
     .. py:method:: set_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions
 
         Set instance pool permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param instance_pool_id: str
-          The instance pool for which to get or manage permissions.
-        :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-        
-        :returns: :class:`InstancePoolPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param instance_pool_id: str
+  The instance pool for which to get or manage permissions.
+:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
+
+:returns: :class:`InstancePoolPermissions`
+
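+A short sketch of replacing a pool's direct permissions (the group name is
+illustrative; the request classes live in ``databricks.sdk.service.compute``):
+
+.. code-block:: python
+
+    from databricks.sdk.service import compute
+
+    w.instance_pools.set_permissions(
+        instance_pool_id=pool_id,
+        access_control_list=[
+            compute.InstancePoolAccessControlRequest(
+                group_name='data-engineers',
+                permission_level=compute.InstancePoolPermissionLevel.CAN_ATTACH_TO)
+        ])
+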
 
     .. py:method:: update_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions
 
         Update instance pool permissions.
-        
-        Updates the permissions on an instance pool. Instance pools can inherit permissions from their root
-        object.
-        
-        :param instance_pool_id: str
-          The instance pool for which to get or manage permissions.
-        :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-        
-        :returns: :class:`InstancePoolPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on an instance pool. Instance pools can inherit permissions from their root
+object.
+
+:param instance_pool_id: str
+  The instance pool for which to get or manage permissions.
+:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
+
+:returns: :class:`InstancePoolPermissions`
diff --git a/docs/workspace/compute/instance_profiles.rst b/docs/workspace/compute/instance_profiles.rst
index a7a25f869..4b863deb8 100644
--- a/docs/workspace/compute/instance_profiles.rst
+++ b/docs/workspace/compute/instance_profiles.rst
@@ -5,10 +5,10 @@
 .. py:class:: InstanceProfilesAPI
 
     The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch
-    clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3
-    buckets] using instance profiles for more information.
-    
-    [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html
+clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3
+buckets] using instance profiles for more information.
+
+[Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html
 
     .. py:method:: add(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool], skip_validation: Optional[bool]])
 
@@ -28,34 +28,34 @@
                                     iam_role_arn="arn:aws:iam::000000000000:role/bcd")
 
         Register an instance profile.
-        
-        In the UI, you can select the instance profile when launching clusters. This API is only available to
-        admin users.
-        
-        :param instance_profile_arn: str
-          The AWS ARN of the instance profile to register with Databricks. This field is required.
-        :param iam_role_arn: str (optional)
-          The AWS IAM role ARN of the role associated with the instance profile. This field is required if
-          your role name and instance profile name do not match and you want to use the instance profile with
-          [Databricks SQL Serverless].
-          
-          Otherwise, this field is optional.
-          
-          [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-        :param is_meta_instance_profile: bool (optional)
-          Boolean flag indicating whether the instance profile should only be used in credential passthrough
-          scenarios. If true, it means the instance profile contains an meta IAM role which could assume a
-          wide range of roles. Therefore it should always be used with authorization. This field is optional,
-          the default value is `false`.
-        :param skip_validation: bool (optional)
-          By default, Databricks validates that it has sufficient permissions to launch instances with the
-          instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation
-          fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your
-          requested instance type is not supported in your requested availability zone”), you can pass this
-          flag to skip the validation and forcibly add the instance profile.
-        
-        
-        
+
+In the UI, you can select the instance profile when launching clusters. This API is only available to
+admin users.
+
+:param instance_profile_arn: str
+  The AWS ARN of the instance profile to register with Databricks. This field is required.
+:param iam_role_arn: str (optional)
+  The AWS IAM role ARN of the role associated with the instance profile. This field is required if
+  your role name and instance profile name do not match and you want to use the instance profile with
+  [Databricks SQL Serverless].
+  
+  Otherwise, this field is optional.
+  
+  [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+:param is_meta_instance_profile: bool (optional)
+  Boolean flag indicating whether the instance profile should only be used in credential passthrough
+  scenarios. If true, it means the instance profile contains a meta IAM role which could assume a
+  wide range of roles. Therefore it should always be used with authorization. This field is optional,
+  the default value is `false`.
+:param skip_validation: bool (optional)
+  By default, Databricks validates that it has sufficient permissions to launch instances with the
+  instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation
+  fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your
+  requested instance type is not supported in your requested availability zone”), you can pass this
+  flag to skip the validation and forcibly add the instance profile.
+
+
+
 
     .. py:method:: edit(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool]])
 
@@ -73,38 +73,38 @@
             w.instance_profiles.edit(instance_profile_arn=arn, iam_role_arn="arn:aws:iam::000000000000:role/bcdf")
 
         Edit an instance profile.
-        
-        The only supported field to change is the optional IAM role ARN associated with the instance profile.
-        It is required to specify the IAM role ARN if both of the following are true:
-        
-        * Your role name and instance profile name do not match. The name is the part after the last slash in
-        each ARN. * You want to use the instance profile with [Databricks SQL Serverless].
-        
-        To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses].
-        
-        This API is only available to admin users.
-        
-        [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-        [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html
-        
-        :param instance_profile_arn: str
-          The AWS ARN of the instance profile to register with Databricks. This field is required.
-        :param iam_role_arn: str (optional)
-          The AWS IAM role ARN of the role associated with the instance profile. This field is required if
-          your role name and instance profile name do not match and you want to use the instance profile with
-          [Databricks SQL Serverless].
-          
-          Otherwise, this field is optional.
-          
-          [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-        :param is_meta_instance_profile: bool (optional)
-          Boolean flag indicating whether the instance profile should only be used in credential passthrough
-          scenarios. If true, it means the instance profile contains an meta IAM role which could assume a
-          wide range of roles. Therefore it should always be used with authorization. This field is optional,
-          the default value is `false`.
-        
-        
-        
+
+The only supported field to change is the optional IAM role ARN associated with the instance profile.
+It is required to specify the IAM role ARN if both of the following are true:
+
+* Your role name and instance profile name do not match. The name is the part after the last slash in each ARN.
+* You want to use the instance profile with [Databricks SQL Serverless].
+
+To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses].
+
+This API is only available to admin users.
+
+[Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+[Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html
+
+:param instance_profile_arn: str
+  The AWS ARN of the instance profile to register with Databricks. This field is required.
+:param iam_role_arn: str (optional)
+  The AWS IAM role ARN of the role associated with the instance profile. This field is required if
+  your role name and instance profile name do not match and you want to use the instance profile with
+  [Databricks SQL Serverless].
+  
+  Otherwise, this field is optional.
+  
+  [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+:param is_meta_instance_profile: bool (optional)
+  Boolean flag indicating whether the instance profile should only be used in credential passthrough
+  scenarios. If true, it means the instance profile contains a meta IAM role which could assume a
+  wide range of roles. Therefore it should always be used with authorization. This field is optional,
+  the default value is `false`.
+
+
+
 
     .. py:method:: list() -> Iterator[InstanceProfile]
 
@@ -120,25 +120,24 @@
             all = w.instance_profiles.list()
 
         List available instance profiles.
-        
-        List the instance profiles that the calling user can use to launch a cluster.
-        
-        This API is available to all users.
-        
-        :returns: Iterator over :class:`InstanceProfile`
-        
+
+List the instance profiles that the calling user can use to launch a cluster.
+
+This API is available to all users.
+
+:returns: Iterator over :class:`InstanceProfile`
+
 
     .. py:method:: remove(instance_profile_arn: str)
 
         Remove the instance profile.
-        
-        Remove the instance profile with the provided ARN. Existing clusters with this instance profile will
-        continue to function.
-        
-        This API is only accessible to admin users.
-        
-        :param instance_profile_arn: str
-          The ARN of the instance profile to remove. This field is required.
-        
-        
-        
\ No newline at end of file
+
+Remove the instance profile with the provided ARN. Existing clusters with this instance profile will
+continue to function.
+
+This API is only accessible to admin users.
+
+:param instance_profile_arn: str
+  The ARN of the instance profile to remove. This field is required.
+
+
diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst
index 64f688fdc..305039e26 100644
--- a/docs/workspace/compute/libraries.rst
+++ b/docs/workspace/compute/libraries.rst
@@ -5,71 +5,70 @@
 .. py:class:: LibrariesAPI
 
     The Libraries API allows you to install and uninstall libraries and get the status of libraries on a
-    cluster.
-    
-    To make third-party or custom code available to notebooks and jobs running on your clusters, you can
-    install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
-    Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
-    
-    Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
-    directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
-    or using an init script.
-    
-    When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
-    Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.
+cluster.
+
+To make third-party or custom code available to notebooks and jobs running on your clusters, you can
+install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
+Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
+
+Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
+directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
+or using an init script.
+
+When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
+Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.
 
     .. py:method:: all_cluster_statuses() -> Iterator[ClusterLibraryStatuses]
 
         Get all statuses.
-        
-        Get the status of all libraries on all clusters. A status is returned for all libraries installed on
-        this cluster via the API or the libraries UI.
-        
-        :returns: Iterator over :class:`ClusterLibraryStatuses`
-        
+
+Get the status of all libraries on all clusters. A status is returned for all libraries installed on
+each cluster via the API or the libraries UI.
+
+:returns: Iterator over :class:`ClusterLibraryStatuses`
+
 
     .. py:method:: cluster_status(cluster_id: str) -> Iterator[LibraryFullStatus]
 
         Get status.
-        
-        Get the status of libraries on a cluster. A status is returned for all libraries installed on this
-        cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
-        set to be installed on this cluster, in the order that the libraries were added to the cluster, are
-        returned first. 2. Libraries that were previously requested to be installed on this cluster or, but
-        are now marked for removal, in no particular order, are returned last.
-        
-        :param cluster_id: str
-          Unique identifier of the cluster whose status should be retrieved.
-        
-        :returns: Iterator over :class:`LibraryFullStatus`
-        
+
+Get the status of libraries on a cluster. A status is returned for all libraries installed on this
+cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
+set to be installed on this cluster, in the order that the libraries were added to the cluster, are
+returned first. 2. Libraries that were previously requested to be installed on this cluster, but
+are now marked for removal, in no particular order, are returned last.
+
+:param cluster_id: str
+  Unique identifier of the cluster whose status should be retrieved.
+
+:returns: Iterator over :class:`LibraryFullStatus`
+
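+For example, printing statuses in the order described above (a sketch assuming ``w``
+and a ``cluster_id``):
+
+.. code-block:: python
+
+    for lib in w.libraries.cluster_status(cluster_id=cluster_id):
+        print(lib.library, lib.status)
+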
 
     .. py:method:: install(cluster_id: str, libraries: List[Library])
 
         Add a library.
-        
-        Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
-        after the completion of this request.
-        
-        :param cluster_id: str
-          Unique identifier for the cluster on which to install these libraries.
-        :param libraries: List[:class:`Library`]
-          The libraries to install.
-        
-        
-        
+
+Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
+after the completion of this request.
+
+:param cluster_id: str
+  Unique identifier for the cluster on which to install these libraries.
+:param libraries: List[:class:`Library`]
+  The libraries to install.
+
+
+
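+A minimal sketch of installing a PyPI package on a cluster (the package name is
+illustrative):
+
+.. code-block:: python
+
+    from databricks.sdk.service.compute import Library, PyPiLibrary
+
+    w.libraries.install(cluster_id=cluster_id,
+                        libraries=[Library(pypi=PyPiLibrary(package='dbl-tempo'))])
+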
 
     .. py:method:: uninstall(cluster_id: str, libraries: List[Library])
 
         Uninstall libraries.
-        
-        Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
-        restarted. A request to uninstall a library that is not currently installed is ignored.
-        
-        :param cluster_id: str
-          Unique identifier for the cluster on which to uninstall these libraries.
-        :param libraries: List[:class:`Library`]
-          The libraries to uninstall.
-        
-        
-        
\ No newline at end of file
+
+Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
+restarted. A request to uninstall a library that is not currently installed is ignored.
+
+:param cluster_id: str
+  Unique identifier for the cluster on which to uninstall these libraries.
+:param libraries: List[:class:`Library`]
+  The libraries to uninstall.
+
+
diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst
index 90c3aeb98..b22207cd3 100644
--- a/docs/workspace/compute/policy_compliance_for_clusters.rst
+++ b/docs/workspace/compute/policy_compliance_for_clusters.rst
@@ -5,67 +5,66 @@
 .. py:class:: PolicyComplianceForClustersAPI
 
     The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your
-    workspace.
-    
-    A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
-    be out of compliance if their policy was updated after the cluster was last edited.
-    
-    The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
-    compliance API allows you to update a cluster to be compliant with the current version of its policy.
+workspace.
+
+A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
+be out of compliance if their policy was updated after the cluster was last edited.
+
+The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
+compliance API allows you to update a cluster to be compliant with the current version of its policy.
 
     .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse
 
         Enforce cluster policy compliance.
-        
-        Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
-        it is in a `RUNNING` or `TERMINATED` state.
-        
-        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
-        can take effect.
-        
-        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
-        cluster is started, the new attributes will take effect.
-        
-        Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
-        Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
-        
-        :param cluster_id: str
-          The ID of the cluster you want to enforce policy compliance on.
-        :param validate_only: bool (optional)
-          If set, previews the changes that would be made to a cluster to enforce compliance but does not
-          update the cluster.
-        
-        :returns: :class:`EnforceClusterComplianceResponse`
-        
+
+Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
+it is in a `RUNNING` or `TERMINATED` state.
+
+If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+can take effect.
+
+If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
+cluster is started, the new attributes will take effect.
+
+Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
+Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
+
+:param cluster_id: str
+  The ID of the cluster you want to enforce policy compliance on.
+:param validate_only: bool (optional)
+  If set, previews the changes that would be made to a cluster to enforce compliance but does not
+  update the cluster.
+
+:returns: :class:`EnforceClusterComplianceResponse`
+
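+A sketch of a dry run followed by enforcement, assuming ``w`` and a ``cluster_id``:
+
+.. code-block:: python
+
+    # Preview the changes without modifying the cluster.
+    preview = w.policy_compliance_for_clusters.enforce_compliance(
+        cluster_id=cluster_id, validate_only=True)
+    print(preview)
+
+    # Apply the changes; a RUNNING cluster is restarted for them to take effect.
+    w.policy_compliance_for_clusters.enforce_compliance(cluster_id=cluster_id)
+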
 
     .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse
 
         Get cluster policy compliance.
-        
-        Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
-        was updated after the cluster was last edited.
-        
-        :param cluster_id: str
-          The ID of the cluster to get the compliance status
-        
-        :returns: :class:`GetClusterComplianceResponse`
-        
+
+Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
+was updated after the cluster was last edited.
+
+:param cluster_id: str
+  The ID of the cluster to get the compliance status
+
+:returns: :class:`GetClusterComplianceResponse`
+
 
     .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance]
 
         List cluster policy compliance.
-        
-        Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
-        compliance if their policy was updated after the cluster was last edited.
-        
-        :param policy_id: str
-          Canonical unique identifier for the cluster policy.
-        :param page_size: int (optional)
-          Use this field to specify the maximum number of results to be returned by the server. The server may
-          further constrain the maximum number of results returned in a single page.
-        :param page_token: str (optional)
-          A page token that can be used to navigate to the next page or previous page as returned by
-          `next_page_token` or `prev_page_token`.
-        
-        :returns: Iterator over :class:`ClusterCompliance`
-        
\ No newline at end of file
+
+Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
+compliance if their policy was updated after the cluster was last edited.
+
+:param policy_id: str
+  Canonical unique identifier for the cluster policy.
+:param page_size: int (optional)
+  Use this field to specify the maximum number of results to be returned by the server. The server may
+  further constrain the maximum number of results returned in a single page.
+:param page_token: str (optional)
+  A page token that can be used to navigate to the next page or previous page as returned by
+  `next_page_token` or `prev_page_token`.
+
+:returns: Iterator over :class:`ClusterCompliance`
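+
+The returned iterator follows ``next_page_token`` for you, so a full scan is a plain
+loop (a sketch assuming ``w`` and a ``policy_id``; attribute names follow
+:class:`ClusterCompliance`):
+
+.. code-block:: python
+
+    for c in w.policy_compliance_for_clusters.list_compliance(policy_id=policy_id,
+                                                              page_size=50):
+        print(c.cluster_id, c.is_compliant)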
diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst
index 56e4f4275..ad8061a91 100644
--- a/docs/workspace/compute/policy_families.rst
+++ b/docs/workspace/compute/policy_families.rst
@@ -5,14 +5,14 @@
 .. py:class:: PolicyFamiliesAPI
 
     View available policy families. A policy family contains a policy definition providing best practices for
-    configuring clusters for a particular use case.
-    
-    Databricks manages and provides policy families for several common cluster use cases. You cannot create,
-    edit, or delete policy families.
-    
-    Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a
-    policy family. Cluster policies created using a policy family inherit the policy family's policy
-    definition.
+configuring clusters for a particular use case.
+
+Databricks manages and provides policy families for several common cluster use cases. You cannot create,
+edit, or delete policy families.
+
+Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a
+policy family. Cluster policies created using a policy family inherit the policy family's policy
+definition.
 
     .. py:method:: get(policy_family_id: str [, version: Optional[int]]) -> PolicyFamily
 
@@ -31,16 +31,16 @@
             first_family = w.policy_families.get(policy_family_id=all[0].policy_family_id)
 
         Get policy family information.
-        
-        Retrieve the information for an policy family based on its identifier and version
-        
-        :param policy_family_id: str
-          The family ID about which to retrieve information.
-        :param version: int (optional)
-          The version number for the family to fetch. Defaults to the latest version.
-        
-        :returns: :class:`PolicyFamily`
-        
+
+Retrieve the information for a policy family based on its identifier and version.
+
+:param policy_family_id: str
+  The family ID about which to retrieve information.
+:param version: int (optional)
+  The version number for the family to fetch. Defaults to the latest version.
+
+:returns: :class:`PolicyFamily`
+
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[PolicyFamily]
 
@@ -57,14 +57,13 @@
             all = w.policy_families.list(compute.ListPolicyFamiliesRequest())
 
         List policy families.
-        
-        Returns the list of policy definition types available to use at their latest version. This API is
-        paginated.
-        
-        :param max_results: int (optional)
-          Maximum number of policy families to return.
-        :param page_token: str (optional)
-          A token that can be used to get the next page of results.
-        
-        :returns: Iterator over :class:`PolicyFamily`
-        
\ No newline at end of file
+
+Returns the list of policy definition types available to use at their latest version. This API is
+paginated.
+
+:param max_results: int (optional)
+  Maximum number of policy families to return.
+:param page_token: str (optional)
+  A token that can be used to get the next page of results.
+
+:returns: Iterator over :class:`PolicyFamily`
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
index 5581870b9..aed179d96 100644
--- a/docs/workspace/dashboards/genie.rst
+++ b/docs/workspace/dashboards/genie.rst
@@ -5,28 +5,28 @@
 .. py:class:: GenieAPI
 
     Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
-    business users can use to ask questions using natural language. Genie uses data registered to Unity
-    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
-    Assistant must be enabled.
+business users can use to ask questions using natural language. Genie uses data registered to Unity
+Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+Assistant must be enabled.
 
     .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]
 
         Create conversation message.
-        
-        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
-        previously created messages in the conversation to respond.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the conversation is started.
-        :param conversation_id: str
-          The ID associated with the conversation.
-        :param content: str
-          User message content.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        
+
+Create a new message in a [conversation](:method:genie/startconversation). The AI response uses all
+previously created messages in the conversation to respond.
+
+:param space_id: str
+  The ID associated with the Genie space where the conversation is started.
+:param conversation_id: str
+  The ID associated with the conversation.
+:param content: str
+  User message content.
+
+:returns:
+  Long-running operation waiter for :class:`GenieMessage`.
+  See :method:wait_get_message_genie_completed for more details.
+
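+A sketch of the conversational flow, assuming ``w`` and a Genie ``space_id``; the
+``*_and_wait`` variants block until the message reaches a terminal state:
+
+.. code-block:: python
+
+    first = w.genie.start_conversation_and_wait(space_id=space_id,
+                                                content='Which region had the most sales?')
+    followup = w.genie.create_message_and_wait(space_id=space_id,
+                                               conversation_id=first.conversation_id,
+                                               content='And in the last 30 days?')
+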
 
     .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
 
@@ -34,67 +34,86 @@
     .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
 
         Execute SQL query in a conversation message.
-        
-        Execute the SQL query in the message.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        
+
+Execute the SQL query in the message.
+
+:param space_id: str
+  Genie space ID
+:param conversation_id: str
+  Conversation ID
+:param message_id: str
+  Message ID
+
+:returns: :class:`GenieGetMessageQueryResultResponse`
+
 
     .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage
 
         Get conversation message.
-        
-        Get message from conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the target conversation is located.
-        :param conversation_id: str
-          The ID associated with the target conversation.
-        :param message_id: str
-          The ID associated with the target message from the identified conversation.
-        
-        :returns: :class:`GenieMessage`
-        
+
+Get a message from a conversation.
+
+:param space_id: str
+  The ID associated with the Genie space where the target conversation is located.
+:param conversation_id: str
+  The ID associated with the target conversation.
+:param message_id: str
+  The ID associated with the target message from the identified conversation.
+
+:returns: :class:`GenieMessage`
+
 
     .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
 
         Get conversation message SQL query result.
-        
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
-        has a query attachment and the message status is `EXECUTING_QUERY`.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        
+
+Get the result of the SQL query if the message has a query attachment. This is only available if a message
+has a query attachment and the message status is `EXECUTING_QUERY`.
+
+:param space_id: str
+  Genie space ID
+:param conversation_id: str
+  Conversation ID
+:param message_id: str
+  Message ID
+
+:returns: :class:`GenieGetMessageQueryResultResponse`
+
+
+    .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse
+
+        Get conversation message SQL query result by attachment ID.
+
+Get the result of the SQL query by attachment ID. This is only available if a message has a query
+attachment and the message status is `EXECUTING_QUERY`.
+
+:param space_id: str
+  Genie space ID
+:param conversation_id: str
+  Conversation ID
+:param message_id: str
+  Message ID
+:param attachment_id: str
+  Attachment ID
+
+:returns: :class:`GenieGetMessageQueryResultResponse`
+
 
     .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage]
 
         Start conversation.
-        
-        Start a new conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where you want to start a conversation.
-        :param content: str
-          The text of the message that starts the conversation.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        
+
+Start a new conversation.
+
+:param space_id: str
+  The ID associated with the Genie space where you want to start a conversation.
+:param content: str
+  The text of the message that starts the conversation.
+
+:returns:
+  Long-running operation waiter for :class:`GenieMessage`.
+  See :method:wait_get_message_genie_completed for more details.
+
 
     .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
 
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index c37479dcb..b8c64f15d 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -5,257 +5,256 @@
 .. py:class:: LakeviewAPI
 
     These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
-    be done with Workspace API (import, export, get-status, list, delete).
+be done with the Workspace API (import, export, get-status, list, delete).
 
     .. py:method:: create( [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Create dashboard.
-        
-        Create a draft dashboard.
-        
-        :param dashboard: :class:`Dashboard` (optional)
-        
-        :returns: :class:`Dashboard`
-        
+
+Create a draft dashboard.
+
+:param dashboard: :class:`Dashboard` (optional)
+
+:returns: :class:`Dashboard`
+
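+A minimal sketch (the display name and parent path are illustrative; ``Dashboard``
+comes from ``databricks.sdk.service.dashboards``):
+
+.. code-block:: python
+
+    from databricks.sdk.service.dashboards import Dashboard
+
+    draft = w.lakeview.create(dashboard=Dashboard(display_name='Sales overview',
+                                                  parent_path='/Workspace/Shared'))
+    print(draft.dashboard_id)
+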
 
     .. py:method:: create_schedule(dashboard_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Create dashboard schedule.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
-        :param schedule: :class:`Schedule` (optional)
-        
-        :returns: :class:`Schedule`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the schedule belongs.
+:param schedule: :class:`Schedule` (optional)
+
+:returns: :class:`Schedule`
+
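+A sketch of a daily 08:00 UTC schedule (field names assumed from
+``databricks.sdk.service.dashboards``):
+
+.. code-block:: python
+
+    from databricks.sdk.service.dashboards import CronSchedule, Schedule
+
+    schedule = w.lakeview.create_schedule(
+        dashboard_id=dashboard_id,
+        schedule=Schedule(cron_schedule=CronSchedule(
+            quartz_cron_expression='0 0 8 * * ?', timezone_id='UTC')))
+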
 
     .. py:method:: create_subscription(dashboard_id: str, schedule_id: str [, subscription: Optional[Subscription]]) -> Subscription
 
         Create schedule subscription.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
-        :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
-        :param subscription: :class:`Subscription` (optional)
-        
-        :returns: :class:`Subscription`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the subscription belongs.
+:param schedule_id: str
+  UUID identifying the schedule to which the subscription belongs.
+:param subscription: :class:`Subscription` (optional)
+
+:returns: :class:`Subscription`
+
 
     .. py:method:: delete_schedule(dashboard_id: str, schedule_id: str [, etag: Optional[str]])
 
         Delete dashboard schedule.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
-        :param schedule_id: str
-          UUID identifying the schedule.
-        :param etag: str (optional)
-          The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been
-          modified from its last retrieval.
-        
-        
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the schedule belongs.
+:param schedule_id: str
+  UUID identifying the schedule.
+:param etag: str (optional)
+  The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been
+  modified from its last retrieval.
+
+
+
 
     .. py:method:: delete_subscription(dashboard_id: str, schedule_id: str, subscription_id: str [, etag: Optional[str]])
 
         Delete schedule subscription.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard which the subscription belongs.
-        :param schedule_id: str
-          UUID identifying the schedule which the subscription belongs.
-        :param subscription_id: str
-          UUID identifying the subscription.
-        :param etag: str (optional)
-          The etag for the subscription. Can be optionally provided to ensure that the subscription has not
-          been modified since the last read.
-        
-        
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the subscription belongs.
+:param schedule_id: str
+  UUID identifying the schedule to which the subscription belongs.
+:param subscription_id: str
+  UUID identifying the subscription.
+:param etag: str (optional)
+  The etag for the subscription. Can be optionally provided to ensure that the subscription has not
+  been modified since the last read.
+
+
+
 
     .. py:method:: get(dashboard_id: str) -> Dashboard
 
         Get dashboard.
-        
-        Get a draft dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard.
-        
-        :returns: :class:`Dashboard`
-        
+
+Get a draft dashboard.
+
+:param dashboard_id: str
+  UUID identifying the dashboard.
+
+:returns: :class:`Dashboard`
+
 
     .. py:method:: get_published(dashboard_id: str) -> PublishedDashboard
 
         Get published dashboard.
-        
-        Get the current published dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the published dashboard.
-        
-        :returns: :class:`PublishedDashboard`
-        
+
+Get the current published dashboard.
+
+:param dashboard_id: str
+  UUID identifying the published dashboard.
+
+:returns: :class:`PublishedDashboard`
+
 
     .. py:method:: get_schedule(dashboard_id: str, schedule_id: str) -> Schedule
 
         Get dashboard schedule.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
-        :param schedule_id: str
-          UUID identifying the schedule.
-        
-        :returns: :class:`Schedule`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the schedule belongs.
+:param schedule_id: str
+  UUID identifying the schedule.
+
+:returns: :class:`Schedule`
+
 
     .. py:method:: get_subscription(dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription
 
         Get schedule subscription.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard which the subscription belongs.
-        :param schedule_id: str
-          UUID identifying the schedule which the subscription belongs.
-        :param subscription_id: str
-          UUID identifying the subscription.
-        
-        :returns: :class:`Subscription`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the subscription belongs.
+:param schedule_id: str
+  UUID identifying the schedule to which the subscription belongs.
+:param subscription_id: str
+  UUID identifying the subscription.
+
+:returns: :class:`Subscription`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str], show_trashed: Optional[bool], view: Optional[DashboardView]]) -> Iterator[Dashboard]
 
         List dashboards.
-        
-        :param page_size: int (optional)
-          The number of dashboards to return per page.
-        :param page_token: str (optional)
-          A page token, received from a previous `ListDashboards` call. This token can be used to retrieve the
-          subsequent page.
-        :param show_trashed: bool (optional)
-          The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
-          returned.
-        :param view: :class:`DashboardView` (optional)
-          `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard.
-        
-        :returns: Iterator over :class:`Dashboard`
-        
+
+:param page_size: int (optional)
+  The number of dashboards to return per page.
+:param page_token: str (optional)
+  A page token, received from a previous `ListDashboards` call. This token can be used to retrieve the
+  subsequent page.
+:param show_trashed: bool (optional)
+  The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
+  returned.
+:param view: :class:`DashboardView` (optional)
+  `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
+
+:returns: Iterator over :class:`Dashboard`
+
 
     .. py:method:: list_schedules(dashboard_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Schedule]
 
         List dashboard schedules.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the schedules belongs.
-        :param page_size: int (optional)
-          The number of schedules to return per page.
-        :param page_token: str (optional)
-          A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
-          page.
-        
-        :returns: Iterator over :class:`Schedule`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the schedules belong.
+:param page_size: int (optional)
+  The number of schedules to return per page.
+:param page_token: str (optional)
+  A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
+  page.
+
+:returns: Iterator over :class:`Schedule`
+
 
     .. py:method:: list_subscriptions(dashboard_id: str, schedule_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Subscription]
 
         List schedule subscriptions.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard which the subscriptions belongs.
-        :param schedule_id: str
-          UUID identifying the schedule which the subscriptions belongs.
-        :param page_size: int (optional)
-          The number of subscriptions to return per page.
-        :param page_token: str (optional)
-          A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
-          page.
-        
-        :returns: Iterator over :class:`Subscription`
-        
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the subscriptions belong.
+:param schedule_id: str
+  UUID identifying the schedule to which the subscriptions belong.
+:param page_size: int (optional)
+  The number of subscriptions to return per page.
+:param page_token: str (optional)
+  A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
+  page.
+
+:returns: Iterator over :class:`Subscription`
+
 
     .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard
 
         Migrate dashboard.
-        
-        Migrates a classic SQL dashboard to Lakeview.
-        
-        :param source_dashboard_id: str
-          UUID of the dashboard to be migrated.
-        :param display_name: str (optional)
-          Display name for the new Lakeview dashboard.
-        :param parent_path: str (optional)
-          The workspace path of the folder to contain the migrated Lakeview dashboard.
-        :param update_parameter_syntax: bool (optional)
-          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
-          (:param) when converting datasets in the dashboard.
-        
-        :returns: :class:`Dashboard`
-        
+
+Migrates a classic SQL dashboard to Lakeview.
+
+:param source_dashboard_id: str
+  UUID of the dashboard to be migrated.
+:param display_name: str (optional)
+  Display name for the new Lakeview dashboard.
+:param parent_path: str (optional)
+  The workspace path of the folder to contain the migrated Lakeview dashboard.
+:param update_parameter_syntax: bool (optional)
+  Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+  (:param) when converting datasets in the dashboard.
+
+:returns: :class:`Dashboard`
+
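
A minimal sketch of the migration call documented above (IDs are placeholders):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Convert a classic SQL dashboard into a Lakeview draft, rewriting
    # {{ param }} mustache syntax to the named :param syntax on the way.
    dashboard = w.lakeview.migrate(
        source_dashboard_id="<classic-dashboard-uuid>",
        display_name="Migrated sales dashboard",
        update_parameter_syntax=True,
    )
    print(dashboard.dashboard_id)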
 
     .. py:method:: publish(dashboard_id: str [, embed_credentials: Optional[bool], warehouse_id: Optional[str]]) -> PublishedDashboard
 
         Publish dashboard.
-        
-        Publish the current draft dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to be published.
-        :param embed_credentials: bool (optional)
-          Flag to indicate if the publisher's credentials should be embedded in the published dashboard. These
-          embedded credentials will be used to execute the published dashboard's queries.
-        :param warehouse_id: str (optional)
-          The ID of the warehouse that can be used to override the warehouse which was set in the draft.
-        
-        :returns: :class:`PublishedDashboard`
-        
+
+Publish the current draft dashboard.
+
+:param dashboard_id: str
+  UUID identifying the dashboard to be published.
+:param embed_credentials: bool (optional)
+  Flag to indicate if the publisher's credentials should be embedded in the published dashboard. These
+  embedded credentials will be used to execute the published dashboard's queries.
+:param warehouse_id: str (optional)
+  The ID of the warehouse that can be used to override the warehouse which was set in the draft.
+
+:returns: :class:`PublishedDashboard`
+
 
     .. py:method:: trash(dashboard_id: str)
 
         Trash dashboard.
-        
-        Trash a dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard.
-        
-        
-        
+
+Trash a dashboard.
+
+:param dashboard_id: str
+  UUID identifying the dashboard.
+
+
+
 
     .. py:method:: unpublish(dashboard_id: str)
 
         Unpublish dashboard.
-        
-        Unpublish the dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the published dashboard.
-        
-        
-        
+
+Unpublish the dashboard.
+
+:param dashboard_id: str
+  UUID identifying the published dashboard.
+
+
+
 
     .. py:method:: update(dashboard_id: str [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Update dashboard.
-        
-        Update a draft dashboard.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard.
-        :param dashboard: :class:`Dashboard` (optional)
-        
-        :returns: :class:`Dashboard`
-        
+
+Update a draft dashboard.
+
+:param dashboard_id: str
+  UUID identifying the dashboard.
+:param dashboard: :class:`Dashboard` (optional)
+
+:returns: :class:`Dashboard`
+
 
     .. py:method:: update_schedule(dashboard_id: str, schedule_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Update dashboard schedule.
-        
-        :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
-        :param schedule_id: str
-          UUID identifying the schedule.
-        :param schedule: :class:`Schedule` (optional)
-        
-        :returns: :class:`Schedule`
-        
\ No newline at end of file
+
+:param dashboard_id: str
+  UUID identifying the dashboard to which the schedule belongs.
+:param schedule_id: str
+  UUID identifying the schedule.
+:param schedule: :class:`Schedule` (optional)
+
+:returns: :class:`Schedule`
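
Taken together, the publish/get_published/unpublish/trash methods above give the
draft dashboard lifecycle; a short sketch with a placeholder ID:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    dashboard_id = "<dashboard-uuid>"  # an existing draft dashboard

    # Publish the draft with embedded credentials, read the published copy,
    # then unpublish it and move the draft to the trash.
    w.lakeview.publish(dashboard_id=dashboard_id, embed_credentials=True)
    print(w.lakeview.get_published(dashboard_id))
    w.lakeview.unpublish(dashboard_id)
    w.lakeview.trash(dashboard_id)
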
diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst
index 4c06031f5..460874edb 100644
--- a/docs/workspace/dashboards/lakeview_embedded.rst
+++ b/docs/workspace/dashboards/lakeview_embedded.rst
@@ -9,11 +9,10 @@
     .. py:method:: get_published_dashboard_embedded(dashboard_id: str)
 
         Read a published dashboard in an embedded ui.
-        
-        Get the current published dashboard within an embedded context.
-        
-        :param dashboard_id: str
-          UUID identifying the published dashboard.
-        
-        
-        
\ No newline at end of file
+
+Get the current published dashboard within an embedded context.
+
+:param dashboard_id: str
+  UUID identifying the published dashboard.
+
+
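
Usage is a single call; a sketch assuming the client exposes this API as
``w.lakeview_embedded`` (following this page's path):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Succeeds only if the dashboard is published and readable in an
    # embedded context.
    w.lakeview_embedded.get_published_dashboard_embedded(
        dashboard_id="<dashboard-uuid>")
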
diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst
index 5672183d9..2e4dfc6e7 100644
--- a/docs/workspace/dashboards/query_execution.rst
+++ b/docs/workspace/dashboards/query_execution.rst
@@ -9,38 +9,37 @@
     .. py:method:: cancel_published_query_execution(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> CancelQueryExecutionResponse
 
         Cancel the results for a query for a published, embedded dashboard.
-        
-        :param dashboard_name: str
-        :param dashboard_revision_id: str
-        :param tokens: List[str] (optional)
-          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-        
-        :returns: :class:`CancelQueryExecutionResponse`
-        
+
+:param dashboard_name: str
+:param dashboard_revision_id: str
+:param tokens: List[str] (optional)
+  Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+:returns: :class:`CancelQueryExecutionResponse`
+
 
     .. py:method:: execute_published_dashboard_query(dashboard_name: str, dashboard_revision_id: str [, override_warehouse_id: Optional[str]])
 
         Execute a query for a published dashboard.
-        
-        :param dashboard_name: str
-          Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the
-          list of datasets, warehouse_id, and embedded_credentials
-        :param dashboard_revision_id: str
-        :param override_warehouse_id: str (optional)
-          A dashboard schedule can override the warehouse used as compute for processing the published
-          dashboard queries
-        
-        
-        
+
+:param dashboard_name: str
+  Dashboard name and revision_id are required to retrieve PublishedDatasetDataModel, which contains the
+  list of datasets, warehouse_id, and embedded_credentials.
+:param dashboard_revision_id: str
+:param override_warehouse_id: str (optional)
+  A dashboard schedule can override the warehouse used as compute for processing the published
+  dashboard queries
+
+
+
 
     .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse
 
         Poll the results for a query for a published, embedded dashboard.
-        
-        :param dashboard_name: str
-        :param dashboard_revision_id: str
-        :param tokens: List[str] (optional)
-          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-        
-        :returns: :class:`PollQueryStatusResponse`
-        
\ No newline at end of file
+
+:param dashboard_name: str
+:param dashboard_revision_id: str
+:param tokens: List[str] (optional)
+  Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+
+:returns: :class:`PollQueryStatusResponse`
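
How these methods combine into an execute-then-poll flow, assuming the client
exposes the API as ``w.query_execution`` (per this page's path) and omitting token
handling:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    name, revision = "<dashboard-name>", "<dashboard-revision-id>"

    # Kick off execution of the published dashboard's queries, then poll.
    w.query_execution.execute_published_dashboard_query(
        dashboard_name=name, dashboard_revision_id=revision)
    status = w.query_execution.poll_published_query_status(
        dashboard_name=name, dashboard_revision_id=revision)
    print(status)
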
diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst
index c52d11bc8..e200363ac 100644
--- a/docs/workspace/files/dbfs.rst
+++ b/docs/workspace/files/dbfs.rst
@@ -5,37 +5,37 @@
 .. py:class:: DbfsExt
 
     The DBFS API makes it simple to interact with various data sources without having to include a user's
-    credentials every time to read a file.
+credentials every time to read a file.
 
     .. py:method:: add_block(handle: int, data: str)
 
         Append data block.
-        
-        Appends a block of data to the stream specified by the input handle. If the handle does not exist,
-        this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
-        
-        If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
-        
-        :param handle: int
-          The handle on an open stream.
-        :param data: str
-          The base64-encoded data to append to the stream. This has a limit of 1 MB.
-        
-        
-        
+
+Appends a block of data to the stream specified by the input handle. If the handle does not exist,
+this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
+
+If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
+
+:param handle: int
+  The handle on an open stream.
+:param data: str
+  The base64-encoded data to append to the stream. This has a limit of 1 MB.
+
+
+
 
     .. py:method:: close(handle: int)
 
         Close the stream.
-        
-        Closes the stream specified by the input handle. If the handle does not exist, this call throws an
-        exception with ``RESOURCE_DOES_NOT_EXIST``.
-        
-        :param handle: int
-          The handle on an open stream.
-        
-        
-        
+
+Closes the stream specified by the input handle. If the handle does not exist, this call throws an
+exception with ``RESOURCE_DOES_NOT_EXIST``.
+
+:param handle: int
+  The handle on an open stream.
+
+
+
 
     .. py:method:: copy(src: str, dst: str [, recursive: bool = False, overwrite: bool = False])
 
@@ -44,23 +44,23 @@
     .. py:method:: create(path: str [, overwrite: Optional[bool]]) -> CreateResponse
 
         Open a stream.
-        
-        Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
-        timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
-        set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.
-        
-        A typical workflow for file upload would be:
-        
-        1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle
-        you have. 3. Issue a ``close`` call with the handle you have.
-        
-        :param path: str
-          The path of the new file. The path should be the absolute DBFS path.
-        :param overwrite: bool (optional)
-          The flag that specifies whether to overwrite existing file/files.
-        
-        :returns: :class:`CreateResponse`
-        
+
+Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
+timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
+set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.
+
+A typical workflow for file upload would be:
+
+1. Issue a ``create`` call and get a handle.
+2. Issue one or more ``add-block`` calls with the handle you have.
+3. Issue a ``close`` call with the handle you have.
+
+:param path: str
+  The path of the new file. The path should be the absolute DBFS path.
+:param overwrite: bool (optional)
+  The flag that specifies whether to overwrite existing file/files.
+
+:returns: :class:`CreateResponse`
+
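
The create/add-block/close workflow above, sketched end to end (the calls and the
base64 requirement come straight from these docstrings):

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # 1. Open a stream and keep the returned handle.
    handle = w.dbfs.create("/tmp/sdk-example.txt", overwrite=True).handle

    # 2. Append base64-encoded blocks of at most 1 MB each.
    w.dbfs.add_block(handle, base64.b64encode(b"hello, dbfs\n").decode())

    # 3. Close the stream to commit the file.
    w.dbfs.close(handle)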
 
     .. py:method:: delete(path: str [, recursive: bool = False])
 
@@ -98,30 +98,30 @@
     .. py:method:: get_status(path: str) -> FileInfo
 
         Get the information of a file or directory.
-        
-        Gets the file information for a file or directory. If the file or directory does not exist, this call
-        throws an exception with `RESOURCE_DOES_NOT_EXIST`.
-        
-        :param path: str
-          The path of the file or directory. The path should be the absolute DBFS path.
-        
-        :returns: :class:`FileInfo`
-        
+
+Gets the file information for a file or directory. If the file or directory does not exist, this call
+throws an exception with `RESOURCE_DOES_NOT_EXIST`.
+
+:param path: str
+  The path of the file or directory. The path should be the absolute DBFS path.
+
+:returns: :class:`FileInfo`
+
 
     .. py:method:: list(path: str [, recursive: bool = False]) -> Iterator[files.FileInfo]
 
         List directory contents or file details.
 
-        List the contents of a directory, or details of the file. If the file or directory does not exist,
-        this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
+List the contents of a directory, or details of the file. If the file or directory does not exist,
+this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
+
+When calling list on a large directory, the list operation will time out after approximately 60
+seconds.
 
-        When calling list on a large directory, the list operation will time out after approximately 60
-        seconds.
+:param path: the DBFS or UC Volume path to list
+:param recursive: traverse deep into directory tree
+:returns: iterator of metadata for every file
 
-        :param path: the DBFS or UC Volume path to list
-        :param recursive: traverse deep into directory tree
-        :returns iterator of metadata for every file
-        
 
     .. py:method:: mkdirs(path: str)
 
@@ -130,19 +130,19 @@
     .. py:method:: move(source_path: str, destination_path: str)
 
         Move a file.
-        
-        Moves a file from one location to another location within DBFS. If the source file does not exist,
-        this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
-        destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
-        path is a directory, this call always recursively moves all files.
-        
-        :param source_path: str
-          The source path of the file or directory. The path should be the absolute DBFS path.
-        :param destination_path: str
-          The destination path of the file or directory. The path should be the absolute DBFS path.
-        
-        
-        
+
+Moves a file from one location to another location within DBFS. If the source file does not exist,
+this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
+destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
+path is a directory, this call always recursively moves all files.
+
+:param source_path: str
+  The source path of the file or directory. The path should be the absolute DBFS path.
+:param destination_path: str
+  The destination path of the file or directory. The path should be the absolute DBFS path.
+
+
+
 
     .. py:method:: move_(src: str, dst: str [, recursive: bool = False, overwrite: bool = False])
 
@@ -154,50 +154,50 @@
     .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]])
 
         Upload a file.
-        
-        Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
-        can also be used as a convenient single call for data upload.
-        
-        Alternatively you can pass contents as base64 string.
-        
-        The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited
-        to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded.
-        
-        If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create,
-        :method:dbfs/addBlock, :method:dbfs/close.
-        
-        :param path: str
-          The path of the new file. The path should be the absolute DBFS path.
-        :param contents: str (optional)
-          This parameter might be absent, and instead a posted file will be used.
-        :param overwrite: bool (optional)
-          The flag that specifies whether to overwrite existing file/files.
-        
-        
-        
+
+Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
+can also be used as a convenient single call for data upload.
+
+Alternatively, you can pass contents as a base64 string.
+
+The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited
+to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded.
+
+If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create,
+:method:dbfs/addBlock, :method:dbfs/close.
+
+:param path: str
+  The path of the new file. The path should be the absolute DBFS path.
+:param contents: str (optional)
+  This parameter might be absent, and instead a posted file will be used.
+:param overwrite: bool (optional)
+  The flag that specifies whether to overwrite existing file/files.
+
+
+
 
     .. py:method:: read(path: str [, length: Optional[int], offset: Optional[int]]) -> ReadResponse
 
         Get the contents of a file.
-        
-        Returns the contents of a file. If the file does not exist, this call throws an exception with
-        `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
-        is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
-        1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
-        
-        If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of
-        file.
-        
-        :param path: str
-          The path of the file to read. The path should be the absolute DBFS path.
-        :param length: int (optional)
-          The number of bytes to read starting from the offset. This has a limit of 1 MB, and a default value
-          of 0.5 MB.
-        :param offset: int (optional)
-          The offset to read from in bytes.
-        
-        :returns: :class:`ReadResponse`
-        
+
+Returns the contents of a file. If the file does not exist, this call throws an exception with
+`RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
+is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
+1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
+
+If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of
+file.
+
+:param path: str
+  The path of the file to read. The path should be the absolute DBFS path.
+:param length: int (optional)
+  The number of bytes to read starting from the offset. This has a limit of 1 MB, and a default value
+  of 0.5 MB.
+:param offset: int (optional)
+  The offset to read from in bytes.
+
+:returns: :class:`ReadResponse`
+
 
     .. py:method:: upload(path: str, src: BinaryIO [, overwrite: bool = False])
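
A short round-trip sketch for ``upload`` and ``read`` (treating ``ReadResponse.data``
as the base64 payload is an assumption worth verifying against the model):

    import base64
    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Upload raw bytes with the streaming helper...
    w.dbfs.upload("/tmp/sdk-example.bin", io.BytesIO(b"\x00\x01\x02"), overwrite=True)

    # ...then read them back; read() returns base64-encoded data.
    resp = w.dbfs.read("/tmp/sdk-example.bin", offset=0, length=1024)
    print(base64.b64decode(resp.data))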
 
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst
index 0151fcce2..6314481a3 100644
--- a/docs/workspace/files/files.rst
+++ b/docs/workspace/files/files.rst
@@ -5,151 +5,150 @@
 .. py:class:: FilesAPI
 
     The Files API is a standard HTTP API that allows you to read, write, list, and delete files and
-    directories by referring to their URI. The API makes working with file content as raw bytes easier and
-    more efficient.
-    
-    The API supports [Unity Catalog volumes], where files and directories to operate on are specified using
-    their volume URI path, which follows the format
-    /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
-    
-    The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
-    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
-    
-    Some Files API client features are currently experimental. To enable them, set
-    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
-    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
-    
-    [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
+directories by referring to their URI. The API makes working with file content as raw bytes easier and
+more efficient.
+
+The API supports [Unity Catalog volumes], where files and directories to operate on are specified using
+their volume URI path, which follows the format
+/Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
+
+The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
+working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+
+Some Files API client features are currently experimental. To enable them, set
+`enable_experimental_files_api_client = True` in your configuration profile or use the environment
+variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
+
+[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
 
     .. py:method:: create_directory(directory_path: str)
 
         Create a directory.
-        
-        Creates an empty directory. If necessary, also creates any parent directories of the new, empty
-        directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
-        response; this method is idempotent (it will succeed if the directory already exists).
-        
-        :param directory_path: str
-          The absolute path of a directory.
-        
-        
-        
+
+Creates an empty directory. If necessary, also creates any parent directories of the new, empty
+directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
+response; this method is idempotent (it will succeed if the directory already exists).
+
+:param directory_path: str
+  The absolute path of a directory.
+
+
+
 
     .. py:method:: delete(file_path: str)
 
         Delete a file.
-        
-        Deletes a file. If the request is successful, there is no response body.
-        
-        :param file_path: str
-          The absolute path of the file.
-        
-        
-        
+
+Deletes a file. If the request is successful, there is no response body.
+
+:param file_path: str
+  The absolute path of the file.
+
+
+
 
     .. py:method:: delete_directory(directory_path: str)
 
         Delete a directory.
-        
-        Deletes an empty directory.
-        
-        To delete a non-empty directory, first delete all of its contents. This can be done by listing the
-        directory contents and deleting each file and subdirectory recursively.
-        
-        :param directory_path: str
-          The absolute path of a directory.
-        
-        
-        
+
+Deletes an empty directory.
+
+To delete a non-empty directory, first delete all of its contents. This can be done by listing the
+directory contents and deleting each file and subdirectory recursively.
+
+:param directory_path: str
+  The absolute path of a directory.
+
+
+
 
     .. py:method:: download(file_path: str) -> DownloadResponse
 
         Download a file.
-        
-        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
-        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
-        
-        :param file_path: str
-          The absolute path of the file.
-        
-        :returns: :class:`DownloadResponse`
-        
+
+Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
+
+:param file_path: str
+  The absolute path of the file.
+
+:returns: :class:`DownloadResponse`
+
 
     .. py:method:: get_directory_metadata(directory_path: str)
 
         Get directory metadata.
-        
-        Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
-        body.
-        
-        This method is useful to check if a directory exists and the caller has access to it.
-        
-        If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory
-        if it does not exist, and is idempotent (it will succeed if the directory already exists).
-        
-        :param directory_path: str
-          The absolute path of a directory.
-        
-        
-        
+
+Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
+body.
+
+This method is useful to check if a directory exists and the caller has access to it.
+
+If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory
+if it does not exist, and is idempotent (it will succeed if the directory already exists).
+
+:param directory_path: str
+  The absolute path of a directory.
+
+
+
 
     .. py:method:: get_metadata(file_path: str) -> GetMetadataResponse
 
         Get file metadata.
-        
-        Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
-        
-        :param file_path: str
-          The absolute path of the file.
-        
-        :returns: :class:`GetMetadataResponse`
-        
+
+Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
+
+:param file_path: str
+  The absolute path of the file.
+
+:returns: :class:`GetMetadataResponse`
+
 
     .. py:method:: list_directory_contents(directory_path: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DirectoryEntry]
 
         List directory contents.
-        
-        Returns the contents of a directory. If there is no directory at the specified path, the API returns a
-        HTTP 404 error.
-        
-        :param directory_path: str
-          The absolute path of a directory.
-        :param page_size: int (optional)
-          The maximum number of directory entries to return. The response may contain fewer entries. If the
-          response contains a `next_page_token`, there may be more entries, even if fewer than `page_size`
-          entries are in the response.
-          
-          We recommend not to set this value unless you are intentionally listing less than the complete
-          directory contents.
-          
-          If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
-          above 1000 will be coerced to 1000.
-        :param page_token: str (optional)
-          An opaque page token which was the `next_page_token` in the response of the previous request to list
-          the contents of this directory. Provide this token to retrieve the next page of directory entries.
-          When providing a `page_token`, all other parameters provided to the request must match the previous
-          request. To list all of the entries in a directory, it is necessary to continue requesting pages of
-          entries until the response contains no `next_page_token`. Note that the number of entries returned
-          must not be used to determine when the listing is complete.
-        
-        :returns: Iterator over :class:`DirectoryEntry`
-        
+
+Returns the contents of a directory. If there is no directory at the specified path, the API returns a
+HTTP 404 error.
+
+:param directory_path: str
+  The absolute path of a directory.
+:param page_size: int (optional)
+  The maximum number of directory entries to return. The response may contain fewer entries. If the
+  response contains a `next_page_token`, there may be more entries, even if fewer than `page_size`
+  entries are in the response.
+  
+  We recommend not to set this value unless you are intentionally listing less than the complete
+  directory contents.
+  
+  If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
+  above 1000 will be coerced to 1000.
+:param page_token: str (optional)
+  An opaque page token which was the `next_page_token` in the response of the previous request to list
+  the contents of this directory. Provide this token to retrieve the next page of directory entries.
+  When providing a `page_token`, all other parameters provided to the request must match the previous
+  request. To list all of the entries in a directory, it is necessary to continue requesting pages of
+  entries until the response contains no `next_page_token`. Note that the number of entries returned
+  must not be used to determine when the listing is complete.
+
+:returns: Iterator over :class:`DirectoryEntry`
+
 
     .. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool]])
 
         Upload a file.
-        
-        Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
-        octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
-        resulting file will be exactly the bytes sent in the request body. If the request is successful, there
-        is no response body.
-        
-        :param file_path: str
-          The absolute path of the file.
-        :param contents: BinaryIO
-        :param overwrite: bool (optional)
-          If true, an existing file will be overwritten.
-        
-        
-        
\ No newline at end of file
+
+Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
+octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
+resulting file will be exactly the bytes sent in the request body. If the request is successful, there
+is no response body.
+
+:param file_path: str
+  The absolute path of the file.
+:param contents: BinaryIO
+:param overwrite: bool (optional)
+  If true, an existing file will be overwritten.
+
+
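
A hedged end-to-end sketch of the Files API calls above (the volume path is
hypothetical, and ``DownloadResponse.contents`` is assumed to be a readable binary
stream):

    import io

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    base = "/Volumes/main/default/my_volume/demo"  # hypothetical volume path

    w.files.create_directory(base)  # `mkdir -p` semantics, idempotent
    w.files.upload(f"{base}/hello.txt", io.BytesIO(b"hello"), overwrite=True)
    print(w.files.download(f"{base}/hello.txt").contents.read())

    # The iterator follows next_page_token transparently.
    for entry in w.files.list_directory_contents(base):
        print(entry.path)
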
diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst
index a5f1feeda..d5d3b3252 100644
--- a/docs/workspace/iam/access_control.rst
+++ b/docs/workspace/iam/access_control.rst
@@ -9,15 +9,14 @@
     .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse
 
         Check access policy to a resource.
-        
-        :param actor: :class:`Actor`
-        :param permission: str
-        :param resource: str
-          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
-          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
-        :param consistency_token: :class:`ConsistencyToken`
-        :param authz_identity: :class:`RequestAuthzIdentity`
-        :param resource_info: :class:`ResourceInfo` (optional)
-        
-        :returns: :class:`CheckPolicyResponse`
-        
\ No newline at end of file
+
+:param actor: :class:`Actor`
+:param permission: str
+:param resource: str
+  Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+  (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+:param consistency_token: :class:`ConsistencyToken`
+:param authz_identity: :class:`RequestAuthzIdentity`
+:param resource_info: :class:`ResourceInfo` (optional)
+
+:returns: :class:`CheckPolicyResponse`
diff --git a/docs/workspace/iam/account_access_control_proxy.rst b/docs/workspace/iam/account_access_control_proxy.rst
index 3265b29cc..3242b7944 100644
--- a/docs/workspace/iam/account_access_control_proxy.rst
+++ b/docs/workspace/iam/account_access_control_proxy.rst
@@ -5,52 +5,51 @@
 .. py:class:: AccountAccessControlProxyAPI
 
     These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
-    grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
-    called a rule set. A workspace must belong to an account for these APIs to work.
+grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
+called a rule set. A workspace must belong to an account for these APIs to work.
 
     .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse
 
         Get assignable roles for a resource.
-        
-        Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule
-        set on the resource can contain an access rule of the role.
-        
-        :param resource: str
-          The resource name for which assignable roles will be listed.
-        
-        :returns: :class:`GetAssignableRolesForResourceResponse`
-        
+
+Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule
+set on the resource can contain an access rule of the role.
+
+:param resource: str
+  The resource name for which assignable roles will be listed.
+
+:returns: :class:`GetAssignableRolesForResourceResponse`
+
 
     .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse
 
         Get a rule set.
-        
-        Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
-        rules on the said resource. Currently only a default rule set for each resource is supported.
-        
-        :param name: str
-          The ruleset name associated with the request.
-        :param etag: str
-          Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
-          optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
-          overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
-          modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an
-          etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
-          version you are updating.
-        
-        :returns: :class:`RuleSetResponse`
-        
+
+Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
+rules on the said resource. Currently only a default rule set for each resource is supported.
+
+:param name: str
+  The ruleset name associated with the request.
+:param etag: str
+  Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
+  optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
+  overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
+  modify -> write pattern to perform rule set updates in order to avoid race conditions: that is, get an
+  etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
+  version you are updating.
+
+:returns: :class:`RuleSetResponse`
+
 
     .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse
 
         Update a rule set.
-        
-        Replace the rules of a rule set. First, use a GET rule set request to read the current version of the
-        rule set before modifying it. This pattern helps prevent conflicts between concurrent updates.
-        
-        :param name: str
-          Name of the rule set.
-        :param rule_set: :class:`RuleSetUpdateRequest`
-        
-        :returns: :class:`RuleSetResponse`
-        
\ No newline at end of file
+
+Replace the rules of a rule set. First, use a GET rule set request to read the current version of the
+rule set before modifying it. This pattern helps prevent conflicts between concurrent updates.
+
+:param name: str
+  Name of the rule set.
+:param rule_set: :class:`RuleSetUpdateRequest`
+
+:returns: :class:`RuleSetResponse`
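
The read -> modify -> write pattern the etag description calls for, as a sketch (the
rule set name is a placeholder and ``RuleSetUpdateRequest`` is assumed to live in
``databricks.sdk.service.iam``):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    name = "accounts/<account-id>/servicePrincipals/<sp-id>/ruleSets/default"

    # Pass the etag from the GET into the update so a concurrent writer
    # causes a conflict instead of a silent overwrite.
    current = w.account_access_control_proxy.get_rule_set(name=name, etag="")
    w.account_access_control_proxy.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(
            name=name,
            etag=current.etag,
            grant_rules=current.grant_rules,  # adjust the rules here
        ),
    )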
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst
index 47ef1eff3..6a877bb2f 100644
--- a/docs/workspace/iam/current_user.rst
+++ b/docs/workspace/iam/current_user.rst
@@ -20,8 +20,7 @@
             me2 = w.current_user.me()
 
         Get current user info.
-        
-        Get details about the current method caller's identity.
-        
-        :returns: :class:`User`
-        
\ No newline at end of file
+
+Get details about the current method caller's identity.
+
+:returns: :class:`User`
diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst
index ef32112c8..98ece2ad1 100644
--- a/docs/workspace/iam/groups.rst
+++ b/docs/workspace/iam/groups.rst
@@ -5,11 +5,11 @@
 .. py:class:: GroupsAPI
 
     Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and
-    other securable objects.
-    
-    It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
-    instead of to users individually. All Databricks workspace identities can be assigned as members of
-    groups, and members inherit permissions that are assigned to their group.
+other securable objects.
+
+It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
+instead of to users individually. All Databricks workspace identities can be assigned as members of
+groups, and members inherit permissions that are assigned to their group.
 
     .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group
 
@@ -30,30 +30,30 @@
             w.groups.delete(id=group.id)
 
         Create a new group.
-        
-        Creates a group in the Databricks workspace with a unique name, using the supplied group details.
-        
-        :param display_name: str (optional)
-          String that represents a human-readable group name
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-          values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks group ID
-        :param members: List[:class:`ComplexValue`] (optional)
-        :param meta: :class:`ResourceMeta` (optional)
-          Container for the group identifier. Workspace local versus account.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`GroupSchema`] (optional)
-          The schema of the group.
-        
-        :returns: :class:`Group`
-        
+
+Creates a group in the Databricks workspace with a unique name, using the supplied group details.
+
+:param display_name: str (optional)
+  String that represents a human-readable group name
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+  values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks group ID
+:param members: List[:class:`ComplexValue`] (optional)
+:param meta: :class:`ResourceMeta` (optional)
+  Container for the group identifier. Workspace local versus account.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`GroupSchema`] (optional)
+  The schema of the group.
+
+:returns: :class:`Group`
+
 
     .. py:method:: delete(id: str)
 
@@ -73,14 +73,14 @@
             w.groups.delete(id=group.id)
 
         Delete a group.
-        
-        Deletes a group from the Databricks workspace.
-        
-        :param id: str
-          Unique ID for a group in the Databricks workspace.
-        
-        
-        
+
+Deletes a group from the Databricks workspace.
+
+:param id: str
+  Unique ID for a group in the Databricks workspace.
+
+
+
 
     .. py:method:: get(id: str) -> Group
 
@@ -103,43 +103,43 @@
             w.groups.delete(id=group.id)
 
         Get group details.
-        
-        Gets the information for a specific group in the Databricks workspace.
-        
-        :param id: str
-          Unique ID for a group in the Databricks workspace.
-        
-        :returns: :class:`Group`
-        
+
+Gets the information for a specific group in the Databricks workspace.
+
+:param id: str
+  Unique ID for a group in the Databricks workspace.
+
+:returns: :class:`Group`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group]
 
         List group details.
-        
-        Gets all details of the groups associated with the Databricks workspace.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`Group`
-        
+
+Gets all details of the groups associated with the Databricks workspace.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`Group`
+
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -174,42 +174,41 @@
             w.groups.delete(id=group.id)
 
         Update group details.
-        
-        Partially updates the details of a group.
-        
-        :param id: str
-          Unique ID for a group in the Databricks workspace.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates the details of a group.
+
+:param id: str
+  Unique ID for a group in the Databricks workspace.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
 
     .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]])
 
         Replace a group.
-        
-        Updates the details of a group by replacing the entire group entity.
-        
-        :param id: str
-          Databricks group ID
-        :param display_name: str (optional)
-          String that represents a human-readable group name
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-          values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param members: List[:class:`ComplexValue`] (optional)
-        :param meta: :class:`ResourceMeta` (optional)
-          Container for the group identifier. Workspace local versus account.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`GroupSchema`] (optional)
-          The schema of the group.
-        
-        
-        
\ No newline at end of file
+
+Updates the details of a group by replacing the entire group entity.
+
+:param id: str
+  Databricks group ID
+:param display_name: str (optional)
+  String that represents a human-readable group name
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+  values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param members: List[:class:`ComplexValue`] (optional)
+:param meta: :class:`ResourceMeta` (optional)
+  Container for the group identifier. Workspace local versus account.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`GroupSchema`] (optional)
+  The schema of the group.
+
+
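
A compact create/get/list/delete sketch built from the methods above (mirroring the
inline examples this page already uses):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    group = w.groups.create(display_name="sdk-example-group")
    fetched = w.groups.get(id=group.id)
    assert fetched.display_name == "sdk-example-group"

    # Filter with a simple SCIM expression.
    for g in w.groups.list(filter=f"displayName eq {group.display_name}"):
        print(g.id, g.display_name)

    w.groups.delete(id=group.id)
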
diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst
index 8eef6e0e1..1aaba1a93 100644
--- a/docs/workspace/iam/permission_migration.rst
+++ b/docs/workspace/iam/permission_migration.rst
@@ -9,15 +9,14 @@
     .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse
 
         Migrate Permissions.
-        
-        :param workspace_id: int
-          WorkspaceId of the associated workspace where the permission migration will occur.
-        :param from_workspace_group_name: str
-          The name of the workspace group that permissions will be migrated from.
-        :param to_account_group_name: str
-          The name of the account group that permissions will be migrated to.
-        :param size: int (optional)
-          The maximum number of permissions that will be migrated.
-        
-        :returns: :class:`MigratePermissionsResponse`
-        
\ No newline at end of file
+
+:param workspace_id: int
+  WorkspaceId of the associated workspace where the permission migration will occur.
+:param from_workspace_group_name: str
+  The name of the workspace group that permissions will be migrated from.
+:param to_account_group_name: str
+  The name of the account group that permissions will be migrated to.
+:param size: int (optional)
+  The maximum number of permissions that will be migrated.
+
+:returns: :class:`MigratePermissionsResponse`
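+
+A minimal sketch, assuming a configured ``WorkspaceClient``; the workspace ID and group names are
+hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Move up to 100 permissions from a workspace-local group to an account group.
+    resp = w.permission_migration.migrate_permissions(workspace_id=1234567890,
+                                                      from_workspace_group_name='old-ws-group',
+                                                      to_account_group_name='new-account-group',
+                                                      size=100)
+    # The response reports how many permissions were moved in this call.
+    print(resp.permissions_migrated)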
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index bf8f8e77f..24894cc8b 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -5,54 +5,54 @@
 .. py:class:: PermissionsAPI
 
     Permissions API are used to create read, write, edit, update and manage access for various users on
-    different objects and endpoints.
-    
-    * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
-    
-    * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
-    clusters.
-    
-    * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use cluster
-    policies.
-    
-    * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
-    manage, run, cancel, or own a Delta Live Tables pipeline.
-    
-    * **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a
-    job.
-    
-    * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or
-    manage MLflow experiments.
-    
-    * **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can read, edit,
-    or manage MLflow registered models.
-    
-    * **[Password permissions](:service:users)** — Manage which users can use password login when SSO is
-    enabled.
-    
-    * **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or attach to
-    pools.
-    
-    * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
-    
-    * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, query, or
-    manage a serving endpoint.
-    
-    * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage SQL
-    warehouses.
-    
-    * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
-    
-    * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
-    manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
-    
-    For the mapping of the required permissions for specific actions or abilities and other important
-    information, see [Access Control].
-    
-    Note that to manage access control on service principals, use **[Account Access Control
-    Proxy](:service:accountaccesscontrolproxy)**.
-    
-    [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
+different objects and endpoints.
+
+* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
+
+* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
+clusters.
+
+* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use cluster
+policies.
+
+* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
+manage, run, cancel, or own a Delta Live Tables pipeline.
+
+* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a
+job.
+
+* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or
+manage MLflow experiments.
+
+* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can read, edit,
+or manage MLflow registered models.
+
+* **[Password permissions](:service:users)** — Manage which users can use password login when SSO is
+enabled.
+
+* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or attach to
+pools.
+
+* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
+
+* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, query, or
+manage a serving endpoint.
+
+* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage SQL
+warehouses.
+
+* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
+
+* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
+manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
+
+For the mapping of the required permissions for specific actions or abilities and other important
+information, see [Access Control].
+
+Note that to manage access control on service principals, use **[Account Access Control
+Proxy](:service:accountaccesscontrolproxy)**.
+
+[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
 
     .. py:method:: get(request_object_type: str, request_object_id: str) -> ObjectPermissions
 
@@ -75,19 +75,19 @@
                                                          request_object_id="%d" % (obj.object_id))
 
         Get object permissions.
-        
-        Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
-        object.
-        
-        :param request_object_type: str
-          The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-        :param request_object_id: str
-          The id of the request object.
-        
-        :returns: :class:`ObjectPermissions`
-        
+
+Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
+object.
+
+:param request_object_type: str
+  The type of the request object. Can be one of the following: alerts, authorization, clusters,
+  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+:param request_object_id: str
+  The id of the request object.
+
+:returns: :class:`ObjectPermissions`
+
 
     .. py:method:: get_permission_levels(request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse
 
@@ -110,16 +110,16 @@
                                                          request_object_id="%d" % (obj.object_id))
 
         Get object permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param request_object_type: str
-          
-        :param request_object_id: str
-          
-        
-        :returns: :class:`GetPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param request_object_type: str
+  
+:param request_object_id: str
+  
+
+:returns: :class:`GetPermissionLevelsResponse`
+
 
     .. py:method:: set(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions
 
@@ -152,36 +152,35 @@
             w.groups.delete(id=group.id)
 
         Set object permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their parent objects or root
-        object.
-        
-        :param request_object_type: str
-          The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-        :param request_object_id: str
-          The id of the request object.
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
-        
-        :returns: :class:`ObjectPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their parent objects or root
+object.
+
+:param request_object_type: str
+  The type of the request object. Can be one of the following: alerts, authorization, clusters,
+  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+:param request_object_id: str
+  The id of the request object.
+:param access_control_list: List[:class:`AccessControlRequest`] (optional)
+
+:returns: :class:`ObjectPermissions`
+
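+A minimal sketch of replacing the permissions on a job, assuming a configured ``WorkspaceClient``;
+the object ID and group name are hypothetical. Note that ``set`` replaces all direct permissions
+on the object with exactly this list::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    w.permissions.set(request_object_type='jobs',
+                      request_object_id='123',
+                      access_control_list=[
+                          iam.AccessControlRequest(group_name='data-engineers',
+                                                   permission_level=iam.PermissionLevel.CAN_MANAGE)
+                      ])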
 
     .. py:method:: update(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions
 
         Update object permissions.
-        
-        Updates the permissions on an object. Objects can inherit permissions from their parent objects or
-        root object.
-        
-        :param request_object_type: str
-          The type of the request object. Can be one of the following: alerts, authorization, clusters,
-          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-        :param request_object_id: str
-          The id of the request object.
-        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
-        
-        :returns: :class:`ObjectPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on an object. Objects can inherit permissions from their parent objects or
+root object.
+
+:param request_object_type: str
+  The type of the request object. Can be one of the following: alerts, authorization, clusters,
+  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+:param request_object_id: str
+  The id of the request object.
+:param access_control_list: List[:class:`AccessControlRequest`] (optional)
+
+:returns: :class:`ObjectPermissions`
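+
+Unlike ``set``, ``update`` leaves existing entries in place and only applies the requested
+changes. A minimal sketch with hypothetical IDs::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    # Grant one extra user CAN_VIEW without touching other grants.
+    w.permissions.update(request_object_type='jobs',
+                         request_object_id='123',
+                         access_control_list=[
+                             iam.AccessControlRequest(user_name='someone@example.com',
+                                                      permission_level=iam.PermissionLevel.CAN_VIEW)
+                         ])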
diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst
index 0fb8ca643..f1ba78396 100644
--- a/docs/workspace/iam/service_principals.rst
+++ b/docs/workspace/iam/service_principals.rst
@@ -5,10 +5,10 @@
 .. py:class:: ServicePrincipalsAPI
 
     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
-    Databricks recommends creating service principals to run production jobs or modify production data. If all
-    processes that act on production data run with service principals, interactive users do not need any
-    write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
-    production data by accident.
+Databricks recommends creating service principals to run production jobs or modify production data. If all
+processes that act on production data run with service principals, interactive users do not need any
+write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
+production data by accident.
 
     .. py:method:: create( [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) -> ServicePrincipal
 
@@ -33,43 +33,43 @@
             w.service_principals.delete(id=spn.id)
 
         Create a service principal.
-        
-        Creates a new service principal in the Databricks workspace.
-        
-        :param active: bool (optional)
-          If this user is active
-        :param application_id: str (optional)
-          UUID relating to the service principal
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-          supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks service principal ID.
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-          The schema of the List response.
-        
-        :returns: :class:`ServicePrincipal`
-        
+
+Creates a new service principal in the Databricks workspace.
+
+:param active: bool (optional)
+  If this user is active
+:param application_id: str (optional)
+  UUID relating to the service principal
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+  supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks service principal ID.
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+  The schema of the List response.
+
+:returns: :class:`ServicePrincipal`
+
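+A minimal sketch, assuming a configured ``WorkspaceClient``; the display name is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    sp = w.service_principals.create(display_name='ci-deployer')
+    # application_id is generated by Databricks and identifies the principal for OAuth.
+    print(sp.application_id)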
 
     .. py:method:: delete(id: str)
 
         Delete a service principal.
-        
-        Delete a single service principal in the Databricks workspace.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks workspace.
-        
-        
-        
+
+Delete a single service principal in the Databricks workspace.
+
+:param id: str
+  Unique ID for a service principal in the Databricks workspace.
+
+
+
 
     .. py:method:: get(id: str) -> ServicePrincipal
 
@@ -92,14 +92,14 @@
             w.service_principals.delete(id=created.id)
 
         Get service principal details.
-        
-        Gets the details for a single service principal define in the Databricks workspace.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks workspace.
-        
-        :returns: :class:`ServicePrincipal`
-        
+
+Gets the details for a single service principal defined in the Databricks workspace.
+
+:param id: str
+  Unique ID for a service principal in the Databricks workspace.
+
+:returns: :class:`ServicePrincipal`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[ServicePrincipal]
 
@@ -116,31 +116,31 @@
             all = w.service_principals.list(iam.ListServicePrincipalsRequest())
 
         List service principals.
-        
-        Gets the set of service principals associated with a Databricks workspace.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`ServicePrincipal`
-        
+
+Gets the set of service principals associated with a Databricks workspace.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`ServicePrincipal`
+
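+A minimal sketch of a filtered listing, using the SCIM ``sw`` (starts-with) operator described
+above; the display-name prefix is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Returns an iterator over matching service principals.
+    for sp in w.service_principals.list(filter='displayName sw "ci-"', sort_by='displayName'):
+        print(sp.id, sp.display_name)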
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -168,17 +168,17 @@
             w.service_principals.delete(id=created.id)
 
         Update service principal details.
-        
-        Partially updates the details of a single service principal in the Databricks workspace.
-        
-        :param id: str
-          Unique ID for a service principal in the Databricks workspace.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates the details of a single service principal in the Databricks workspace.
+
+:param id: str
+  Unique ID for a service principal in the Databricks workspace.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
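+A minimal sketch of a SCIM PATCH that deactivates a service principal; the ID is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    # Deactivate the principal via a SCIM replace on the ``active`` attribute.
+    w.service_principals.patch(id='123456789',
+                               operations=[iam.Patch(op=iam.PatchOp.REPLACE, path='active', value=False)],
+                               schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])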
 
     .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]])
 
@@ -204,30 +204,29 @@
             w.service_principals.delete(id=created.id)
 
         Replace service principal.
-        
-        Updates the details of a single service principal.
-        
-        This action replaces the existing service principal with the same name.
-        
-        :param id: str
-          Databricks service principal ID.
-        :param active: bool (optional)
-          If this user is active
-        :param application_id: str (optional)
-          UUID relating to the service principal
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-          supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-          The schema of the List response.
-        
-        
-        
\ No newline at end of file
+
+Updates the details of a single service principal.
+
+This action replaces the existing service principal with the same name.
+
+:param id: str
+  Databricks service principal ID.
+:param active: bool (optional)
+  If this user is active
+:param application_id: str (optional)
+  UUID relating to the service principal
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+  supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+:param groups: List[:class:`ComplexValue`] (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+  The schema of the List response.
+
+
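+A minimal sketch of a full replace; as with groups, every field not passed is cleared. The ID and
+name are hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.service_principals.update(id='123456789', display_name='ci-deployer', active=True)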
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst
index 616ef7b86..e7c16c191 100644
--- a/docs/workspace/iam/users.rst
+++ b/docs/workspace/iam/users.rst
@@ -5,14 +5,14 @@
 .. py:class:: UsersAPI
 
     User identities recognized by Databricks and represented by email addresses.
-    
-    Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
-    provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your
-    identity provider to create users and groups in Databricks workspace and give them the proper level of
-    access. When a user leaves your organization or no longer needs access to Databricks workspace, admins can
-    terminate the user in your identity provider and that user’s account will also be removed from
-    Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from
-    accessing sensitive data.
+
+Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
+provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your
+identity provider to create users and groups in the Databricks workspace and give them the proper level of
+access. When a user leaves your organization or no longer needs access to the Databricks workspace, admins
+can terminate the user in your identity provider and that user’s account will also be removed from the
+Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from
+accessing sensitive data.
 
     .. py:method:: create( [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) -> User
 
@@ -30,40 +30,40 @@
             user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
 
         Create a new user.
-        
-        Creates a new user in the Databricks workspace. This new user will also be added to the Databricks
-        account.
-        
-        :param active: bool (optional)
-          If this user is active
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names. For example `John Smith`. This
-          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-          Account SCIM APIs to update `displayName`.
-          
-          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-        :param emails: List[:class:`ComplexValue`] (optional)
-          All the emails associated with the Databricks user.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-          External ID is not currently supported. It is reserved for future use.
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param id: str (optional)
-          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-          be ignored.
-        :param name: :class:`Name` (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`UserSchema`] (optional)
-          The schema of the user.
-        :param user_name: str (optional)
-          Email address of the Databricks user.
-        
-        :returns: :class:`User`
-        
+
+Creates a new user in the Databricks workspace. This new user will also be added to the Databricks
+account.
+
+:param active: bool (optional)
+  If this user is active
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names. For example `John Smith`. This
+  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+  Account SCIM APIs to update `displayName`.
+  
+  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+:param emails: List[:class:`ComplexValue`] (optional)
+  All the emails associated with the Databricks user.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+  External ID is not currently supported. It is reserved for future use.
+:param groups: List[:class:`ComplexValue`] (optional)
+:param id: str (optional)
+  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+  be ignored.
+:param name: :class:`Name` (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`UserSchema`] (optional)
+  The schema of the user.
+:param user_name: str (optional)
+  Email address of the Databricks user.
+
+:returns: :class:`User`
+
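+A minimal sketch, assuming a configured ``WorkspaceClient``; the name and address are
+hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    user = w.users.create(display_name='Jane Doe', user_name='jane.doe@example.com')
+    # The SCIM id is assigned by Databricks and is what the other methods key on.
+    print(user.id)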
 
     .. py:method:: delete(id: str)
 
@@ -83,15 +83,15 @@
             w.users.delete(id=other_owner.id)
 
         Delete a user.
-        
-        Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the
-        user.
-        
-        :param id: str
-          Unique ID for a user in the Databricks workspace.
-        
-        
-        
+
+Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the
+user.
+
+:param id: str
+  Unique ID for a user in the Databricks workspace.
+
+
+
 
     .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User
 
@@ -111,52 +111,52 @@
             fetch = w.users.get(id=user.id)
 
         Get user details.
-        
-        Gets information for a specific user in Databricks workspace.
-        
-        :param id: str
-          Unique ID for a user in the Databricks workspace.
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-          `name.givenName`, and `emails`.
-        :param sort_order: :class:`GetSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: :class:`User`
-        
+
+Gets information for a specific user in the Databricks workspace.
+
+:param id: str
+  Unique ID for a user in the Databricks workspace.
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+  `name.givenName`, and `emails`.
+:param sort_order: :class:`GetSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: :class:`User`
+
 
     .. py:method:: get_permission_levels() -> GetPasswordPermissionLevelsResponse
 
         Get password permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :returns: :class:`GetPasswordPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:returns: :class:`GetPasswordPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions() -> PasswordPermissions
 
         Get password permissions.
-        
-        Gets the permissions of all passwords. Passwords can inherit permissions from their root object.
-        
-        :returns: :class:`PasswordPermissions`
-        
+
+Gets the permissions of all passwords. Passwords can inherit permissions from their root object.
+
+:returns: :class:`PasswordPermissions`
+
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User]
 
@@ -175,32 +175,32 @@
                                      sort_order=iam.ListSortOrder.DESCENDING)
 
         List users.
-        
-        Gets details for all the users associated with a Databricks workspace.
-        
-        :param attributes: str (optional)
-          Comma-separated list of attributes to return in response.
-        :param count: int (optional)
-          Desired number of results per page.
-        :param excluded_attributes: str (optional)
-          Comma-separated list of attributes to exclude in response.
-        :param filter: str (optional)
-          Query by which the results have to be filtered. Supported operators are equals(`eq`),
-          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-          only support simple expressions.
-          
-          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-        :param sort_by: str (optional)
-          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-          `name.givenName`, and `emails`.
-        :param sort_order: :class:`ListSortOrder` (optional)
-          The order to sort the results.
-        :param start_index: int (optional)
-          Specifies the index of the first result. First item is number 1.
-        
-        :returns: Iterator over :class:`User`
-        
+
+Gets details for all the users associated with a Databricks workspace.
+
+:param attributes: str (optional)
+  Comma-separated list of attributes to return in response.
+:param count: int (optional)
+  Desired number of results per page.
+:param excluded_attributes: str (optional)
+  Comma-separated list of attributes to exclude in response.
+:param filter: str (optional)
+  Query by which the results have to be filtered. Supported operators are equals(`eq`),
+  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+  only support simple expressions.
+  
+  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+:param sort_by: str (optional)
+  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+  `name.givenName`, and `emails`.
+:param sort_order: :class:`ListSortOrder` (optional)
+  The order to sort the results.
+:param start_index: int (optional)
+  Specifies the index of the first result. First item is number 1.
+
+:returns: Iterator over :class:`User`
+
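+A minimal sketch that combines a SCIM filter with sorting, mirroring the parameters above; the
+domain is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    for u in w.users.list(filter='userName co "@example.com"',
+                          attributes='id,userName',
+                          sort_by='userName',
+                          sort_order=iam.ListSortOrder.DESCENDING):
+        print(u.id, u.user_name)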
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -223,29 +223,29 @@
                           schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
 
         Update user details.
-        
-        Partially updates a user resource by applying the supplied operations on specific user attributes.
-        
-        :param id: str
-          Unique ID for a user in the Databricks workspace.
-        :param operations: List[:class:`Patch`] (optional)
-        :param schemas: List[:class:`PatchSchema`] (optional)
-          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-        
-        
-        
+
+Partially updates a user resource by applying the supplied operations on specific user attributes.
+
+:param id: str
+  Unique ID for a user in the Databricks workspace.
+:param operations: List[:class:`Patch`] (optional)
+:param schemas: List[:class:`PatchSchema`] (optional)
+  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+
+
+
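+A minimal sketch that grants an entitlement via SCIM PATCH; the value follows the SCIM
+complex-value shape, and the user ID and entitlement are hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    w.users.patch(id='123456789',
+                  operations=[iam.Patch(op=iam.PatchOp.ADD,
+                                        path='entitlements',
+                                        value=[{'value': 'workspace-access'}])],
+                  schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])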
 
     .. py:method:: set_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions
 
         Set password permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-        
-        :returns: :class:`PasswordPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
+
+:returns: :class:`PasswordPermissions`
+
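+A minimal sketch, assuming ``iam.PasswordAccessControlRequest`` accepts a group name and a
+password permission level; the group name is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import iam
+
+    w = WorkspaceClient()
+    # Replace all direct password permissions with a single CAN_USE grant.
+    w.users.set_permissions(access_control_list=[
+        iam.PasswordAccessControlRequest(group_name='sso-exempt',
+                                         permission_level=iam.PasswordPermissionLevel.CAN_USE)
+    ])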
 
     .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]])
 
@@ -265,47 +265,46 @@
             w.users.update(id=user.id, user_name=user.user_name, active=True)
 
         Replace a user.
-        
-        Replaces a user's information with the data supplied in request.
-        
-        :param id: str
-          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-          be ignored.
-        :param active: bool (optional)
-          If this user is active
-        :param display_name: str (optional)
-          String that represents a concatenation of given and family names. For example `John Smith`. This
-          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-          Account SCIM APIs to update `displayName`.
-          
-          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-        :param emails: List[:class:`ComplexValue`] (optional)
-          All the emails associated with the Databricks user.
-        :param entitlements: List[:class:`ComplexValue`] (optional)
-          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-          
-          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-        :param external_id: str (optional)
-          External ID is not currently supported. It is reserved for future use.
-        :param groups: List[:class:`ComplexValue`] (optional)
-        :param name: :class:`Name` (optional)
-        :param roles: List[:class:`ComplexValue`] (optional)
-          Corresponds to AWS instance profile/arn role.
-        :param schemas: List[:class:`UserSchema`] (optional)
-          The schema of the user.
-        :param user_name: str (optional)
-          Email address of the Databricks user.
-        
-        
-        
+
+Replaces a user's information with the data supplied in the request.
+
+:param id: str
+  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+  be ignored.
+:param active: bool (optional)
+  If this user is active
+:param display_name: str (optional)
+  String that represents a concatenation of given and family names. For example `John Smith`. This
+  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+  Account SCIM APIs to update `displayName`.
+  
+  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+:param emails: List[:class:`ComplexValue`] (optional)
+  All the emails associated with the Databricks user.
+:param entitlements: List[:class:`ComplexValue`] (optional)
+  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+  
+  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+:param external_id: str (optional)
+  External ID is not currently supported. It is reserved for future use.
+:param groups: List[:class:`ComplexValue`] (optional)
+:param name: :class:`Name` (optional)
+:param roles: List[:class:`ComplexValue`] (optional)
+  Corresponds to AWS instance profile/arn role.
+:param schemas: List[:class:`UserSchema`] (optional)
+  The schema of the user.
+:param user_name: str (optional)
+  Email address of the Databricks user.
+
+
+
 
     .. py:method:: update_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions
 
         Update password permissions.
-        
-        Updates the permissions on all passwords. Passwords can inherit permissions from their root object.
-        
-        :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-        
-        :returns: :class:`PasswordPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on all passwords. Passwords can inherit permissions from their root object.
+
+:param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
+
+:returns: :class:`PasswordPermissions`
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index f4857476a..f4168ed96 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -5,20 +5,20 @@
 .. py:class:: JobsExt
 
     The Jobs API allows you to create, edit, and delete jobs.
-    
-    You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with
-    scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with
-    complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error
-    reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use
-    scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or
-    Python, Scala, Spark submit, and Java applications.
-    
-    You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in
-    the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs.
-    
-    [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
-    [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
-    [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets
+
+You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with
+scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with
+complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error
+reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use
+scheduling system. You can implement job tasks using notebooks, JARs, Delta Live Tables pipelines, or
+Python, Scala, Spark submit, and Java applications.
+
+You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in
+the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs.
+
+[Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
+[Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
+[Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets
 
     .. py:method:: cancel_all_runs( [, all_queued_runs: Optional[bool], job_id: Optional[int]])
 
@@ -55,18 +55,18 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Cancel all runs of a job.
-        
-        Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
-        from being started.
-        
-        :param all_queued_runs: bool (optional)
-          Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in
-          the workspace are canceled.
-        :param job_id: int (optional)
-          The canonical identifier of the job to cancel all runs of.
-        
-        
-        
+
+Cancels all active runs of a job. The runs are canceled asynchronously, so this call doesn't prevent new runs
+from being started.
+
+:param all_queued_runs: bool (optional)
+  Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in
+  the workspace are canceled.
+:param job_id: int (optional)
+  The canonical identifier of the job to cancel all runs of.
+
+
+
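+A minimal sketch with a hypothetical job ID; note the asynchronous semantics described above::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Cancels active runs of one job; new runs can still be started afterwards.
+    w.jobs.cancel_all_runs(job_id=123456789)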
 
     .. py:method:: cancel_run(run_id: int) -> Wait[Run]
 
@@ -105,17 +105,17 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Cancel a run.
-        
-        Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
-        this request completes.
-        
-        :param run_id: int
-          This field is required.
-        
-        :returns:
-          Long-running operation waiter for :class:`Run`.
-          See :method:wait_get_run_job_terminated_or_skipped for more details.
-        
+
+Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
+this request completes.
+
+:param run_id: int
+  This field is required.
+
+:returns:
+  Long-running operation waiter for :class:`Run`.
+  See :method:wait_get_run_job_terminated_or_skipped for more details.
+
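+A minimal sketch of cancelling and then blocking on the waiter; the run ID is hypothetical::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # cancel_run returns immediately with a waiter; result() blocks until the
+    # run reaches a terminated or skipped state.
+    run = w.jobs.cancel_run(run_id=123456789).result()
+    print(run.state)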
 
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
@@ -153,125 +153,125 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Create a new job.
-        
-        Create a new job.
-        
-        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-          List of permissions to set on the job.
-        :param budget_policy_id: str (optional)
-          The id of the user specified budget policy to use for this job. If not specified, a default budget
-          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
-          budget policy used by this workload.
-        :param continuous: :class:`Continuous` (optional)
-          An optional continuous property for this job. The continuous property will ensure that there is
-          always one run executing. Only one of `schedule` and `continuous` can be used.
-        :param deployment: :class:`JobDeployment` (optional)
-          Deployment information for jobs managed by external sources.
-        :param description: str (optional)
-          An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
-        :param edit_mode: :class:`JobEditMode` (optional)
-          Edit mode of the job.
-          
-          * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
-          an editable state and can be modified.
-        :param email_notifications: :class:`JobEmailNotifications` (optional)
-          An optional set of email addresses that is notified when runs of this job begin or complete as well
-          as when this job is deleted.
-        :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by serverless tasks of
-          this job. An environment is required to be present for serverless tasks. For serverless notebook
-          tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
-          the task environment is required to be specified using environment_key in the task settings.
-        :param format: :class:`Format` (optional)
-          Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
-          using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
-        :param git_source: :class:`GitSource` (optional)
-          An optional specification for a remote Git repository containing the source code used by tasks.
-          Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-          
-          If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-          However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-          
-          Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
-          used, `git_source` must be defined on the job.
-        :param health: :class:`JobsHealthRules` (optional)
-          An optional set of health rules that can be defined for this job.
-        :param job_clusters: List[:class:`JobCluster`] (optional)
-          A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
-          cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
-          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
-        :param max_concurrent_runs: int (optional)
-          An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
-          able to execute multiple runs of the same job concurrently. This is useful for example if you
-          trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
-          other, or if you want to trigger multiple runs which differ by their input parameters. This setting
-          affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent
-          active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from
-          then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed
-          1000. Setting this value to `0` causes all new runs to be skipped.
-        :param name: str (optional)
-          An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
-        :param notification_settings: :class:`JobNotificationSettings` (optional)
-          Optional notification settings that are used when sending notifications to each of the
-          `email_notifications` and `webhook_notifications` for this job.
-        :param parameters: List[:class:`JobParameterDefinition`] (optional)
-          Job-level parameter definitions
-        :param performance_target: :class:`PerformanceTarget` (optional)
-          PerformanceTarget defines how performant or cost efficient the execution of run on serverless should
-          be.
-        :param queue: :class:`QueueSettings` (optional)
-          The queue settings of the job.
-        :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
-          the job runs as the user who created the job.
-          
-          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
-        :param schedule: :class:`CronSchedule` (optional)
-          An optional periodic schedule for this job. The default behavior is that the job only runs when
-          triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
-        :param tags: Dict[str,str] (optional)
-          A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs
-          clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
-          to the job.
-        :param tasks: List[:class:`Task`] (optional)
-          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
-          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
-          to determine if more results are available.
-        :param timeout_seconds: int (optional)
-          An optional timeout applied to each run of this job. A value of `0` means no timeout.
-        :param trigger: :class:`TriggerSettings` (optional)
-          A configuration to trigger a run when certain conditions are met. The default behavior is that the
-          job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
-          `runNow`.
-        :param webhook_notifications: :class:`WebhookNotifications` (optional)
-          A collection of system notification IDs to notify when runs of this job begin or complete.
-        
-        :returns: :class:`CreateResponse`
-        
+
+Create a new job.
+
+:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+  List of permissions to set on the job.
+:param budget_policy_id: str (optional)
+  The id of the user-specified budget policy to use for this job. If not specified, a default budget
+  policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+  budget policy used by this workload.
+:param continuous: :class:`Continuous` (optional)
+  An optional continuous property for this job. The continuous property will ensure that there is
+  always one run executing. Only one of `schedule` and `continuous` can be used.
+:param deployment: :class:`JobDeployment` (optional)
+  Deployment information for jobs managed by external sources.
+:param description: str (optional)
+  An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
+:param edit_mode: :class:`JobEditMode` (optional)
+  Edit mode of the job.
+  
+  * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
+  an editable state and can be modified.
+:param email_notifications: :class:`JobEmailNotifications` (optional)
+  An optional set of email addresses that is notified when runs of this job begin or complete as well
+  as when this job is deleted.
+:param environments: List[:class:`JobEnvironment`] (optional)
+  A list of task execution environment specifications that can be referenced by serverless tasks of
+  this job. An environment is required to be present for serverless tasks. For serverless notebook
+  tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
+  the task environment is required to be specified using environment_key in the task settings.
+:param format: :class:`Format` (optional)
+  Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
+  using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
+:param git_source: :class:`GitSource` (optional)
+  An optional specification for a remote Git repository containing the source code used by tasks.
+  Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+  
+  If `git_source` is set, these tasks retrieve the file from the remote repository by default.
+  However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
+  
+  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
+  used, `git_source` must be defined on the job.
+:param health: :class:`JobsHealthRules` (optional)
+  An optional set of health rules that can be defined for this job.
+:param job_clusters: List[:class:`JobCluster`] (optional)
+  A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
+  cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+  If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
+:param max_concurrent_runs: int (optional)
+  An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
+  able to execute multiple runs of the same job concurrently. This is useful, for example, if you
+  trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
+  other, or if you want to trigger multiple runs which differ by their input parameters. This setting
+  affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent
+  active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from
+  then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed
+  1000. Setting this value to `0` causes all new runs to be skipped.
+:param name: str (optional)
+  An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
+:param notification_settings: :class:`JobNotificationSettings` (optional)
+  Optional notification settings that are used when sending notifications to each of the
+  `email_notifications` and `webhook_notifications` for this job.
+:param parameters: List[:class:`JobParameterDefinition`] (optional)
+  Job-level parameter definitions
+:param performance_target: :class:`PerformanceTarget` (optional)
+  PerformanceTarget defines how performant or cost-efficient the execution of the run on serverless
+  compute should be.
+:param queue: :class:`QueueSettings` (optional)
+  The queue settings of the job.
+:param run_as: :class:`JobRunAs` (optional)
+  Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+  the job runs as the user who created the job.
+  
+  Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
+:param schedule: :class:`CronSchedule` (optional)
+  An optional periodic schedule for this job. The default behavior is that the job only runs when
+  triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
+:param tags: Dict[str,str] (optional)
+  A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs
+  clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
+  to the job.
+:param tasks: List[:class:`Task`] (optional)
+  A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+  can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+  to determine if more results are available.
+:param timeout_seconds: int (optional)
+  An optional timeout applied to each run of this job. A value of `0` means no timeout.
+:param trigger: :class:`TriggerSettings` (optional)
+  A configuration to trigger a run when certain conditions are met. The default behavior is that the
+  job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
+  `runNow`.
+:param webhook_notifications: :class:`WebhookNotifications` (optional)
+  A collection of system notification IDs to notify when runs of this job begin or complete.
+
+:returns: :class:`CreateResponse`
+
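+For illustration only, a minimal single-task job might be created as sketched below; the cluster id
+and notebook path are hypothetical placeholders:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import jobs
+
+    w = WorkspaceClient()
+    created = w.jobs.create(
+        name="hello-job",  # optional display name
+        max_concurrent_runs=1,
+        tasks=[
+            jobs.Task(
+                task_key="main",
+                existing_cluster_id="0123-456789-abcdefgh",  # hypothetical cluster id
+                notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/hello"),
+            )
+        ],
+    )
+    print(created.job_id)  # canonical identifier of the new job
+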
 
     .. py:method:: delete(job_id: int)
 
         Delete a job.
-        
-        Deletes a job.
-        
-        :param job_id: int
-          The canonical identifier of the job to delete. This field is required.
-        
-        
-        
+
+Deletes a job.
+
+:param job_id: int
+  The canonical identifier of the job to delete. This field is required.
+
+
+
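+A minimal sketch, assuming a configured :class:`WorkspaceClient` and a hypothetical job id:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.jobs.delete(job_id=123)  # job_id is required
+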
 
     .. py:method:: delete_run(run_id: int)
 
         Delete a job run.
-        
-        Deletes a non-active run. Returns an error if the run is active.
-        
-        :param run_id: int
-          ID of the run to delete.
-        
-        
-        
+
+Deletes a non-active run. Returns an error if the run is active.
+
+:param run_id: int
+  ID of the run to delete.
+
+
+
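+A minimal sketch; the run id is hypothetical and the run must no longer be active:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    w.jobs.delete_run(run_id=456)  # returns an error if the run is still active
+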
 
     .. py:method:: export_run(run_id: int [, views_to_export: Optional[ViewsToExport]]) -> ExportRunOutput
 
@@ -310,16 +310,16 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Export and retrieve a job run.
-        
-        Export and retrieve the job run task.
-        
-        :param run_id: int
-          The canonical identifier for the run. This field is required.
-        :param views_to_export: :class:`ViewsToExport` (optional)
-          Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE.
-        
-        :returns: :class:`ExportRunOutput`
-        
+
+Export and retrieve the job run task.
+
+:param run_id: int
+  The canonical identifier for the run. This field is required.
+:param views_to_export: :class:`ViewsToExport` (optional)
+  Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE.
+
+:returns: :class:`ExportRunOutput`
+
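+A sketch of exporting the notebook views of a terminated run (the run id is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import jobs
+
+    w = WorkspaceClient()
+    exported = w.jobs.export_run(run_id=456, views_to_export=jobs.ViewsToExport.CODE)
+    for view in exported.views or []:
+        print(view.name)  # each exported view item carries its name and content
+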
 
     .. py:method:: get(job_id: int [, page_token: Optional[str]]) -> Job
 
@@ -354,46 +354,46 @@
             w.jobs.delete_run(run_id=run.run_id)
 
         Get a single job.
-        
-        Retrieves the details for a single job.
-        
-        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
-        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
-        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
-        be empty on later pages.
-        
-        :param job_id: int
-          The canonical identifier of the job to retrieve information about. This field is required.
-        :param page_token: str (optional)
-          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
-          sub-resources.
-        
-        :returns: :class:`Job`
-        
+
+Retrieves the details for a single job.
+
+In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+be empty on later pages.
+
+:param job_id: int
+  The canonical identifier of the job to retrieve information about. This field is required.
+:param page_token: str (optional)
+  Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+  sub-resources.
+
+:returns: :class:`Job`
+
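+A sketch of fetching a job and following `next_page_token` when it has more than 100 tasks or job
+clusters (the job id is hypothetical; pagination applies in Jobs API 2.2):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    job = w.jobs.get(job_id=123)
+    tasks = list(job.settings.tasks or [])
+    while job.next_page_token:  # more pages of sub-resources remain
+        job = w.jobs.get(job_id=123, page_token=job.next_page_token)
+        tasks.extend(job.settings.tasks or [])
+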
 
     .. py:method:: get_permission_levels(job_id: str) -> GetJobPermissionLevelsResponse
 
         Get job permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param job_id: str
-          The job for which to get or manage permissions.
-        
-        :returns: :class:`GetJobPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param job_id: str
+  The job for which to get or manage permissions.
+
+:returns: :class:`GetJobPermissionLevelsResponse`
+
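+A minimal sketch; note that `job_id` is a string for the permission APIs:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    levels = w.jobs.get_permission_levels(job_id="123")
+    for level in levels.permission_levels or []:
+        print(level.permission_level)
+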
 
     .. py:method:: get_permissions(job_id: str) -> JobPermissions
 
         Get job permissions.
-        
-        Gets the permissions of a job. Jobs can inherit permissions from their root object.
-        
-        :param job_id: str
-          The job for which to get or manage permissions.
-        
-        :returns: :class:`JobPermissions`
-        
+
+Gets the permissions of a job. Jobs can inherit permissions from their root object.
+
+:param job_id: str
+  The job for which to get or manage permissions.
+
+:returns: :class:`JobPermissions`
+
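+A minimal sketch (the job id is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    perms = w.jobs.get_permissions(job_id="123")
+    for acl in perms.access_control_list or []:
+        print(acl.user_name, acl.group_name, acl.all_permissions)
+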
 
     .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run
 
@@ -428,19 +428,19 @@
             w.jobs.delete_run(run_id=run.run_id)
 
         
-        This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
-        it will paginate through all pages and aggregate the results.
-        :param run_id: int
-          The canonical identifier of the run for which to retrieve the metadata. This field is required.
-        :param include_history: bool (optional)
-          Whether to include the repair history in the response.
-        :param include_resolved_values: bool (optional)
-          Whether to include resolved parameter values in the response.
-        :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
-        :returns: :class:`Run`
-        
+This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
+it will paginate through all pages and aggregate the results.
+:param run_id: int
+  The canonical identifier of the run for which to retrieve the metadata. This field is required.
+:param include_history: bool (optional)
+  Whether to include the repair history in the response.
+:param include_resolved_values: bool (optional)
+  Whether to include resolved parameter values in the response.
+:param page_token: str (optional)
+  To list the next page or the previous page of job tasks, set this field to the value of the
+  `next_page_token` or `prev_page_token` returned in the GetJob response.
+:returns: :class:`Run`
+
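+A sketch of fetching a run; the wrapper aggregates paginated tasks into a single :class:`Run` (the
+run id is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    run = w.jobs.get_run(run_id=456, include_resolved_values=True)
+    for task in run.tasks or []:
+        print(task.task_key)
+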
 
     .. py:method:: get_run_output(run_id: int) -> RunOutput
 
@@ -475,21 +475,21 @@
             w.jobs.delete_run(run_id=run.run_id)
 
         Get the output for a single run.
-        
-        Retrieve the output and metadata of a single task run. When a notebook task returns a value through
-        the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
-        restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
-        job results in a cloud storage service.
-        
-        This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if
-        the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you to want to
-        reference them beyond 60 days, you must save old run results before they expire.
-        
-        :param run_id: int
-          The canonical identifier for the run.
-        
-        :returns: :class:`RunOutput`
-        
+
+Retrieve the output and metadata of a single task run. When a notebook task returns a value through
+the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
+restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
+job results in a cloud storage service.
+
+This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if
+the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to
+reference them beyond 60 days, you must save old run results before they expire.
+
+:param run_id: int
+  The canonical identifier for the run.
+
+:returns: :class:`RunOutput`
+
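+A sketch of reading a notebook task's exit value; the run id is hypothetical and must identify a
+task run:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    output = w.jobs.get_run_output(run_id=456)
+    if output.notebook_output:
+        print(output.notebook_output.result)  # value passed to dbutils.notebook.exit()
+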
 
     .. py:method:: list( [, expand_tasks: Optional[bool], limit: Optional[int], name: Optional[str], offset: Optional[int], page_token: Optional[str]]) -> Iterator[BaseJob]
 
@@ -526,26 +526,26 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         List jobs.
-        
-        Retrieves a list of jobs.
-        
-        :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
-          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
-        :param limit: int (optional)
-          The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
-          default value is 20.
-        :param name: str (optional)
-          A filter on the list based on the exact (case insensitive) job name.
-        :param offset: int (optional)
-          The offset of the first job to return, relative to the most recently created job. Deprecated since
-          June 2023. Use `page_token` to iterate through the pages instead.
-        :param page_token: str (optional)
-          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
-          previous page of jobs respectively.
-        
-        :returns: Iterator over :class:`BaseJob`
-        
+
+Retrieves a list of jobs.
+
+:param expand_tasks: bool (optional)
+  Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+  100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
+:param limit: int (optional)
+  The number of jobs to return. This value must be greater than 0 and less than or equal to 100. The
+  default value is 20.
+:param name: str (optional)
+  A filter on the list based on the exact (case insensitive) job name.
+:param offset: int (optional)
+  The offset of the first job to return, relative to the most recently created job. Deprecated since
+  June 2023. Use `page_token` to iterate through the pages instead.
+:param page_token: str (optional)
+  Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
+  previous page of jobs respectively.
+
+:returns: Iterator over :class:`BaseJob`
+
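+A minimal sketch; the returned iterator follows page tokens automatically:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for job in w.jobs.list(expand_tasks=False, limit=20):
+        print(job.job_id)
+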
 
     .. py:method:: list_runs( [, active_only: Optional[bool], completed_only: Optional[bool], expand_tasks: Optional[bool], job_id: Optional[int], limit: Optional[int], offset: Optional[int], page_token: Optional[str], run_type: Optional[RunType], start_time_from: Optional[int], start_time_to: Optional[int]]) -> Iterator[BaseRun]
 
@@ -582,41 +582,41 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         List job runs.
-        
-        List runs in descending order by start time.
-        
-        :param active_only: bool (optional)
-          If active_only is `true`, only active runs are included in the results; otherwise, lists both active
-          and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING`.
-          This field cannot be `true` when completed_only is `true`.
-        :param completed_only: bool (optional)
-          If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
-          active and completed runs. This field cannot be `true` when active_only is `true`.
-        :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
-          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
-        :param job_id: int (optional)
-          The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
-        :param limit: int (optional)
-          The number of runs to return. This value must be greater than 0 and less than 25. The default value
-          is 20. If a request specifies a limit of 0, the service instead uses the maximum limit.
-        :param offset: int (optional)
-          The offset of the first run to return, relative to the most recent run. Deprecated since June 2023.
-          Use `page_token` to iterate through the pages instead.
-        :param page_token: str (optional)
-          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
-          previous page of runs respectively.
-        :param run_type: :class:`RunType` (optional)
-          The type of runs to return. For a description of run types, see :method:jobs/getRun.
-        :param start_time_from: int (optional)
-          Show runs that started _at or after_ this value. The value must be a UTC timestamp in milliseconds.
-          Can be combined with _start_time_to_ to filter by a time range.
-        :param start_time_to: int (optional)
-          Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds.
-          Can be combined with _start_time_from_ to filter by a time range.
-        
-        :returns: Iterator over :class:`BaseRun`
-        
+
+List runs in descending order by start time.
+
+:param active_only: bool (optional)
+  If active_only is `true`, only active runs are included in the results; otherwise, lists both active
+  and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING` state.
+  This field cannot be `true` when completed_only is `true`.
+:param completed_only: bool (optional)
+  If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
+  active and completed runs. This field cannot be `true` when active_only is `true`.
+:param expand_tasks: bool (optional)
+  Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+  100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
+:param job_id: int (optional)
+  The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
+:param limit: int (optional)
+  The number of runs to return. This value must be greater than 0 and less than 25. The default value
+  is 20. If a request specifies a limit of 0, the service instead uses the maximum limit.
+:param offset: int (optional)
+  The offset of the first run to return, relative to the most recent run. Deprecated since June 2023.
+  Use `page_token` to iterate through the pages instead.
+:param page_token: str (optional)
+  Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
+  previous page of runs respectively.
+:param run_type: :class:`RunType` (optional)
+  The type of runs to return. For a description of run types, see :method:jobs/getRun.
+:param start_time_from: int (optional)
+  Show runs that started _at or after_ this value. The value must be a UTC timestamp in milliseconds.
+  Can be combined with _start_time_to_ to filter by a time range.
+:param start_time_to: int (optional)
+  Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds.
+  Can be combined with _start_time_from_ to filter by a time range.
+
+:returns: Iterator over :class:`BaseRun`
+
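+A sketch of listing the active runs of one job (the job id is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for run in w.jobs.list_runs(job_id=123, active_only=True):
+        print(run.run_id)
+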
 
     .. py:method:: repair_run(run_id: int [, dbt_commands: Optional[List[str]], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], latest_repair_id: Optional[int], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], rerun_all_failed_tasks: Optional[bool], rerun_dependent_tasks: Optional[bool], rerun_tasks: Optional[List[str]], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
@@ -658,95 +658,95 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Repair a job run.
-        
-        Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
-        and task settings, and can be viewed in the history for the original job run.
-        
-        :param run_id: int
-          The job run ID of the run to repair. The run must not be in progress.
-        :param dbt_commands: List[str] (optional)
-          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-          deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
-        :param jar_params: List[str] (optional)
-          A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
-          The parameters are used to invoke the main function of the main class specified in the Spark JAR
-          task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
-          in conjunction with notebook_params. The JSON representation of this field (for example
-          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param job_parameters: Dict[str,str] (optional)
-          Job-level parameters used in the run. for example `"param": "overriding_val"`
-        :param latest_repair_id: int (optional)
-          The ID of the latest repair. This parameter is not required when repairing a run for the first time,
-          but must be provided on subsequent requests to repair the same run.
-        :param notebook_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-          "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-          [dbutils.widgets.get] function.
-          
-          If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-          
-          notebook_params cannot be specified in conjunction with jar_params.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          The JSON representation of this field (for example `{"notebook_params":{"name":"john
-          doe","age":"35"}}`) cannot exceed 10,000 bytes.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-        :param pipeline_params: :class:`PipelineParams` (optional)
-          Controls whether the pipeline should perform a full refresh
-        :param python_named_params: Dict[str,str] (optional)
-        :param python_params: List[str] (optional)
-          A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
-          The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
-          would overwrite the parameters specified in job setting. The JSON representation of this field (for
-          example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          Important
-          
-          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-          emojis.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param rerun_all_failed_tasks: bool (optional)
-          If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
-        :param rerun_dependent_tasks: bool (optional)
-          If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were previously
-          successful. Can be also used in combination with `rerun_all_failed_tasks`.
-        :param rerun_tasks: List[str] (optional)
-          The task keys of the task runs to repair.
-        :param spark_submit_params: List[str] (optional)
-          A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-          ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
-          as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
-          in job setting. The JSON representation of this field (for example `{"python_params":["john
-          doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs
-          
-          Important
-          
-          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-          emojis.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param sql_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
-          "age": "35"}`. The SQL alert task does not support custom parameters.
-        
-        :returns:
-          Long-running operation waiter for :class:`Run`.
-          See :method:wait_get_run_job_terminated_or_skipped for more details.
-        
+
+Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
+and task settings, and can be viewed in the history for the original job run.
+
+:param run_id: int
+  The job run ID of the run to repair. The run must not be in progress.
+:param dbt_commands: List[str] (optional)
+  An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
+  deps", "dbt seed", "dbt run"]`
+:param jar_params: List[str] (optional)
+  A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
+  The parameters are used to invoke the main function of the main class specified in the Spark JAR
+  task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
+  in conjunction with notebook_params. The JSON representation of this field (for example
+  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param job_parameters: Dict[str,str] (optional)
+  Job-level parameters used in the run, for example `"param": "overriding_val"`
+:param latest_repair_id: int (optional)
+  The ID of the latest repair. This parameter is not required when repairing a run for the first time,
+  but must be provided on subsequent requests to repair the same run.
+:param notebook_params: Dict[str,str] (optional)
+  A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
+  "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
+  [dbutils.widgets.get] function.
+  
+  If not specified upon `run-now`, the triggered run uses the job’s base parameters.
+  
+  notebook_params cannot be specified in conjunction with jar_params.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  The JSON representation of this field (for example `{"notebook_params":{"name":"john
+  doe","age":"35"}}`) cannot exceed 10,000 bytes.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+:param pipeline_params: :class:`PipelineParams` (optional)
+  Controls whether the pipeline should perform a full refresh
+:param python_named_params: Dict[str,str] (optional)
+:param python_params: List[str] (optional)
+  A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
+  The parameters are passed to the Python file as command-line parameters. If specified upon `run-now`,
+  they overwrite the parameters specified in the job setting. The JSON representation of this field (for
+  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  Important
+  
+  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+  emojis.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param rerun_all_failed_tasks: bool (optional)
+  If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
+:param rerun_dependent_tasks: bool (optional)
+  If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were previously
+  successful. Can also be used in combination with `rerun_all_failed_tasks`.
+:param rerun_tasks: List[str] (optional)
+  The task keys of the task runs to repair.
+:param spark_submit_params: List[str] (optional)
+  A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
+  ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
+  as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
+  in job setting. The JSON representation of this field (for example `{"python_params":["john
+  doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs
+  
+  Important
+  
+  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+  emojis.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param sql_params: Dict[str,str] (optional)
+  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
+  "age": "35"}`. The SQL alert task does not support custom parameters.
+
+:returns:
+  Long-running operation waiter for :class:`Run`.
+  See :method:wait_get_run_job_terminated_or_skipped for more details.
+
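+A sketch of repairing all failed tasks of a run and waiting for the repair to terminate (the run id
+is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    run = w.jobs.repair_run(run_id=456, rerun_all_failed_tasks=True).result()
+    print(run.state.result_state)
+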
 
     .. py:method:: repair_run_and_wait(run_id: int [, dbt_commands: Optional[List[str]], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], latest_repair_id: Optional[int], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], rerun_all_failed_tasks: Optional[bool], rerun_dependent_tasks: Optional[bool], rerun_tasks: Optional[List[str]], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -790,20 +790,20 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Update all job settings (reset).
-        
-        Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
-        job settings partially.
-        
-        :param job_id: int
-          The canonical identifier of the job to reset. This field is required.
-        :param new_settings: :class:`JobSettings`
-          The new settings of the job. These settings completely replace the old settings.
-          
-          Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other
-          fields are applied to future runs only.
-        
-        
-        
+
+Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
+job settings partially.
+
+:param job_id: int
+  The canonical identifier of the job to reset. This field is required.
+:param new_settings: :class:`JobSettings`
+  The new settings of the job. These settings completely replace the old settings.
+  
+  Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other
+  fields are applied to future runs only.
+
+
+
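+A sketch of the read-modify-reset pattern: fetch the current settings, change them, and write the
+whole object back (the job id is hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    settings = w.jobs.get(job_id=123).settings
+    settings.max_concurrent_runs = 2
+    w.jobs.reset(job_id=123, new_settings=settings)  # completely replaces the old settings
+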
 
     .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
@@ -840,106 +840,106 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Trigger a new job run.
-        
-        Run a job and return the `run_id` of the triggered run.
-        
-        :param job_id: int
-          The ID of the job to be executed
-        :param dbt_commands: List[str] (optional)
-          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-          deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
-        :param idempotency_token: str (optional)
-          An optional token to guarantee the idempotency of job run requests. If a run with the provided token
-          already exists, the request does not create a new run but returns the ID of the existing run
-          instead. If a run with the provided token is deleted, an error is returned.
-          
-          If you specify the idempotency token, upon failure you can retry until the request succeeds.
-          Databricks guarantees that exactly one run is launched with that idempotency token.
-          
-          This token must have at most 64 characters.
-          
-          For more information, see [How to ensure idempotency for jobs].
-          
-          [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-        :param jar_params: List[str] (optional)
-          A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
-          The parameters are used to invoke the main function of the main class specified in the Spark JAR
-          task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
-          in conjunction with notebook_params. The JSON representation of this field (for example
-          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param job_parameters: Dict[str,str] (optional)
-          Job-level parameters used in the run. for example `"param": "overriding_val"`
-        :param notebook_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-          "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-          [dbutils.widgets.get] function.
-          
-          If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-          
-          notebook_params cannot be specified in conjunction with jar_params.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          The JSON representation of this field (for example `{"notebook_params":{"name":"john
-          doe","age":"35"}}`) cannot exceed 10,000 bytes.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-        :param only: List[str] (optional)
-          A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
-          will be run.
-        :param performance_target: :class:`PerformanceTarget` (optional)
-          PerformanceTarget defines how performant or cost efficient the execution of run on serverless
-          compute should be. For RunNow request, the run will execute with this settings instead of ones
-          defined in job.
-        :param pipeline_params: :class:`PipelineParams` (optional)
-          Controls whether the pipeline should perform a full refresh
-        :param python_named_params: Dict[str,str] (optional)
-        :param python_params: List[str] (optional)
-          A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
-          The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
-          would overwrite the parameters specified in job setting. The JSON representation of this field (for
-          example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs.
-          
-          Important
-          
-          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-          emojis.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param queue: :class:`QueueSettings` (optional)
-          The queue settings of the run.
-        :param spark_submit_params: List[str] (optional)
-          A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-          ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
-          as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
-          in job setting. The JSON representation of this field (for example `{"python_params":["john
-          doe","35"]}`) cannot exceed 10,000 bytes.
-          
-          Use [Task parameter variables] to set parameters containing information about job runs
-          
-          Important
-          
-          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-          emojis.
-          
-          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-        :param sql_params: Dict[str,str] (optional)
-          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
-          "age": "35"}`. The SQL alert task does not support custom parameters.
-        
-        :returns:
-          Long-running operation waiter for :class:`Run`.
-          See :method:wait_get_run_job_terminated_or_skipped for more details.
-        
+
+Run a job and return the `run_id` of the triggered run.
+
+:param job_id: int
+  The ID of the job to be executed
+:param dbt_commands: List[str] (optional)
+  An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
+  deps", "dbt seed", "dbt run"]`
+:param idempotency_token: str (optional)
+  An optional token to guarantee the idempotency of job run requests. If a run with the provided token
+  already exists, the request does not create a new run but returns the ID of the existing run
+  instead. If a run with the provided token is deleted, an error is returned.
+  
+  If you specify the idempotency token, upon failure you can retry until the request succeeds.
+  Databricks guarantees that exactly one run is launched with that idempotency token.
+  
+  This token must have at most 64 characters.
+  
+  For more information, see [How to ensure idempotency for jobs].
+  
+  [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
+:param jar_params: List[str] (optional)
+  A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
+  The parameters are used to invoke the main function of the main class specified in the Spark JAR
+  task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
+  in conjunction with notebook_params. The JSON representation of this field (for example
+  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param job_parameters: Dict[str,str] (optional)
+  Job-level parameters used in the run, for example `"param": "overriding_val"`
+:param notebook_params: Dict[str,str] (optional)
+  A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
+  "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
+  [dbutils.widgets.get] function.
+  
+  If not specified upon `run-now`, the triggered run uses the job’s base parameters.
+  
+  notebook_params cannot be specified in conjunction with jar_params.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  The JSON representation of this field (for example `{"notebook_params":{"name":"john
+  doe","age":"35"}}`) cannot exceed 10,000 bytes.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+:param only: List[str] (optional)
+  A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
+  will be run.
+:param performance_target: :class:`PerformanceTarget` (optional)
+  PerformanceTarget defines how performant or cost-efficient the execution of the run on serverless
+  compute should be. For a RunNow request, the run executes with these settings instead of the ones
+  defined in the job.
+:param pipeline_params: :class:`PipelineParams` (optional)
+  Controls whether the pipeline should perform a full refresh
+:param python_named_params: Dict[str,str] (optional)
+:param python_params: List[str] (optional)
+  A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
+  The parameters are passed to the Python file as command-line parameters. If specified upon `run-now`,
+  they overwrite the parameters specified in the job setting. The JSON representation of this field (for
+  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs.
+  
+  Important
+  
+  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+  emojis.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param queue: :class:`QueueSettings` (optional)
+  The queue settings of the run.
+:param spark_submit_params: List[str] (optional)
+  A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
+  ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
+  as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
+  in job setting. The JSON representation of this field (for example `{"python_params":["john
+  doe","35"]}`) cannot exceed 10,000 bytes.
+  
+  Use [Task parameter variables] to set parameters containing information about job runs
+  
+  Important
+  
+  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+  emojis.
+  
+  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+:param sql_params: Dict[str,str] (optional)
+  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
+  "age": "35"}`. The SQL alert task does not support custom parameters.
+
+:returns:
+  Long-running operation waiter for :class:`Run`.
+  See :method:wait_get_run_job_terminated_or_skipped for more details.
+
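+A sketch of triggering a run and blocking until it terminates; the job id and notebook parameters
+are illustrative:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    run = w.jobs.run_now(job_id=123, notebook_params={"name": "john doe"}).result()
+    print(run.run_id, run.state.result_state)
+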
 
     .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -947,16 +947,16 @@
     .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
 
         Set job permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param job_id: str
-          The job for which to get or manage permissions.
-        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-        
-        :returns: :class:`JobPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param job_id: str
+  The job for which to get or manage permissions.
+:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+
+:returns: :class:`JobPermissions`
+
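+A sketch of replacing a job's direct permissions with a single grant (the job id and user are
+hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import jobs
+
+    w = WorkspaceClient()
+    w.jobs.set_permissions(
+        job_id="123",
+        access_control_list=[
+            jobs.JobAccessControlRequest(
+                user_name="someone@example.com",
+                permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN,
+            )
+        ],
+    )
+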
 
     .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
 
@@ -989,64 +989,64 @@
             w.jobs.delete_run(run_id=run.run_id)
 
         Create and trigger a one-time run.
-        
-        Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
-        Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
-        run state after the job is submitted.
-        
-        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-          List of permissions to set on the job.
-        :param budget_policy_id: str (optional)
-          The user specified id of the budget policy to use for this one-time run. If not specified, the run
-          will be not be attributed to any budget policy.
-        :param email_notifications: :class:`JobEmailNotifications` (optional)
-          An optional set of email addresses notified when the run begins or completes.
-        :param environments: List[:class:`JobEnvironment`] (optional)
-          A list of task execution environment specifications that can be referenced by tasks of this run.
-        :param git_source: :class:`GitSource` (optional)
-          An optional specification for a remote Git repository containing the source code used by tasks.
-          Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-          
-          If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-          However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-          
-          Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
-          used, `git_source` must be defined on the job.
-        :param health: :class:`JobsHealthRules` (optional)
-          An optional set of health rules that can be defined for this job.
-        :param idempotency_token: str (optional)
-          An optional token that can be used to guarantee the idempotency of job run requests. If a run with
-          the provided token already exists, the request does not create a new run but returns the ID of the
-          existing run instead. If a run with the provided token is deleted, an error is returned.
-          
-          If you specify the idempotency token, upon failure you can retry until the request succeeds.
-          Databricks guarantees that exactly one run is launched with that idempotency token.
-          
-          This token must have at most 64 characters.
-          
-          For more information, see [How to ensure idempotency for jobs].
-          
-          [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-        :param notification_settings: :class:`JobNotificationSettings` (optional)
-          Optional notification settings that are used when sending notifications to each of the
-          `email_notifications` and `webhook_notifications` for this run.
-        :param queue: :class:`QueueSettings` (optional)
-          The queue settings of the one-time run.
-        :param run_as: :class:`JobRunAs` (optional)
-          Specifies the user or service principal that the job runs as. If not specified, the job runs as the
-          user who submits the request.
-        :param run_name: str (optional)
-          An optional name for the run. The default value is `Untitled`.
-        :param tasks: List[:class:`SubmitTask`] (optional)
-        :param timeout_seconds: int (optional)
-          An optional timeout applied to each run of this job. A value of `0` means no timeout.
-        :param webhook_notifications: :class:`WebhookNotifications` (optional)
-          A collection of system notification IDs to notify when the run begins or completes.
-        
-        :returns:
-          Long-running operation waiter for :class:`Run`.
-          See :method:wait_get_run_job_terminated_or_skipped for more details.
-        
+
+Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
+Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
+run state after the job is submitted.
+
+:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+  List of permissions to set on the job.
+:param budget_policy_id: str (optional)
+  The user-specified id of the budget policy to use for this one-time run. If not specified, the run
+  will not be attributed to any budget policy.
+:param email_notifications: :class:`JobEmailNotifications` (optional)
+  An optional set of email addresses notified when the run begins or completes.
+:param environments: List[:class:`JobEnvironment`] (optional)
+  A list of task execution environment specifications that can be referenced by tasks of this run.
+:param git_source: :class:`GitSource` (optional)
+  An optional specification for a remote Git repository containing the source code used by tasks.
+  Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+  
+  If `git_source` is set, these tasks retrieve the file from the remote repository by default.
+  However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
+  
+  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
+  used, `git_source` must be defined on the job.
+:param health: :class:`JobsHealthRules` (optional)
+  An optional set of health rules that can be defined for this job.
+:param idempotency_token: str (optional)
+  An optional token that can be used to guarantee the idempotency of job run requests. If a run with
+  the provided token already exists, the request does not create a new run but returns the ID of the
+  existing run instead. If a run with the provided token is deleted, an error is returned.
+  
+  If you specify the idempotency token, upon failure you can retry until the request succeeds.
+  Databricks guarantees that exactly one run is launched with that idempotency token.
+  
+  This token must have at most 64 characters.
+  
+  For more information, see [How to ensure idempotency for jobs].
+  
+  [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
+:param notification_settings: :class:`JobNotificationSettings` (optional)
+  Optional notification settings that are used when sending notifications to each of the
+  `email_notifications` and `webhook_notifications` for this run.
+:param queue: :class:`QueueSettings` (optional)
+  The queue settings of the one-time run.
+:param run_as: :class:`JobRunAs` (optional)
+  Specifies the user or service principal that the job runs as. If not specified, the job runs as the
+  user who submits the request.
+:param run_name: str (optional)
+  An optional name for the run. The default value is `Untitled`.
+:param tasks: List[:class:`SubmitTask`] (optional)
+:param timeout_seconds: int (optional)
+  An optional timeout applied to each run of this job. A value of `0` means no timeout.
+:param webhook_notifications: :class:`WebhookNotifications` (optional)
+  A collection of system notification IDs to notify when the run begins or completes.
+
+:returns:
+  Long-running operation waiter for :class:`Run`.
+  See :method:wait_get_run_job_terminated_or_skipped for more details.
+
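+A sketch of a one-time notebook run; the cluster id and notebook path are hypothetical:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import jobs
+
+    w = WorkspaceClient()
+    run = w.jobs.submit(
+        run_name="one-time-run",
+        tasks=[
+            jobs.SubmitTask(
+                task_key="main",
+                existing_cluster_id="0123-456789-abcdefgh",  # hypothetical cluster id
+                notebook_task=jobs.NotebookTask(notebook_path="/Users/someone@example.com/hello"),
+            )
+        ],
+    ).result()  # waits until the run terminates or is skipped
+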
 
     .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -1088,41 +1088,41 @@
             w.jobs.delete(job_id=created_job.job_id)
 
         Update job settings partially.
-        
-        Add, update, or remove specific settings of an existing job. Use the [_Reset_
-        endpoint](:method:jobs/reset) to overwrite all job settings.
-        
-        :param job_id: int
-          The canonical identifier of the job to update. This field is required.
-        :param fields_to_remove: List[str] (optional)
-          Remove top-level fields in the job settings. Removing nested fields is not supported, except for
-          tasks and job clusters (`tasks/task_1`). This field is optional.
-        :param new_settings: :class:`JobSettings` (optional)
-          The new settings for the job.
-          
-          Top-level fields specified in `new_settings` are completely replaced, except for arrays which are
-          merged. That is, new and existing entries are completely replaced based on the respective key
-          fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept.
-          
-          Partially updating nested fields is not supported.
-          
-          Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
-          fields are applied to future runs only.
-        
-        
-        
+
+Add, update, or remove specific settings of an existing job. Use the [_Reset_
+endpoint](:method:jobs/reset) to overwrite all job settings.
+
+:param job_id: int
+  The canonical identifier of the job to update. This field is required.
+:param fields_to_remove: List[str] (optional)
+  Remove top-level fields in the job settings. Removing nested fields is not supported, except for
+  tasks and job clusters (`tasks/task_1`). This field is optional.
+:param new_settings: :class:`JobSettings` (optional)
+  The new settings for the job.
+  
+  Top-level fields specified in `new_settings` are completely replaced, except for arrays which are
+  merged. That is, new entries replace existing entries that match on the respective key field, i.e.
+  `task_key` or `job_cluster_key`, while the remaining previous entries are kept.
+  
+  Partially updating nested fields is not supported.
+  
+  Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
+  fields are applied to future runs only.
+
+
+
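A minimal sketch of such a partial update, assuming a placeholder job ID; only `timeout_seconds` is replaced, and the task removal reuses the `tasks/task_1` syntax described above:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    # Only fields present in new_settings are touched; the timeout change
    # also applies to active runs, per the note above.
    w.jobs.update(job_id=123,
                  new_settings=jobs.JobSettings(timeout_seconds=3600),
                  fields_to_remove=["tasks/task_1"])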
 
     .. py:method:: update_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
 
         Update job permissions.
-        
-        Updates the permissions on a job. Jobs can inherit permissions from their root object.
-        
-        :param job_id: str
-          The job for which to get or manage permissions.
-        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-        
-        :returns: :class:`JobPermissions`
-        
+
+Updates the permissions on a job. Jobs can inherit permissions from their root object.
+
+:param job_id: str
+  The job for which to get or manage permissions.
+:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+
+:returns: :class:`JobPermissions`
+
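As a sketch, granting a user run permissions might look as follows; the user name is a placeholder and the `JobPermissionLevel` enum value is an assumption:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()

    w.jobs.update_permissions(
        job_id="123",
        access_control_list=[
            jobs.JobAccessControlRequest(
                user_name="someone@example.com",  # placeholder principal
                permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN)
        ])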
 
     .. py:method:: wait_get_run_job_terminated_or_skipped(run_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Run], None]]) -> Run
diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst
index 69f211552..b4326e9e2 100644
--- a/docs/workspace/jobs/policy_compliance_for_jobs.rst
+++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst
@@ -5,62 +5,61 @@
 .. py:class:: PolicyComplianceForJobsAPI
 
     The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
-    This API currently only supports compliance controls for cluster policies.
-    
-    A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
-    policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
-    edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
-    policies.
-    
-    The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
-    compliance API allows you to update a job so that it becomes compliant with all of its policies.
+This API currently only supports compliance controls for cluster policies.
+
+A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
+policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
+edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
+policies.
+
+The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
+compliance API allows you to update a job so that it becomes compliant with all of its policies.
 
     .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse
 
         Enforce job policy compliance.
-        
-        Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
-        are compliant with the current versions of their respective cluster policies. All-purpose clusters
-        used in the job will not be updated.
-        
-        :param job_id: int
-          The ID of the job you want to enforce policy compliance on.
-        :param validate_only: bool (optional)
-          If set, previews changes made to the job to comply with its policy, but does not update the job.
-        
-        :returns: :class:`EnforcePolicyComplianceResponse`
-        
+
+Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+are compliant with the current versions of their respective cluster policies. All-purpose clusters
+used in the job will not be updated.
+
+:param job_id: int
+  The ID of the job you want to enforce policy compliance on.
+:param validate_only: bool (optional)
+  If set, previews changes made to the job to comply with its policy, but does not update the job.
+
+:returns: :class:`EnforcePolicyComplianceResponse`
+
 
     .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse
 
         Get job policy compliance.
-        
-        Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
-        they use was updated after the job was last edited and some of its job clusters no longer comply with
-        their updated policies.
-        
-        :param job_id: int
-          The ID of the job whose compliance status you are requesting.
-        
-        :returns: :class:`GetPolicyComplianceResponse`
-        
+
+Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+they use was updated after the job was last edited and some of its job clusters no longer comply with
+their updated policies.
+
+:param job_id: int
+  The ID of the job whose compliance status you are requesting.
+
+:returns: :class:`GetPolicyComplianceResponse`
+
 
     .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance]
 
         List job policy compliance.
-        
-        Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
-        compliance if a cluster policy they use was updated after the job was last edited and its job clusters
-        no longer comply with the updated policy.
-        
-        :param policy_id: str
-          Canonical unique identifier for the cluster policy.
-        :param page_size: int (optional)
-          Use this field to specify the maximum number of results to be returned by the server. The server may
-          further constrain the maximum number of results returned in a single page.
-        :param page_token: str (optional)
-          A page token that can be used to navigate to the next page or previous page as returned by
-          `next_page_token` or `prev_page_token`.
-        
-        :returns: Iterator over :class:`JobCompliance`
-        
\ No newline at end of file
+
+Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+compliance if a cluster policy they use was updated after the job was last edited and its job clusters
+no longer comply with the updated policy.
+
+:param policy_id: str
+  Canonical unique identifier for the cluster policy.
+:param page_size: int (optional)
+  Use this field to specify the maximum number of results to be returned by the server. The server may
+  further constrain the maximum number of results returned in a single page.
+:param page_token: str (optional)
+  A page token that can be used to navigate to the next page or previous page as returned by
+  `next_page_token` or `prev_page_token`.
+
+:returns: Iterator over :class:`JobCompliance`
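Combining the three methods, here is a sketch that scans a policy's jobs and previews enforcement; the `is_compliant` and `job_id` fields on :class:`JobCompliance` are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for compliance in w.policy_compliance_for_jobs.list_compliance(policy_id="ABC123"):
        if not compliance.is_compliant:  # assumed field name
            # validate_only=True previews the changes without updating the job
            preview = w.policy_compliance_for_jobs.enforce_compliance(
                job_id=compliance.job_id, validate_only=True)
            print(preview)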
diff --git a/docs/workspace/marketplace/consumer_fulfillments.rst b/docs/workspace/marketplace/consumer_fulfillments.rst
index 4ea7a9c29..5833c3fac 100644
--- a/docs/workspace/marketplace/consumer_fulfillments.rst
+++ b/docs/workspace/marketplace/consumer_fulfillments.rst
@@ -9,28 +9,27 @@
     .. py:method:: get(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SharedDataObject]
 
         Get listing content metadata.
-        
-        Get a high level preview of the metadata of listing installable content.
-        
-        :param listing_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`SharedDataObject`
-        
+
+Get a high level preview of the metadata of listing installable content.
+
+:param listing_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`SharedDataObject`
+
 
     .. py:method:: list(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListingFulfillment]
 
         List all listing fulfillments.
-        
-        Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation.
-        Standard installations contain metadata about the attached share or git repo. Only one of these fields
-        will be present. Personalized installations contain metadata about the attached share or git repo, as
-        well as the Delta Sharing recipient type.
-        
-        :param listing_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ListingFulfillment`
-        
\ No newline at end of file
+
+Get all listing fulfillments associated with a listing. A _fulfillment_ is a potential installation.
+Standard installations contain metadata about the attached share or git repo. Only one of these fields
+will be present. Personalized installations contain metadata about the attached share or git repo, as
+well as the Delta Sharing recipient type.
+
+:param listing_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ListingFulfillment`
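Both methods return paginating iterators, so a sketch of inspecting a listing's content and its fulfillments (with a placeholder listing ID) is simply:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # High-level preview of the installable content
    for obj in w.consumer_fulfillments.get(listing_id="abc"):
        print(obj)

    # Every potential installation for the same listing
    for fulfillment in w.consumer_fulfillments.list(listing_id="abc"):
        print(fulfillment)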
diff --git a/docs/workspace/marketplace/consumer_installations.rst b/docs/workspace/marketplace/consumer_installations.rst
index 3cdb00a5a..363d90655 100644
--- a/docs/workspace/marketplace/consumer_installations.rst
+++ b/docs/workspace/marketplace/consumer_installations.rst
@@ -9,70 +9,69 @@
     .. py:method:: create(listing_id: str [, accepted_consumer_terms: Optional[ConsumerTerms], catalog_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType], repo_detail: Optional[RepoInstallation], share_name: Optional[str]]) -> Installation
 
         Install from a listing.
-        
-        Install payload associated with a Databricks Marketplace listing.
-        
-        :param listing_id: str
-        :param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
-        :param catalog_name: str (optional)
-        :param recipient_type: :class:`DeltaSharingRecipientType` (optional)
-        :param repo_detail: :class:`RepoInstallation` (optional)
-          for git repo installations
-        :param share_name: str (optional)
-        
-        :returns: :class:`Installation`
-        
+
+Install payload associated with a Databricks Marketplace listing.
+
+:param listing_id: str
+:param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
+:param catalog_name: str (optional)
+:param recipient_type: :class:`DeltaSharingRecipientType` (optional)
+:param repo_detail: :class:`RepoInstallation` (optional)
+  for git repo installations
+:param share_name: str (optional)
+
+:returns: :class:`Installation`
+
 
     .. py:method:: delete(listing_id: str, installation_id: str)
 
         Uninstall from a listing.
-        
-        Uninstall an installation associated with a Databricks Marketplace listing.
-        
-        :param listing_id: str
-        :param installation_id: str
-        
-        
-        
+
+Uninstall an installation associated with a Databricks Marketplace listing.
+
+:param listing_id: str
+:param installation_id: str
+
+
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail]
 
         List all installations.
-        
-        List all installations across all listings.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`InstallationDetail`
-        
+
+List all installations across all listings.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`InstallationDetail`
+
 
     .. py:method:: list_listing_installations(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail]
 
         List installations for a listing.
-        
-        List all installations for a particular listing.
-        
-        :param listing_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`InstallationDetail`
-        
+
+List all installations for a particular listing.
+
+:param listing_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`InstallationDetail`
+
 
     .. py:method:: update(listing_id: str, installation_id: str, installation: InstallationDetail [, rotate_token: Optional[bool]]) -> UpdateInstallationResponse
 
         Update an installation.
-        
-        This is a update API that will update the part of the fields defined in the installation table as well
-        as interact with external services according to the fields not included in the installation table 1.
-        the token will be rotate if the rotateToken flag is true 2. the token will be forcibly rotate if the
-        rotateToken flag is true and the tokenInfo field is empty
-        
-        :param listing_id: str
-        :param installation_id: str
-        :param installation: :class:`InstallationDetail`
-        :param rotate_token: bool (optional)
-        
-        :returns: :class:`UpdateInstallationResponse`
-        
\ No newline at end of file
+
+This is an update API that will update the fields defined in the installation table, as well as
+interact with external services according to the fields not included in the installation table:
+1. the token will be rotated if the rotateToken flag is true; 2. the token will be forcibly rotated
+if the rotateToken flag is true and the tokenInfo field is empty.
+
+:param listing_id: str
+:param installation_id: str
+:param installation: :class:`InstallationDetail`
+:param rotate_token: bool (optional)
+
+:returns: :class:`UpdateInstallationResponse`
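A sketch of forcing a token rotation on an existing installation; fetching the current detail via `list_listing_installations` first, and the `id` field on :class:`InstallationDetail`, are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    installation = next(w.consumer_installations.list_listing_installations(listing_id="abc"))
    w.consumer_installations.update(listing_id="abc",
                                    installation_id=installation.id,  # assumed field
                                    installation=installation,
                                    rotate_token=True)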
diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst
index 242a8fce7..9dff6a54e 100644
--- a/docs/workspace/marketplace/consumer_listings.rst
+++ b/docs/workspace/marketplace/consumer_listings.rst
@@ -5,75 +5,74 @@
 .. py:class:: ConsumerListingsAPI
 
     Listings are the core entities in the Marketplace. They represent the products that are available for
-    consumption.
+consumption.
 
     .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetListingsResponse
 
         Get one batch of listings. One may specify up to 50 IDs per request.
-        
-        Batch get a published listing in the Databricks Marketplace that the consumer has access to.
-        
-        :param ids: List[str] (optional)
-        
-        :returns: :class:`BatchGetListingsResponse`
-        
+
+Batch get a published listing in the Databricks Marketplace that the consumer has access to.
+
+:param ids: List[str] (optional)
+
+:returns: :class:`BatchGetListingsResponse`
+
 
     .. py:method:: get(id: str) -> GetListingResponse
 
         Get listing.
-        
-        Get a published listing in the Databricks Marketplace that the consumer has access to.
-        
-        :param id: str
-        
-        :returns: :class:`GetListingResponse`
-        
+
+Get a published listing in the Databricks Marketplace that the consumer has access to.
+
+:param id: str
+
+:returns: :class:`GetListingResponse`
+
 
     .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing]
 
         List listings.
-        
-        List all published listings in the Databricks Marketplace that the consumer has access to.
-        
-        :param assets: List[:class:`AssetType`] (optional)
-          Matches any of the following asset types
-        :param categories: List[:class:`Category`] (optional)
-          Matches any of the following categories
-        :param is_free: bool (optional)
-          Filters each listing based on if it is free.
-        :param is_private_exchange: bool (optional)
-          Filters each listing based on if it is a private exchange.
-        :param is_staff_pick: bool (optional)
-          Filters each listing based on whether it is a staff pick.
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        :param provider_ids: List[str] (optional)
-          Matches any of the following provider ids
-        :param tags: List[:class:`ListingTag`] (optional)
-          Matches any of the following tags
-        
-        :returns: Iterator over :class:`Listing`
-        
+
+List all published listings in the Databricks Marketplace that the consumer has access to.
+
+:param assets: List[:class:`AssetType`] (optional)
+  Matches any of the following asset types
+:param categories: List[:class:`Category`] (optional)
+  Matches any of the following categories
+:param is_free: bool (optional)
+  Filters each listing based on whether it is free.
+:param is_private_exchange: bool (optional)
+  Filters each listing based on whether it is a private exchange.
+:param is_staff_pick: bool (optional)
+  Filters each listing based on whether it is a staff pick.
+:param page_size: int (optional)
+:param page_token: str (optional)
+:param provider_ids: List[str] (optional)
+  Matches any of the following provider IDs
+:param tags: List[:class:`ListingTag`] (optional)
+  Matches any of the following tags
+
+:returns: Iterator over :class:`Listing`
+
 
     .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing]
 
         Search listings.
-        
-        Search published listings in the Databricks Marketplace that the consumer has access to. This query
-        supports a variety of different search parameters and performs fuzzy matching.
-        
-        :param query: str
-          Fuzzy matches query
-        :param assets: List[:class:`AssetType`] (optional)
-          Matches any of the following asset types
-        :param categories: List[:class:`Category`] (optional)
-          Matches any of the following categories
-        :param is_free: bool (optional)
-        :param is_private_exchange: bool (optional)
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        :param provider_ids: List[str] (optional)
-          Matches any of the following provider ids
-        
-        :returns: Iterator over :class:`Listing`
-        
\ No newline at end of file
+
+Search published listings in the Databricks Marketplace that the consumer has access to. This query
+supports a variety of different search parameters and performs fuzzy matching.
+
+:param query: str
+  Fuzzy matches query
+:param assets: List[:class:`AssetType`] (optional)
+  Matches any of the following asset types
+:param categories: List[:class:`Category`] (optional)
+  Matches any of the following categories
+:param is_free: bool (optional)
+:param is_private_exchange: bool (optional)
+:param page_size: int (optional)
+:param page_token: str (optional)
+:param provider_ids: List[str] (optional)
+  Matches any of the following provider IDs
+
+:returns: Iterator over :class:`Listing`
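A minimal sketch of a fuzzy search restricted to free listings; the query string is a placeholder:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for listing in w.consumer_listings.search(query="weather", is_free=True):
        print(listing)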
diff --git a/docs/workspace/marketplace/consumer_personalization_requests.rst b/docs/workspace/marketplace/consumer_personalization_requests.rst
index 63ead75d3..e732113ff 100644
--- a/docs/workspace/marketplace/consumer_personalization_requests.rst
+++ b/docs/workspace/marketplace/consumer_personalization_requests.rst
@@ -9,42 +9,41 @@
     .. py:method:: create(listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms [, comment: Optional[str], company: Optional[str], first_name: Optional[str], is_from_lighthouse: Optional[bool], last_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType]]) -> CreatePersonalizationRequestResponse
 
         Create a personalization request.
-        
-        Create a personalization request for a listing.
-        
-        :param listing_id: str
-        :param intended_use: str
-        :param accepted_consumer_terms: :class:`ConsumerTerms`
-        :param comment: str (optional)
-        :param company: str (optional)
-        :param first_name: str (optional)
-        :param is_from_lighthouse: bool (optional)
-        :param last_name: str (optional)
-        :param recipient_type: :class:`DeltaSharingRecipientType` (optional)
-        
-        :returns: :class:`CreatePersonalizationRequestResponse`
-        
+
+Create a personalization request for a listing.
+
+:param listing_id: str
+:param intended_use: str
+:param accepted_consumer_terms: :class:`ConsumerTerms`
+:param comment: str (optional)
+:param company: str (optional)
+:param first_name: str (optional)
+:param is_from_lighthouse: bool (optional)
+:param last_name: str (optional)
+:param recipient_type: :class:`DeltaSharingRecipientType` (optional)
+
+:returns: :class:`CreatePersonalizationRequestResponse`
+
 
     .. py:method:: get(listing_id: str) -> GetPersonalizationRequestResponse
 
         Get the personalization request for a listing.
-        
-        Get the personalization request for a listing. Each consumer can make at *most* one personalization
-        request for a listing.
-        
-        :param listing_id: str
-        
-        :returns: :class:`GetPersonalizationRequestResponse`
-        
+
+Get the personalization request for a listing. Each consumer can make at *most* one personalization
+request for a listing.
+
+:param listing_id: str
+
+:returns: :class:`GetPersonalizationRequestResponse`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest]
 
         List all personalization requests.
-        
-        List personalization requests for a consumer across all listings.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`PersonalizationRequest`
-        
\ No newline at end of file
+
+List personalization requests for a consumer across all listings.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`PersonalizationRequest`
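A sketch of filing a request; the `version` field on :class:`ConsumerTerms` and the free-text values are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import marketplace

    w = WorkspaceClient()

    response = w.consumer_personalization_requests.create(
        listing_id="abc",
        intended_use="evaluating the dataset for an internal proof of concept",
        accepted_consumer_terms=marketplace.ConsumerTerms(version="1.0"))  # assumed field
    print(response)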
diff --git a/docs/workspace/marketplace/consumer_providers.rst b/docs/workspace/marketplace/consumer_providers.rst
index 13cca357e..f6cc1d770 100644
--- a/docs/workspace/marketplace/consumer_providers.rst
+++ b/docs/workspace/marketplace/consumer_providers.rst
@@ -9,34 +9,33 @@
     .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetProvidersResponse
 
         Get one batch of providers. One may specify up to 50 IDs per request.
-        
-        Batch get a provider in the Databricks Marketplace with at least one visible listing.
-        
-        :param ids: List[str] (optional)
-        
-        :returns: :class:`BatchGetProvidersResponse`
-        
+
+Batch get a provider in the Databricks Marketplace with at least one visible listing.
+
+:param ids: List[str] (optional)
+
+:returns: :class:`BatchGetProvidersResponse`
+
 
     .. py:method:: get(id: str) -> GetProviderResponse
 
         Get a provider.
-        
-        Get a provider in the Databricks Marketplace with at least one visible listing.
-        
-        :param id: str
-        
-        :returns: :class:`GetProviderResponse`
-        
+
+Get a provider in the Databricks Marketplace with at least one visible listing.
+
+:param id: str
+
+:returns: :class:`GetProviderResponse`
+
 
     .. py:method:: list( [, is_featured: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
         List providers.
-        
-        List all providers in the Databricks Marketplace with at least one visible listing.
-        
-        :param is_featured: bool (optional)
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ProviderInfo`
-        
\ No newline at end of file
+
+List all providers in the Databricks Marketplace with at least one visible listing.
+
+:param is_featured: bool (optional)
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ProviderInfo`
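For instance, a sketch that prints only featured providers; the `name` field on :class:`ProviderInfo` is an assumption:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for provider in w.consumer_providers.list(is_featured=True):
        print(provider.name)  # assumed field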
diff --git a/docs/workspace/marketplace/provider_exchange_filters.rst b/docs/workspace/marketplace/provider_exchange_filters.rst
index ceca51e63..3d3becc67 100644
--- a/docs/workspace/marketplace/provider_exchange_filters.rst
+++ b/docs/workspace/marketplace/provider_exchange_filters.rst
@@ -9,46 +9,45 @@
     .. py:method:: create(filter: ExchangeFilter) -> CreateExchangeFilterResponse
 
         Create a new exchange filter.
-        
-        Add an exchange filter.
-        
-        :param filter: :class:`ExchangeFilter`
-        
-        :returns: :class:`CreateExchangeFilterResponse`
-        
+
+Add an exchange filter.
+
+:param filter: :class:`ExchangeFilter`
+
+:returns: :class:`CreateExchangeFilterResponse`
+
 
     .. py:method:: delete(id: str)
 
         Delete an exchange filter.
-        
-        Delete an exchange filter
-        
-        :param id: str
-        
-        
-        
+
+Delete an exchange filter
+
+:param id: str
+
+
+
 
     .. py:method:: list(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeFilter]
 
         List exchange filters.
-        
-        List exchange filter
-        
-        :param exchange_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ExchangeFilter`
-        
+
+List exchange filters
+
+:param exchange_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ExchangeFilter`
+
 
     .. py:method:: update(id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse
 
         Update exchange filter.
-        
-        Update an exchange filter.
-        
-        :param id: str
-        :param filter: :class:`ExchangeFilter`
-        
-        :returns: :class:`UpdateExchangeFilterResponse`
-        
\ No newline at end of file
+
+Update an exchange filter.
+
+:param id: str
+:param filter: :class:`ExchangeFilter`
+
+:returns: :class:`UpdateExchangeFilterResponse`
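A minimal sketch of listing the filters attached to an exchange, with a placeholder exchange ID:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for exchange_filter in w.provider_exchange_filters.list(exchange_id="abc"):
        print(exchange_filter)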
diff --git a/docs/workspace/marketplace/provider_exchanges.rst b/docs/workspace/marketplace/provider_exchanges.rst
index d53fd823d..6c5eda159 100644
--- a/docs/workspace/marketplace/provider_exchanges.rst
+++ b/docs/workspace/marketplace/provider_exchanges.rst
@@ -9,105 +9,104 @@
     .. py:method:: add_listing_to_exchange(listing_id: str, exchange_id: str) -> AddExchangeForListingResponse
 
         Add an exchange for listing.
-        
-        Associate an exchange with a listing
-        
-        :param listing_id: str
-        :param exchange_id: str
-        
-        :returns: :class:`AddExchangeForListingResponse`
-        
+
+Associate an exchange with a listing
+
+:param listing_id: str
+:param exchange_id: str
+
+:returns: :class:`AddExchangeForListingResponse`
+
 
     .. py:method:: create(exchange: Exchange) -> CreateExchangeResponse
 
         Create an exchange.
-        
-        Create an exchange
-        
-        :param exchange: :class:`Exchange`
-        
-        :returns: :class:`CreateExchangeResponse`
-        
+
+Create an exchange
+
+:param exchange: :class:`Exchange`
+
+:returns: :class:`CreateExchangeResponse`
+
 
     .. py:method:: delete(id: str)
 
         Delete an exchange.
-        
-        This removes a listing from marketplace.
-        
-        :param id: str
-        
-        
-        
+
+This removes a listing from the marketplace.
+
+:param id: str
+
+
+
 
     .. py:method:: delete_listing_from_exchange(id: str)
 
         Remove an exchange for listing.
-        
-        Disassociate an exchange with a listing
-        
-        :param id: str
-        
-        
-        
+
+Disassociate an exchange from a listing
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> GetExchangeResponse
 
         Get an exchange.
-        
-        Get an exchange.
-        
-        :param id: str
-        
-        :returns: :class:`GetExchangeResponse`
-        
+
+Get an exchange.
+
+:param id: str
+
+:returns: :class:`GetExchangeResponse`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Exchange]
 
         List exchanges.
-        
-        List exchanges visible to provider
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`Exchange`
-        
+
+List exchanges visible to the provider
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`Exchange`
+
 
     .. py:method:: list_exchanges_for_listing(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing]
 
         List exchanges for listing.
-        
-        List exchanges associated with a listing
-        
-        :param listing_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ExchangeListing`
-        
+
+List exchanges associated with a listing
+
+:param listing_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ExchangeListing`
+
 
     .. py:method:: list_listings_for_exchange(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing]
 
         List listings for exchange.
-        
-        List listings associated with an exchange
-        
-        :param exchange_id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ExchangeListing`
-        
+
+List listings associated with an exchange
+
+:param exchange_id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ExchangeListing`
+
 
     .. py:method:: update(id: str, exchange: Exchange) -> UpdateExchangeResponse
 
         Update exchange.
-        
-        Update an exchange
-        
-        :param id: str
-        :param exchange: :class:`Exchange`
-        
-        :returns: :class:`UpdateExchangeResponse`
-        
\ No newline at end of file
+
+Update an exchange
+
+:param id: str
+:param exchange: :class:`Exchange`
+
+:returns: :class:`UpdateExchangeResponse`
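A sketch of creating an exchange and attaching a listing to it; the `name` field on :class:`Exchange` and the `exchange_id` field on the create response are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import marketplace

    w = WorkspaceClient()

    created = w.provider_exchanges.create(exchange=marketplace.Exchange(name="my-exchange"))
    w.provider_exchanges.add_listing_to_exchange(listing_id="abc",
                                                 exchange_id=created.exchange_id)  # assumed field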
diff --git a/docs/workspace/marketplace/provider_files.rst b/docs/workspace/marketplace/provider_files.rst
index f719ca65f..b71865e30 100644
--- a/docs/workspace/marketplace/provider_files.rst
+++ b/docs/workspace/marketplace/provider_files.rst
@@ -9,48 +9,47 @@
     .. py:method:: create(file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str [, display_name: Optional[str]]) -> CreateFileResponse
 
         Create a file.
-        
-        Create a file. Currently, only provider icons and attached notebooks are supported.
-        
-        :param file_parent: :class:`FileParent`
-        :param marketplace_file_type: :class:`MarketplaceFileType`
-        :param mime_type: str
-        :param display_name: str (optional)
-        
-        :returns: :class:`CreateFileResponse`
-        
+
+Create a file. Currently, only provider icons and attached notebooks are supported.
+
+:param file_parent: :class:`FileParent`
+:param marketplace_file_type: :class:`MarketplaceFileType`
+:param mime_type: str
+:param display_name: str (optional)
+
+:returns: :class:`CreateFileResponse`
+
 
     .. py:method:: delete(file_id: str)
 
         Delete a file.
-        
-        Delete a file
-        
-        :param file_id: str
-        
-        
-        
+
+Delete a file
+
+:param file_id: str
+
+
+
 
     .. py:method:: get(file_id: str) -> GetFileResponse
 
         Get a file.
-        
-        Get a file
-        
-        :param file_id: str
-        
-        :returns: :class:`GetFileResponse`
-        
+
+Get a file
+
+:param file_id: str
+
+:returns: :class:`GetFileResponse`
+
 
     .. py:method:: list(file_parent: FileParent [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FileInfo]
 
         List files.
-        
-        List files attached to a parent entity.
-        
-        :param file_parent: :class:`FileParent`
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`FileInfo`
-        
\ No newline at end of file
+
+List files attached to a parent entity.
+
+:param file_parent: :class:`FileParent`
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`FileInfo`
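A sketch of listing all files attached to a listing; the :class:`FileParent` field names are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import marketplace

    w = WorkspaceClient()

    parent = marketplace.FileParent(file_parent_type=marketplace.FileParentType.LISTING,  # assumed
                                    parent_id="abc")
    for file_info in w.provider_files.list(file_parent=parent):
        print(file_info)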
diff --git a/docs/workspace/marketplace/provider_listings.rst b/docs/workspace/marketplace/provider_listings.rst
index d26c5293e..7b96689a8 100644
--- a/docs/workspace/marketplace/provider_listings.rst
+++ b/docs/workspace/marketplace/provider_listings.rst
@@ -5,61 +5,60 @@
 .. py:class:: ProviderListingsAPI
 
     Listings are the core entities in the Marketplace. They represent the products that are available for
-    consumption.
+consumption.
 
     .. py:method:: create(listing: Listing) -> CreateListingResponse
 
         Create a listing.
-        
-        Create a new listing
-        
-        :param listing: :class:`Listing`
-        
-        :returns: :class:`CreateListingResponse`
-        
+
+Create a new listing
+
+:param listing: :class:`Listing`
+
+:returns: :class:`CreateListingResponse`
+
 
     .. py:method:: delete(id: str)
 
         Delete a listing.
-        
-        Delete a listing
-        
-        :param id: str
-        
-        
-        
+
+Delete a listing
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> GetListingResponse
 
         Get a listing.
-        
-        Get a listing
-        
-        :param id: str
-        
-        :returns: :class:`GetListingResponse`
-        
+
+Get a listing
+
+:param id: str
+
+:returns: :class:`GetListingResponse`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Listing]
 
         List listings.
-        
-        List listings owned by this provider
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`Listing`
-        
+
+List listings owned by this provider
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`Listing`
+
 
     .. py:method:: update(id: str, listing: Listing) -> UpdateListingResponse
 
         Update listing.
-        
-        Update a listing
-        
-        :param id: str
-        :param listing: :class:`Listing`
-        
-        :returns: :class:`UpdateListingResponse`
-        
\ No newline at end of file
+
+Update a listing
+
+:param id: str
+:param listing: :class:`Listing`
+
+:returns: :class:`UpdateListingResponse`
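A get-modify-update sketch; the `listing` field on the get response and the `summary.subtitle` attribute are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    listing = w.provider_listings.get(id="abc").listing  # assumed field
    listing.summary.subtitle = "Updated subtitle"        # assumed attribute
    w.provider_listings.update(id="abc", listing=listing)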
diff --git a/docs/workspace/marketplace/provider_personalization_requests.rst b/docs/workspace/marketplace/provider_personalization_requests.rst
index 32cdbdbb3..ba896ce96 100644
--- a/docs/workspace/marketplace/provider_personalization_requests.rst
+++ b/docs/workspace/marketplace/provider_personalization_requests.rst
@@ -5,32 +5,31 @@
 .. py:class:: ProviderPersonalizationRequestsAPI
 
     Personalization requests are an alternate to instantly available listings. Control the lifecycle of
-    personalized solutions.
+personalized solutions.
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest]
 
         All personalization requests across all listings.
-        
-        List personalization requests to this provider. This will return all personalization requests,
-        regardless of which listing they are for.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`PersonalizationRequest`
-        
+
+List personalization requests to this provider. This will return all personalization requests,
+regardless of which listing they are for.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`PersonalizationRequest`
+
 
     .. py:method:: update(listing_id: str, request_id: str, status: PersonalizationRequestStatus [, reason: Optional[str], share: Optional[ShareInfo]]) -> UpdatePersonalizationRequestResponse
 
         Update personalization request status.
-        
-        Update personalization request. This method only permits updating the status of the request.
-        
-        :param listing_id: str
-        :param request_id: str
-        :param status: :class:`PersonalizationRequestStatus`
-        :param reason: str (optional)
-        :param share: :class:`ShareInfo` (optional)
-        
-        :returns: :class:`UpdatePersonalizationRequestResponse`
-        
\ No newline at end of file
+
+Update personalization request. This method only permits updating the status of the request.
+
+:param listing_id: str
+:param request_id: str
+:param status: :class:`PersonalizationRequestStatus`
+:param reason: str (optional)
+:param share: :class:`ShareInfo` (optional)
+
+:returns: :class:`UpdatePersonalizationRequestResponse`
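A sketch that fulfills every new request; the field names on :class:`PersonalizationRequest` and the enum values are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import marketplace

    w = WorkspaceClient()

    for request in w.provider_personalization_requests.list():
        if request.status == marketplace.PersonalizationRequestStatus.NEW:  # assumed value
            w.provider_personalization_requests.update(
                listing_id=request.listing_id,
                request_id=request.id,
                status=marketplace.PersonalizationRequestStatus.FULFILLED)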
diff --git a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
index cc29e089f..4ddee879a 100644
--- a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
+++ b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
@@ -9,42 +9,41 @@
     .. py:method:: create() -> ProviderAnalyticsDashboard
 
         Create provider analytics dashboard.
-        
-        Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the
-        Lakeview dashboard id.
-        
-        :returns: :class:`ProviderAnalyticsDashboard`
-        
+
+Create provider analytics dashboard. Returns a Marketplace-specific `id`, not to be confused with
+the Lakeview dashboard id.
+
+:returns: :class:`ProviderAnalyticsDashboard`
+
 
     .. py:method:: get() -> ListProviderAnalyticsDashboardResponse
 
         Get provider analytics dashboard.
-        
-        Get provider analytics dashboard.
-        
-        :returns: :class:`ListProviderAnalyticsDashboardResponse`
-        
+
+Get provider analytics dashboard.
+
+:returns: :class:`ListProviderAnalyticsDashboardResponse`
+
 
     .. py:method:: get_latest_version() -> GetLatestVersionProviderAnalyticsDashboardResponse
 
         Get latest version of provider analytics dashboard.
-        
-        Get latest version of provider analytics dashboard.
-        
-        :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse`
-        
+
+Get latest version of provider analytics dashboard.
+
+:returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse`
+
 
     .. py:method:: update(id: str [, version: Optional[int]]) -> UpdateProviderAnalyticsDashboardResponse
 
         Update provider analytics dashboard.
-        
-        Update provider analytics dashboard.
-        
-        :param id: str
-          id is immutable property and can't be updated.
-        :param version: int (optional)
-          this is the version of the dashboard template we want to update our user to current expectation is
-          that it should be equal to latest version of the dashboard template
-        
-        :returns: :class:`UpdateProviderAnalyticsDashboardResponse`
-        
\ No newline at end of file
+
+Update provider analytics dashboard.
+
+:param id: str
+  id is an immutable property and can't be updated.
+:param version: int (optional)
+  The version of the dashboard template to update the user to. The current expectation is that it
+  should be equal to the latest version of the dashboard template.
+
+:returns: :class:`UpdateProviderAnalyticsDashboardResponse`
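A sketch of creating a dashboard and then syncing it to the latest template version; the `id` and `version` response fields are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    dashboard = w.provider_provider_analytics_dashboards.create()
    latest = w.provider_provider_analytics_dashboards.get_latest_version()
    w.provider_provider_analytics_dashboards.update(id=dashboard.id,         # assumed field
                                                    version=latest.version)  # assumed field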
diff --git a/docs/workspace/marketplace/provider_providers.rst b/docs/workspace/marketplace/provider_providers.rst
index 610c9602e..61ea4d966 100644
--- a/docs/workspace/marketplace/provider_providers.rst
+++ b/docs/workspace/marketplace/provider_providers.rst
@@ -9,56 +9,55 @@
     .. py:method:: create(provider: ProviderInfo) -> CreateProviderResponse
 
         Create a provider.
-        
-        Create a provider
-        
-        :param provider: :class:`ProviderInfo`
-        
-        :returns: :class:`CreateProviderResponse`
-        
+
+Create a provider
+
+:param provider: :class:`ProviderInfo`
+
+:returns: :class:`CreateProviderResponse`
+
 
     .. py:method:: delete(id: str)
 
         Delete provider.
-        
-        Delete provider
-        
-        :param id: str
-        
-        
-        
+
+Delete provider
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> GetProviderResponse
 
         Get provider.
-        
-        Get provider profile
-        
-        :param id: str
-        
-        :returns: :class:`GetProviderResponse`
-        
+
+Get provider profile
+
+:param id: str
+
+:returns: :class:`GetProviderResponse`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
         List providers.
-        
-        List provider profiles for account.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ProviderInfo`
-        
+
+List provider profiles for the account.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ProviderInfo`
+
 
     .. py:method:: update(id: str, provider: ProviderInfo) -> UpdateProviderResponse
 
         Update provider.
-        
-        Update provider profile
-        
-        :param id: str
-        :param provider: :class:`ProviderInfo`
-        
-        :returns: :class:`UpdateProviderResponse`
-        
\ No newline at end of file
+
+Update provider profile
+
+:param id: str
+:param provider: :class:`ProviderInfo`
+
+:returns: :class:`UpdateProviderResponse`
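A minimal sketch of enumerating the account's provider profiles; the `name` field on :class:`ProviderInfo` is an assumption:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for profile in w.provider_providers.list():
        print(profile.name)  # assumed field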
diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst
index 44ceeef8c..386395493 100644
--- a/docs/workspace/ml/experiments.rst
+++ b/docs/workspace/ml/experiments.rst
@@ -5,11 +5,11 @@
 .. py:class:: ExperimentsAPI
 
     Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each
-    experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for
-    analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server.
-    
-    Experiments are located in the workspace file tree. You manage experiments using the same tools you use to
-    manage other workspace objects such as folders, notebooks, and libraries.
+experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for
+analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server.
+
+Experiments are located in the workspace file tree. You manage experiments using the same tools you use to
+manage other workspace objects such as folders, notebooks, and libraries.
 
     .. py:method:: create_experiment(name: str [, artifact_location: Optional[str], tags: Optional[List[ExperimentTag]]]) -> CreateExperimentResponse
 
@@ -30,26 +30,26 @@
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Create experiment.
-        
-        Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that
-        another experiment with the same name does not already exist and fails if another experiment with the
-        same name already exists.
-        
-        Throws `RESOURCE_ALREADY_EXISTS` if a experiment with the given name exists.
-        
-        :param name: str
-          Experiment name.
-        :param artifact_location: str (optional)
-          Location where all artifacts for the experiment are stored. If not provided, the remote server will
-          select an appropriate default.
-        :param tags: List[:class:`ExperimentTag`] (optional)
-          A collection of tags to set on the experiment. Maximum tag size and number of tags per request
-          depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250
-          bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to
-          support up to 20 tags per request.
-        
-        :returns: :class:`CreateExperimentResponse`
-        
+
+Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that
+another experiment with the same name does not already exist and fails if another experiment with the
+same name already exists.
+
+Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists.
+
+:param name: str
+  Experiment name.
+:param artifact_location: str (optional)
+  Location where all artifacts for the experiment are stored. If not provided, the remote server will
+  select an appropriate default.
+:param tags: List[:class:`ExperimentTag`] (optional)
+  A collection of tags to set on the experiment. Maximum tag size and number of tags per request
+  depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250
+  bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to
+  support up to 20 tags per request.
+
+:returns: :class:`CreateExperimentResponse`
+
 
     .. py:method:: create_run( [, experiment_id: Optional[str], start_time: Optional[int], tags: Optional[List[RunTag]], user_id: Optional[str]]) -> CreateRunResponse
 
@@ -75,101 +75,101 @@
             w.experiments.delete_run(run_id=created.run.info.run_id)
 
         Create a run.
-        
-        Creates a new run within an experiment. A run is usually a single execution of a machine learning or
-        data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag`
-        associated with a single execution.
-        
-        :param experiment_id: str (optional)
-          ID of the associated experiment.
-        :param start_time: int (optional)
-          Unix timestamp in milliseconds of when the run started.
-        :param tags: List[:class:`RunTag`] (optional)
-          Additional metadata for run.
-        :param user_id: str (optional)
-          ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in
-          a future MLflow release. Use 'mlflow.user' tag instead.
-        
-        :returns: :class:`CreateRunResponse`
-        
+
+Creates a new run within an experiment. A run is usually a single execution of a machine learning or
+data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag`
+associated with a single execution.
+
+:param experiment_id: str (optional)
+  ID of the associated experiment.
+:param start_time: int (optional)
+  Unix timestamp in milliseconds of when the run started.
+:param tags: List[:class:`RunTag`] (optional)
+  Additional metadata for run.
+:param user_id: str (optional)
+  ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in
+  a future MLflow release. Use 'mlflow.user' tag instead.
+
+:returns: :class:`CreateRunResponse`
+
 
     .. py:method:: delete_experiment(experiment_id: str)
 
         Delete an experiment.
-        
-        Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the
-        experiment uses FileStore, artifacts associated with experiment are also deleted.
-        
-        :param experiment_id: str
-          ID of the associated experiment.
-        
-        
-        
+
+Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the
+experiment uses FileStore, artifacts associated with experiment are also deleted.
+
+:param experiment_id: str
+  ID of the associated experiment.
+
+
+
 
     .. py:method:: delete_run(run_id: str)
 
         Delete a run.
-        
-        Marks a run for deletion.
-        
-        :param run_id: str
-          ID of the run to delete.
-        
-        
-        
+
+Marks a run for deletion.
+
+:param run_id: str
+  ID of the run to delete.
+
+
+
 
     .. py:method:: delete_runs(experiment_id: str, max_timestamp_millis: int [, max_runs: Optional[int]]) -> DeleteRunsResponse
 
         Delete runs by creation time.
-        
-        Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at
-        most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
-        client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete.
-        
-        :param experiment_id: str
-          The ID of the experiment containing the runs to delete.
-        :param max_timestamp_millis: int
-          The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs
-          created prior to or at this timestamp are deleted.
-        :param max_runs: int (optional)
-          An optional positive integer indicating the maximum number of runs to delete. The maximum allowed
-          value for max_runs is 10000.
-        
-        :returns: :class:`DeleteRunsResponse`
-        
+
+Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at
+most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
+client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete.
+
+:param experiment_id: str
+  The ID of the experiment containing the runs to delete.
+:param max_timestamp_millis: int
+  The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs
+  created prior to or at this timestamp are deleted.
+:param max_runs: int (optional)
+  An optional positive integer indicating the maximum number of runs to delete. The maximum allowed
+  value for max_runs is 10000.
+
+:returns: :class:`DeleteRunsResponse`
+
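For example, a sketch that bulk-deletes up to 100 runs created before the current time, using only the names documented above:

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Deletes at most 100 runs created prior to or at "now"
    response = w.experiments.delete_runs(experiment_id="1234",
                                         max_timestamp_millis=int(time.time() * 1000),
                                         max_runs=100)
    print(response)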
 
     .. py:method:: delete_tag(run_id: str, key: str)
 
         Delete a tag.
-        
-        Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run
-        completes.
-        
-        :param run_id: str
-          ID of the run that the tag was logged under. Must be provided.
-        :param key: str
-          Name of the tag. Maximum size is 255 bytes. Must be provided.
-        
-        
-        
+
+Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run
+completes.
+
+:param run_id: str
+  ID of the run that the tag was logged under. Must be provided.
+:param key: str
+  Name of the tag. Maximum size is 255 bytes. Must be provided.
+
+
+
 
     .. py:method:: get_by_name(experiment_name: str) -> GetExperimentResponse
 
         Get metadata.
-        
-        Gets metadata for an experiment.
-        
-        This endpoint will return deleted experiments, but prefers the active experiment if an active and
-        deleted experiment share the same name. If multiple deleted experiments share the same name, the API
-        will return one of them.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists.
-        
-        :param experiment_name: str
-          Name of the associated experiment.
-        
-        :returns: :class:`GetExperimentResponse`
-        
+
+Gets metadata for an experiment.
+
+This endpoint will return deleted experiments, but prefers the active experiment if an active and
+deleted experiment share the same name. If multiple deleted experiments share the same name, the API
+will return one of them.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists.
+
+:param experiment_name: str
+  Name of the associated experiment.
+
+:returns: :class:`GetExperimentResponse`
+
 
     .. py:method:: get_experiment(experiment_id: str) -> GetExperimentResponse
 
@@ -192,104 +192,104 @@
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Get an experiment.
-        
-        Gets metadata for an experiment. This method works on deleted experiments.
-        
-        :param experiment_id: str
-          ID of the associated experiment.
-        
-        :returns: :class:`GetExperimentResponse`
-        
+
+Gets metadata for an experiment. This method works on deleted experiments.
+
+:param experiment_id: str
+  ID of the associated experiment.
+
+:returns: :class:`GetExperimentResponse`
+
 
     .. py:method:: get_history(metric_key: str [, max_results: Optional[int], page_token: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[Metric]
 
         Get history of a given metric within a run.
-        
-        Gets a list of all values for the specified metric for a given run.
-        
-        :param metric_key: str
-          Name of the metric.
-        :param max_results: int (optional)
-          Maximum number of Metric records to return per paginated request. Default is set to 25,000. If set
-          higher than 25,000, a request Exception will be raised.
-        :param page_token: str (optional)
-          Token indicating the page of metric histories to fetch.
-        :param run_id: str (optional)
-          ID of the run from which to fetch metric values. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run from which to fetch metric values. This field will be
-          removed in a future MLflow version.
-        
-        :returns: Iterator over :class:`Metric`
-        
+
+Gets a list of all values for the specified metric for a given run.
+
+:param metric_key: str
+  Name of the metric.
+:param max_results: int (optional)
+  Maximum number of Metric records to return per paginated request. Default is set to 25,000. If set
+  higher than 25,000, a request Exception will be raised.
+:param page_token: str (optional)
+  Token indicating the page of metric histories to fetch.
+:param run_id: str (optional)
+  ID of the run from which to fetch metric values. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run from which to fetch metric values. This field will be
+  removed in a future MLflow version.
+
+:returns: Iterator over :class:`Metric`
+
 
     .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse
 
         Get experiment permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param experiment_id: str
-          The experiment for which to get or manage permissions.
-        
-        :returns: :class:`GetExperimentPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param experiment_id: str
+  The experiment for which to get or manage permissions.
+
+:returns: :class:`GetExperimentPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(experiment_id: str) -> ExperimentPermissions
 
         Get experiment permissions.
-        
-        Gets the permissions of an experiment. Experiments can inherit permissions from their root object.
-        
-        :param experiment_id: str
-          The experiment for which to get or manage permissions.
-        
-        :returns: :class:`ExperimentPermissions`
-        
+
+Gets the permissions of an experiment. Experiments can inherit permissions from their root object.
+
+:param experiment_id: str
+  The experiment for which to get or manage permissions.
+
+:returns: :class:`ExperimentPermissions`
+
 
     .. py:method:: get_run(run_id: str [, run_uuid: Optional[str]]) -> GetRunResponse
 
         Get a run.
-        
-        Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the
-        same key are logged for a run, return only the value with the latest timestamp.
-        
-        If there are multiple values with the latest timestamp, return the maximum of these values.
-        
-        :param run_id: str
-          ID of the run to fetch. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future
-          MLflow version.
-        
-        :returns: :class:`GetRunResponse`
-        
+
+Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the
+same key are logged for a run, return only the value with the latest timestamp.
+
+If there are multiple values with the latest timestamp, return the maximum of these values.
+
+:param run_id: str
+  ID of the run to fetch. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future
+  MLflow version.
+
+:returns: :class:`GetRunResponse`
+
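A minimal sketch of fetching a run, assuming `GetRunResponse` nests the run under `.run.info` in the same way the `created.run.info.run_id` pattern is used in other examples on this page:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# "<run-id>" is a placeholder; the response access path is assumed to
# mirror the created.run.info.run_id pattern used elsewhere in these docs.
fetched = w.experiments.get_run(run_id="<run-id>")
print(fetched.run.info.status)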
 
     .. py:method:: list_artifacts( [, page_token: Optional[str], path: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[FileInfo]
 
         Get all artifacts.
-        
-        List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
-        contains only artifacts with the specified prefix. This API does not support pagination when listing
-        artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
-        `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
-        pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-        
-        :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
-          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
-          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
-          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-        :param path: str (optional)
-          Filter artifacts matching this path (a relative path from the root artifact directory).
-        :param run_id: str (optional)
-          ID of the run whose artifacts to list. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be removed
-          in a future MLflow version.
-        
-        :returns: Iterator over :class:`FileInfo`
-        
+
+List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
+contains only artifacts with the specified prefix. This API does not support pagination when listing
+artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+`/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
+
+:param page_token: str (optional)
+  Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
+  artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+  `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+  pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
+:param path: str (optional)
+  Filter artifacts matching this path (a relative path from the root artifact directory).
+:param run_id: str (optional)
+  ID of the run whose artifacts to list. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be removed
+  in a future MLflow version.
+
+:returns: Iterator over :class:`FileInfo`
+
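A sketch of listing artifacts under a prefix; the run ID is a placeholder and the `FileInfo` attribute names are assumptions based on the return type named above.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# List artifacts under the "model" prefix of a run. For UC Volumes
# artifact roots, remember the 1000-artifact cap noted above.
for f in w.experiments.list_artifacts(run_id="<run-id>", path="model"):
    print(f.path, f.is_dir, f.file_size)  # FileInfo attribute names assumed
```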
 
     .. py:method:: list_experiments( [, max_results: Optional[int], page_token: Optional[str], view_type: Optional[str]]) -> Iterator[Experiment]
 
@@ -306,308 +306,308 @@
             all = w.experiments.list_experiments(ml.ListExperimentsRequest())
 
         List experiments.
-        
-        Gets a list of all experiments.
-        
-        :param max_results: int (optional)
-          Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If
-          `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are
-          encouraged to pass max_results explicitly and leverage page_token to iterate through experiments.
-        :param page_token: str (optional)
-          Token indicating the page of experiments to fetch
-        :param view_type: str (optional)
-          Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-        
-        :returns: Iterator over :class:`Experiment`
-        
+
+Gets a list of all experiments.
+
+:param max_results: int (optional)
+  Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If
+  `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are
+  encouraged to pass max_results explicitly and leverage page_token to iterate through experiments.
+:param page_token: str (optional)
+  Token indicating the page of experiments to fetch
+:param view_type: str (optional)
+  Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
+
+:returns: Iterator over :class:`Experiment`
+
 
     .. py:method:: log_batch( [, metrics: Optional[List[Metric]], params: Optional[List[Param]], run_id: Optional[str], tags: Optional[List[RunTag]]])
 
         Log a batch.
-        
-        Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server
-        will respond with an error (non-200 status code).
-        
-        In case of error (due to internal server error or an invalid request), partial data may be written.
-        
-        You can write metrics, params, and tags in interleaving fashion, but within a given entity type are
-        guaranteed to follow the order specified in the request body.
-        
-        The overwrite behavior for metrics, params, and tags is as follows:
-        
-        * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to
-        the set of values for the metric with the provided key.
-        
-        * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple
-        tag values with the same key are provided in the same API request, the last-provided tag value is
-        written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent.
-        
-        * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will
-        result in an error). However, logging the same param (key, value) is permitted. Specifically, logging
-        a param is idempotent.
-        
-        Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB
-        in size and contain:
-        
-        * No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 params * Up to
-        100 tags
-        
-        For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900
-        metrics, 50 params, and 51 tags is invalid.
-        
-        The following limits also apply to metric, param, and tag keys and values:
-        
-        * Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter and tag
-        values can be up to 250 characters in length
-        
-        :param metrics: List[:class:`Metric`] (optional)
-          Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and
-          tags in total.
-        :param params: List[:class:`Param`] (optional)
-          Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and
-          tags in total.
-        :param run_id: str (optional)
-          ID of the run to log under
-        :param tags: List[:class:`RunTag`] (optional)
-          Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags
-          in total.
-        
-        
-        
+
+Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server
+will respond with an error (non-200 status code).
+
+In case of error (due to internal server error or an invalid request), partial data may be written.
+
+You can write metrics, params, and tags in interleaving fashion, but within a given entity type are
+guaranteed to follow the order specified in the request body.
+
+The overwrite behavior for metrics, params, and tags is as follows:
+
+* Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to
+the set of values for the metric with the provided key.
+
+* Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple
+tag values with the same key are provided in the same API request, the last-provided tag value is
+written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent.
+
+* Parameters: once written, param values cannot be changed (attempting to overwrite a param value will
+result in an error). However, logging the same param (key, value) is permitted. Specifically, logging
+a param is idempotent.
+
+Request Limits: A single JSON-serialized API request may be up to 1 MB in size and contain:
+
+* No more than 1000 metrics, params, and tags in total
+* Up to 1000 metrics
+* Up to 100 params
+* Up to 100 tags
+
+For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900
+metrics, 50 params, and 51 tags is invalid.
+
+The following limits also apply to metric, param, and tag keys and values:
+
+* Metric keys, param keys, and tag keys can be up to 250 characters in length
+* Parameter and tag values can be up to 250 characters in length
+
+:param metrics: List[:class:`Metric`] (optional)
+  Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and
+  tags in total.
+:param params: List[:class:`Param`] (optional)
+  Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and
+  tags in total.
+:param run_id: str (optional)
+  ID of the run to log under
+:param tags: List[:class:`RunTag`] (optional)
+  Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags
+  in total.
+
+
+
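A minimal sketch of one batched request that stays well under the limits above; the run ID is a placeholder, and the keyword arguments of `ml.Metric`, `ml.Param`, and `ml.RunTag` are assumed from this page's field descriptions.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import ml

w = WorkspaceClient()

# One request logging a metric, a param, and a tag together, well under
# the 1 MB / 1000-entity limits listed above.
w.experiments.log_batch(
    run_id="<run-id>",
    metrics=[ml.Metric(key="rmse", value=0.73, timestamp=1700000000000, step=1)],
    params=[ml.Param(key="model_class", value="LogisticRegression")],
    tags=[ml.RunTag(key="team", value="forecasting")],
)
```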
 
     .. py:method:: log_inputs( [, datasets: Optional[List[DatasetInput]], run_id: Optional[str]])
 
         Log inputs to a run.
-        
-        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
-        
-        :param datasets: List[:class:`DatasetInput`] (optional)
-          Dataset inputs
-        :param run_id: str (optional)
-          ID of the run to log under
-        
-        
-        
+
+**NOTE:** Experimental: This API may change or be removed in a future release without warning.
+
+:param datasets: List[:class:`DatasetInput`] (optional)
+  Dataset inputs
+:param run_id: str (optional)
+  ID of the run to log under
+
+
+
 
     .. py:method:: log_metric(key: str, value: float, timestamp: int [, run_id: Optional[str], run_uuid: Optional[str], step: Optional[int]])
 
         Log a metric.
-        
-        Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated
-        timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be
-        logged multiple times.
-        
-        :param key: str
-          Name of the metric.
-        :param value: float
-          Double value of the metric being logged.
-        :param timestamp: int
-          Unix timestamp in milliseconds at the time metric was logged.
-        :param run_id: str (optional)
-          ID of the run under which to log the metric. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run under which to log the metric. This field will be
-          removed in a future MLflow version.
-        :param step: int (optional)
-          Step at which to log the metric
-        
-        
-        
+
+Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated
+timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be
+logged multiple times.
+
+:param key: str
+  Name of the metric.
+:param value: float
+  Double value of the metric being logged.
+:param timestamp: int
+  Unix timestamp in milliseconds at the time metric was logged.
+:param run_id: str (optional)
+  ID of the run under which to log the metric. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run under which to log the metric. This field will be
+  removed in a future MLflow version.
+:param step: int (optional)
+  Step at which to log the metric
+
+
+
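A minimal sketch using the signature above; the run ID is a placeholder.

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Log one metric point; repeating the call with an increasing `step`
# accumulates a training curve, since metric values are never overwritten.
w.experiments.log_metric(key="loss",
                         value=0.42,
                         timestamp=int(time.time() * 1000),
                         run_id="<run-id>",
                         step=10)
```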
 
     .. py:method:: log_model( [, model_json: Optional[str], run_id: Optional[str]])
 
         Log a model.
-        
-        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
-        
-        :param model_json: str (optional)
-          MLmodel file in json format.
-        :param run_id: str (optional)
-          ID of the run to log under
-        
-        
-        
+
+**NOTE:** Experimental: This API may change or be removed in a future release without warning.
+
+:param model_json: str (optional)
+  MLmodel file in json format.
+:param run_id: str (optional)
+  ID of the run to log under
+
+
+
 
     .. py:method:: log_param(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]])
 
         Log a param.
-        
-        Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include
-        hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A
-        param can be logged only once for a run.
-        
-        :param key: str
-          Name of the param. Maximum size is 255 bytes.
-        :param value: str
-          String value of the param being logged. Maximum size is 500 bytes.
-        :param run_id: str (optional)
-          ID of the run under which to log the param. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be
-          removed in a future MLflow version.
-        
-        
-        
+
+Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include
+hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A
+param can be logged only once for a run.
+
+:param key: str
+  Name of the param. Maximum size is 255 bytes.
+:param value: str
+  String value of the param being logged. Maximum size is 500 bytes.
+:param run_id: str (optional)
+  ID of the run under which to log the param. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be
+  removed in a future MLflow version.
+
+
+
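A minimal sketch; the run ID is a placeholder.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Params are write-once per run: re-logging the same (key, value) pair is
# idempotent, but a different value for an existing key raises an error.
w.experiments.log_param(key="learning_rate", value="0.01", run_id="<run-id>")
```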
 
     .. py:method:: restore_experiment(experiment_id: str)
 
         Restores an experiment.
-        
-        Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics,
-        params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are
-        also restored.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted.
-        
-        :param experiment_id: str
-          ID of the associated experiment.
-        
-        
-        
+
+Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics,
+params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are
+also restored.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted.
+
+:param experiment_id: str
+  ID of the associated experiment.
+
+
+
 
     .. py:method:: restore_run(run_id: str)
 
         Restore a run.
-        
-        Restores a deleted run.
-        
-        :param run_id: str
-          ID of the run to restore.
-        
-        
-        
+
+Restores a deleted run.
+
+:param run_id: str
+  ID of the run to restore.
+
+
+
 
     .. py:method:: restore_runs(experiment_id: str, min_timestamp_millis: int [, max_runs: Optional[int]]) -> RestoreRunsResponse
 
         Restore runs by deletion time.
-        
-        Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores
-        at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
-        client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore.
-        
-        :param experiment_id: str
-          The ID of the experiment containing the runs to restore.
-        :param min_timestamp_millis: int
-          The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs
-          deleted no earlier than this timestamp are restored.
-        :param max_runs: int (optional)
-          An optional positive integer indicating the maximum number of runs to restore. The maximum allowed
-          value for max_runs is 10000.
-        
-        :returns: :class:`RestoreRunsResponse`
-        
+
+Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores
+at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
+client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore.
+
+:param experiment_id: str
+  The ID of the experiment containing the runs to restore.
+:param min_timestamp_millis: int
+  The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs
+  deleted no earlier than this timestamp are restored.
+:param max_runs: int (optional)
+  An optional positive integer indicating the maximum number of runs to restore. The maximum allowed
+  value for max_runs is 10000.
+
+:returns: :class:`RestoreRunsResponse`
+
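A sketch of computing `min_timestamp_millis` for a rolling window; the experiment ID is a placeholder.

```python
import time

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Restore up to 100 runs deleted within the last 24 hours.
cutoff = int(time.time() * 1000) - 24 * 60 * 60 * 1000
resp = w.experiments.restore_runs(experiment_id="<experiment-id>",
                                  min_timestamp_millis=cutoff,
                                  max_runs=100)
```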
 
     .. py:method:: search_experiments( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], view_type: Optional[SearchExperimentsViewType]]) -> Iterator[Experiment]
 
         Search experiments.
-        
-        Searches for experiments that satisfy specified search criteria.
-        
-        :param filter: str (optional)
-          String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")
-        :param max_results: int (optional)
-          Maximum number of experiments desired. Max threshold is 3000.
-        :param order_by: List[str] (optional)
-          List of columns for ordering search results, which can include experiment name and last updated
-          timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are
-          done by experiment id DESC.
-        :param page_token: str (optional)
-          Token indicating the page of experiments to fetch
-        :param view_type: :class:`SearchExperimentsViewType` (optional)
-          Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-        
-        :returns: Iterator over :class:`Experiment`
-        
+
+Searches for experiments that satisfy specified search criteria.
+
+:param filter: str (optional)
+  String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")
+:param max_results: int (optional)
+  Maximum number of experiments desired. Max threshold is 3000.
+:param order_by: List[str] (optional)
+  List of columns for ordering search results, which can include experiment name and last updated
+  timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are
+  done by experiment id DESC.
+:param page_token: str (optional)
+  Token indicating the page of experiments to fetch
+:param view_type: :class:`SearchExperimentsViewType` (optional)
+  Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
+
+:returns: Iterator over :class:`Experiment`
+
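A sketch combining the filter and ordering described above; `last_update_time` is an assumed column name for the "last updated timestamp" attribute.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Active experiments whose name matches a prefix, most recently
# updated first; the order_by column name is an assumption.
for exp in w.experiments.search_experiments(
        filter="name ILIKE 'forecast-%'",
        order_by=["last_update_time DESC"]):
    print(exp.experiment_id, exp.name)
```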
 
     .. py:method:: search_runs( [, experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], run_view_type: Optional[SearchRunsRunViewType]]) -> Iterator[Run]
 
         Search for runs.
-        
-        Searches for runs that satisfy expressions.
-        
-        Search expressions can use `mlflowMetric` and `mlflowParam` keys.",
-        
-        :param experiment_ids: List[str] (optional)
-          List of experiment IDs to search over.
-        :param filter: str (optional)
-          A filter expression over params, metrics, and tags, that allows returning a subset of runs. The
-          syntax is a subset of SQL that supports ANDing together binary operations between a param, metric,
-          or tag and a constant.
-          
-          Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'`
-          
-          You can select columns with special characters (hyphen, space, period, etc.) by using double quotes:
-          `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'`
-          
-          Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.
-        :param max_results: int (optional)
-          Maximum number of runs desired. Max threshold is 50000
-        :param order_by: List[str] (optional)
-          List of columns to be ordered by, including attributes, params, metrics, and tags with an optional
-          "DESC" or "ASC" annotation, where "ASC" is the default. Example: ["params.input DESC",
-          "metrics.alpha ASC", "metrics.rmse"] Tiebreaks are done by start_time DESC followed by run_id for
-          runs with the same start time (and this is the default ordering criterion if order_by is not
-          provided).
-        :param page_token: str (optional)
-          Token for the current page of runs.
-        :param run_view_type: :class:`SearchRunsRunViewType` (optional)
-          Whether to display only active, only deleted, or all runs. Defaults to only active runs.
-        
-        :returns: Iterator over :class:`Run`
-        
+
+Searches for runs that satisfy expressions.
+
+Search expressions can use `mlflowMetric` and `mlflowParam` keys.
+
+:param experiment_ids: List[str] (optional)
+  List of experiment IDs to search over.
+:param filter: str (optional)
+  A filter expression over params, metrics, and tags, that allows returning a subset of runs. The
+  syntax is a subset of SQL that supports ANDing together binary operations between a param, metric,
+  or tag and a constant.
+  
+  Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'`
+  
+  You can select columns with special characters (hyphen, space, period, etc.) by using double quotes:
+  `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'`
+  
+  Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.
+:param max_results: int (optional)
+  Maximum number of runs desired. Max threshold is 50000.
+:param order_by: List[str] (optional)
+  List of columns to be ordered by, including attributes, params, metrics, and tags with an optional
+  "DESC" or "ASC" annotation, where "ASC" is the default. Example: ["params.input DESC",
+  "metrics.alpha ASC", "metrics.rmse"] Tiebreaks are done by start_time DESC followed by run_id for
+  runs with the same start time (and this is the default ordering criterion if order_by is not
+  provided).
+:param page_token: str (optional)
+  Token for the current page of runs.
+:param run_view_type: :class:`SearchRunsRunViewType` (optional)
+  Whether to display only active, only deleted, or all runs. Defaults to only active runs.
+
+:returns: Iterator over :class:`Run`
+
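A sketch reusing the filter and `order_by` examples given above; the experiment ID is a placeholder and `ACTIVE_ONLY` is an assumed member name of `SearchRunsRunViewType`.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import ml

w = WorkspaceClient()

# Filter and ordering strings come from the parameter examples above.
for run in w.experiments.search_runs(
        experiment_ids=["<experiment-id>"],
        filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'",
        order_by=["metrics.rmse ASC"],
        run_view_type=ml.SearchRunsRunViewType.ACTIVE_ONLY):
    print(run.info.run_id)  # Run.info access pattern assumed
```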
 
     .. py:method:: set_experiment_tag(experiment_id: str, key: str, value: str)
 
         Set a tag.
-        
-        Sets a tag on an experiment. Experiment tags are metadata that can be updated.
-        
-        :param experiment_id: str
-          ID of the experiment under which to log the tag. Must be provided.
-        :param key: str
-          Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
-          support key values up to 250 bytes in size.
-        :param value: str
-          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-          are guaranteed to support key values up to 5000 bytes in size.
-        
-        
-        
+
+Sets a tag on an experiment. Experiment tags are metadata that can be updated.
+
+:param experiment_id: str
+  ID of the experiment under which to log the tag. Must be provided.
+:param key: str
+  Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
+  support key values up to 250 bytes in size.
+:param value: str
+  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+  are guaranteed to support key values up to 5000 bytes in size.
+
+
+
 
     .. py:method:: set_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions
 
         Set experiment permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param experiment_id: str
-          The experiment for which to get or manage permissions.
-        :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-        
-        :returns: :class:`ExperimentPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param experiment_id: str
+  The experiment for which to get or manage permissions.
+:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
+
+:returns: :class:`ExperimentPermissions`
+
 
     .. py:method:: set_tag(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]])
 
         Set a tag.
-        
-        Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.
-        
-        :param key: str
-          Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
-          support key values up to 250 bytes in size.
-        :param value: str
-          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-          are guaranteed to support key values up to 5000 bytes in size.
-        :param run_id: str (optional)
-          ID of the run under which to log the tag. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be
-          removed in a future MLflow version.
-        
-        
-        
+
+Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.
+
+:param key: str
+  Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
+  support key values up to 250 bytes in size.
+:param value: str
+  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+  are guaranteed to support key values up to 5000 bytes in size.
+:param run_id: str (optional)
+  ID of the run under which to log the tag. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be
+  removed in a future MLflow version.
+
+
+
 
     .. py:method:: update_experiment(experiment_id: str [, new_name: Optional[str]])
 
@@ -630,29 +630,29 @@
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Update an experiment.
-        
-        Updates experiment metadata.
-        
-        :param experiment_id: str
-          ID of the associated experiment.
-        :param new_name: str (optional)
-          If provided, the experiment's name is changed to the new name. The new name must be unique.
-        
-        
-        
+
+Updates experiment metadata.
+
+:param experiment_id: str
+  ID of the associated experiment.
+:param new_name: str (optional)
+  If provided, the experiment's name is changed to the new name. The new name must be unique.
+
+
+
 
     .. py:method:: update_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions
 
         Update experiment permissions.
-        
-        Updates the permissions on an experiment. Experiments can inherit permissions from their root object.
-        
-        :param experiment_id: str
-          The experiment for which to get or manage permissions.
-        :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-        
-        :returns: :class:`ExperimentPermissions`
-        
+
+Updates the permissions on an experiment. Experiments can inherit permissions from their root object.
+
+:param experiment_id: str
+  The experiment for which to get or manage permissions.
+:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
+
+:returns: :class:`ExperimentPermissions`
+
 
     .. py:method:: update_run( [, end_time: Optional[int], run_id: Optional[str], run_uuid: Optional[str], status: Optional[UpdateRunStatus]]) -> UpdateRunResponse
 
@@ -680,18 +680,17 @@
             w.experiments.delete_run(run_id=created.run.info.run_id)
 
         Update a run.
-        
-        Updates run metadata.
-        
-        :param end_time: int (optional)
-          Unix timestamp in milliseconds of when the run ended.
-        :param run_id: str (optional)
-          ID of the run to update. Must be provided.
-        :param run_uuid: str (optional)
-          [Deprecated, use run_id instead] ID of the run to update.. This field will be removed in a future
-          MLflow version.
-        :param status: :class:`UpdateRunStatus` (optional)
-          Updated status of the run.
-        
-        :returns: :class:`UpdateRunResponse`
-        
\ No newline at end of file
+
+Updates run metadata.
+
+:param end_time: int (optional)
+  Unix timestamp in milliseconds of when the run ended.
+:param run_id: str (optional)
+  ID of the run to update. Must be provided.
+:param run_uuid: str (optional)
+  [Deprecated, use run_id instead] ID of the run to update. This field will be removed in a future
+  MLflow version.
+:param status: :class:`UpdateRunStatus` (optional)
+  Updated status of the run.
+
+:returns: :class:`UpdateRunResponse`
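A sketch of closing out a run; `FINISHED` is assumed to be a member of `ml.UpdateRunStatus`, matching the standard MLflow run status names, and the run ID is a placeholder.

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import ml

w = WorkspaceClient()

# Mark the run as finished and stamp its end time in epoch milliseconds.
w.experiments.update_run(run_id="<run-id>",
                         status=ml.UpdateRunStatus.FINISHED,
                         end_time=int(time.time() * 1000))
```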
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index d08a85415..efc7475c5 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -5,40 +5,40 @@
 .. py:class:: ModelRegistryAPI
 
     Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends using
-    [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides
-    centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry
-    will be deprecated in the future.
-    
-    The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to
-    manage the full lifecycle of MLflow Models.
+[Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides
+centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry
+will be deprecated in the future.
+
+The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to
+manage the full lifecycle of MLflow Models.
 
     .. py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse
 
         Approve transition request.
-        
-        Approves a model version stage transition request.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param stage: :class:`Stage`
-          Target stage of the transition. Valid values are:
-          
-          * `None`: The initial stage of a model version.
-          
-          * `Staging`: Staging or pre-production stage.
-          
-          * `Production`: Production stage.
-          
-          * `Archived`: Archived stage.
-        :param archive_existing_versions: bool
-          Specifies whether to archive all current model versions in the target stage.
-        :param comment: str (optional)
-          User-provided comment on the action.
-        
-        :returns: :class:`ApproveTransitionRequestResponse`
-        
+
+Approves a model version stage transition request.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param stage: :class:`Stage`
+  Target stage of the transition. Valid values are:
+  
+  * `None`: The initial stage of a model version.
+  
+  * `Staging`: Staging or pre-production stage.
+  
+  * `Production`: Production stage.
+  
+  * `Archived`: Archived stage.
+:param archive_existing_versions: bool
+  Specifies whether to archive all current model versions in the target stage.
+:param comment: str (optional)
+  User-provided comment on the action.
+
+:returns: :class:`ApproveTransitionRequestResponse`
+
 
     .. py:method:: create_comment(name: str, version: str, comment: str) -> CreateCommentResponse
 
@@ -65,19 +65,19 @@
             w.model_registry.delete_comment(id=created.comment.id)
 
         Post a comment.
-        
-        Posts a comment on a model version. A comment can be submitted either by a user or programmatically to
-        display relevant information about the model. For example, test results or deployment errors.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param comment: str
-          User-provided comment on the action.
-        
-        :returns: :class:`CreateCommentResponse`
-        
+
+Posts a comment on a model version. A comment can be submitted either by a user or programmatically to
+display relevant information about the model. For example, test results or deployment errors.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param comment: str
+  User-provided comment on the action.
+
+:returns: :class:`CreateCommentResponse`
+
 
     .. py:method:: create_model(name: str [, description: Optional[str], tags: Optional[List[ModelTag]]]) -> CreateModelResponse
 
@@ -95,20 +95,20 @@
             model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
 
         Create a model.
-        
-        Creates a new registered model with the name specified in the request body.
-        
-        Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
-        
-        :param name: str
-          Register models under this name
-        :param description: str (optional)
-          Optional description for registered model.
-        :param tags: List[:class:`ModelTag`] (optional)
-          Additional metadata for registered model.
-        
-        :returns: :class:`CreateModelResponse`
-        
+
+Creates a new registered model with the name specified in the request body.
+
+Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
+
+:param name: str
+  Register models under this name
+:param description: str (optional)
+  Optional description for registered model.
+:param tags: List[:class:`ModelTag`] (optional)
+  Additional metadata for registered model.
+
+:returns: :class:`CreateModelResponse`
+
 
     .. py:method:: create_model_version(name: str, source: str [, description: Optional[str], run_id: Optional[str], run_link: Optional[str], tags: Optional[List[ModelVersionTag]]]) -> CreateModelVersionResponse
 
@@ -128,52 +128,52 @@
             mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
         Create a model version.
-        
-        Creates a model version.
-        
-        :param name: str
-          Register model under this name
-        :param source: str
-          URI indicating the location of the model artifacts.
-        :param description: str (optional)
-          Optional description for model version.
-        :param run_id: str (optional)
-          MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking
-          server
-        :param run_link: str (optional)
-          MLflow run link - this is the exact link of the run that generated this model version, potentially
-          hosted at another instance of MLflow.
-        :param tags: List[:class:`ModelVersionTag`] (optional)
-          Additional metadata for model version.
-        
-        :returns: :class:`CreateModelVersionResponse`
-        
+
+Creates a model version.
+
+:param name: str
+  Register model under this name
+:param source: str
+  URI indicating the location of the model artifacts.
+:param description: str (optional)
+  Optional description for model version.
+:param run_id: str (optional)
+  MLflow run ID for correlation, if `source` was generated by an experiment run in the MLflow
+  tracking server.
+:param run_link: str (optional)
+  MLflow run link - this is the exact link of the run that generated this model version, potentially
+  hosted at another instance of MLflow.
+:param tags: List[:class:`ModelVersionTag`] (optional)
+  Additional metadata for model version.
+
+:returns: :class:`CreateModelVersionResponse`
+
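A sketch of registering a version from run artifacts; the `source` value follows the `dbfs:/tmp` pattern in the example above, the name and run ID are placeholders, and the `mv.model_version.version` response path is assumed.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Register a new version whose artifacts were produced by a tracking run.
mv = w.model_registry.create_model_version(name="<registered-model-name>",
                                           source="dbfs:/tmp",
                                           run_id="<run-id>",
                                           description="nightly retrain")
print(mv.model_version.version)  # response access path assumed
```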
 
     .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse
 
         Make a transition request.
-        
-        Creates a model version stage transition request.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param stage: :class:`Stage`
-          Target stage of the transition. Valid values are:
-          
-          * `None`: The initial stage of a model version.
-          
-          * `Staging`: Staging or pre-production stage.
-          
-          * `Production`: Production stage.
-          
-          * `Archived`: Archived stage.
-        :param comment: str (optional)
-          User-provided comment on the action.
-        
-        :returns: :class:`CreateTransitionRequestResponse`
-        
+
+Creates a model version stage transition request.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param stage: :class:`Stage`
+  Target stage of the transition. Valid values are:
+  
+  * `None`: The initial stage of a model version.
+  
+  * `Staging`: Staging or pre-production stage.
+  
+  * `Production`: Production stage.
+  
+  * `Archived`: Archived stage.
+:param comment: str (optional)
+  User-provided comment on the action.
+
+:returns: :class:`CreateTransitionRequestResponse`
+
 
     .. py:method:: create_webhook(events: List[RegistryWebhookEvent] [, description: Optional[str], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], model_name: Optional[str], status: Optional[RegistryWebhookStatus]]) -> CreateWebhookResponse
 
@@ -197,183 +197,183 @@
             w.model_registry.delete_webhook(id=created.webhook.id)
 
         Create a webhook.
-        
-        **NOTE**: This endpoint is in Public Preview.
-        
-        Creates a registry webhook.
-        
-        :param events: List[:class:`RegistryWebhookEvent`]
-          Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
-          created for the associated model.
-          
-          * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-          
-          * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-          
-          * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-          
-          * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
-          specified for a registry-wide webhook, which can be created by not specifying a model name in the
-          create request.
-          
-          * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-          
-          * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
-          staging.
-          
-          * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
-          production.
-          
-          * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
-        :param description: str (optional)
-          User-specified description for the webhook.
-        :param http_url_spec: :class:`HttpUrlSpec` (optional)
-        :param job_spec: :class:`JobSpec` (optional)
-        :param model_name: str (optional)
-          Name of the model whose events would trigger this webhook.
-        :param status: :class:`RegistryWebhookStatus` (optional)
-          Enable or disable triggering the webhook, or put the webhook into test mode. The default is
-          `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-          
-          * `DISABLED`: Webhook is not triggered.
-          
-          * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
-          event.
-        
-        :returns: :class:`CreateWebhookResponse`
-        
+
+**NOTE**: This endpoint is in Public Preview.
+
+Creates a registry webhook.
+
+:param events: List[:class:`RegistryWebhookEvent`]
+  Events that can trigger a registry webhook:
+  
+  * `MODEL_VERSION_CREATED`: A new model version was created for the associated model.
+  
+  * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
+  
+  * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
+  
+  * `COMMENT_CREATED`: A user wrote a comment on a registered model.
+  
+  * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
+  specified for a registry-wide webhook, which can be created by not specifying a model name in the
+  create request.
+  
+  * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
+  
+  * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
+  staging.
+  
+  * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
+  production.
+  
+  * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
+:param description: str (optional)
+  User-specified description for the webhook.
+:param http_url_spec: :class:`HttpUrlSpec` (optional)
+:param job_spec: :class:`JobSpec` (optional)
+:param model_name: str (optional)
+  Name of the model whose events would trigger this webhook.
+:param status: :class:`RegistryWebhookStatus` (optional)
+  Enable or disable triggering the webhook, or put the webhook into test mode. The default is
+  `ACTIVE`:
+  
+  * `ACTIVE`: Webhook is triggered when an associated event happens.
+  
+  * `DISABLED`: Webhook is not triggered.
+  
+  * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
+  event.
+
+:returns: :class:`CreateWebhookResponse`
+
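A sketch of a registry-wide webhook (no `model_name`) kept in test mode; the `HttpUrlSpec` field name `url` and the enum member spellings are assumptions based on the parameter descriptions above, while the `created.webhook.id` access follows the delete example on this page.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import ml

w = WorkspaceClient()

# Fire on any new model version, but only via the test endpoint
# (TEST_MODE), so no real events trigger the hook.
created = w.model_registry.create_webhook(
    events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED],
    description="notify CI on new model versions",
    http_url_spec=ml.HttpUrlSpec(url="https://example.com/hook"),
    status=ml.RegistryWebhookStatus.TEST_MODE,
)
w.model_registry.delete_webhook(id=created.webhook.id)
```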
 
     .. py:method:: delete_comment(id: str)
 
         Delete a comment.
-        
-        Deletes a comment on a model version.
-        
-        :param id: str
-        
-        
-        
+
+Deletes a comment on a model version.
+
+:param id: str
+
+
+
 
     .. py:method:: delete_model(name: str)
 
         Delete a model.
-        
-        Deletes a registered model.
-        
-        :param name: str
-          Registered model unique name identifier.
-        
-        
-        
+
+Deletes a registered model.
+
+:param name: str
+  Registered model unique name identifier.
+
+
+
 
     .. py:method:: delete_model_tag(name: str, key: str)
 
         Delete a model tag.
-        
-        Deletes the tag for a registered model.
-        
-        :param name: str
-          Name of the registered model that the tag was logged under.
-        :param key: str
-          Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
-          is 250 bytes.
-        
-        
-        
+
+Deletes the tag for a registered model.
+
+:param name: str
+  Name of the registered model that the tag was logged under.
+:param key: str
+  Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
+  is 250 bytes.
+
+
+
 
     .. py:method:: delete_model_version(name: str, version: str)
 
         Delete a model version.
-        
-        Deletes a model version.
-        
-        :param name: str
-          Name of the registered model
-        :param version: str
-          Model version number
-        
-        
-        
+
+Deletes a model version.
+
+:param name: str
+  Name of the registered model
+:param version: str
+  Model version number
+
+
+
 
     .. py:method:: delete_model_version_tag(name: str, version: str, key: str)
 
         Delete a model version tag.
-        
-        Deletes a model version tag.
-        
-        :param name: str
-          Name of the registered model that the tag was logged under.
-        :param version: str
-          Model version number that the tag was logged under.
-        :param key: str
-          Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
-          is 250 bytes.
-        
-        
-        
+
+Deletes a model version tag.
+
+:param name: str
+  Name of the registered model that the tag was logged under.
+:param version: str
+  Model version number that the tag was logged under.
+:param key: str
+  Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
+  is 250 bytes.
+
+
+
 
     .. py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]])
 
         Delete a transition request.
-        
-        Cancels a model version stage transition request.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param stage: :class:`DeleteTransitionRequestStage`
-          Target stage of the transition request. Valid values are:
-          
-          * `None`: The initial stage of a model version.
-          
-          * `Staging`: Staging or pre-production stage.
-          
-          * `Production`: Production stage.
-          
-          * `Archived`: Archived stage.
-        :param creator: str
-          Username of the user who created this request. Of the transition requests matching the specified
-          details, only the one transition created by this user will be deleted.
-        :param comment: str (optional)
-          User-provided comment on the action.
-        
-        
-        
+
+Cancels a model version stage transition request.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param stage: :class:`DeleteTransitionRequestStage`
+  Target stage of the transition request. Valid values are:
+  
+  * `None`: The initial stage of a model version.
+  
+  * `Staging`: Staging or pre-production stage.
+  
+  * `Production`: Production stage.
+  
+  * `Archived`: Archived stage.
+:param creator: str
+  Username of the user who created this request. Of the transition requests matching the specified
+  details, only the one created by this user will be deleted.
+:param comment: str (optional)
+  User-provided comment on the action.
+
+
+
 
     .. py:method:: delete_webhook( [, id: Optional[str]])
 
         Delete a webhook.
-        
-        **NOTE:** This endpoint is in Public Preview.
-        
-        Deletes a registry webhook.
-        
-        :param id: str (optional)
-          Webhook ID required to delete a registry webhook.
-        
-        
-        
+
+**NOTE:** This endpoint is in Public Preview.
+
+Deletes a registry webhook.
+
+:param id: str (optional)
+  Webhook ID required to delete a registry webhook.
+
+
+
 
     .. py:method:: get_latest_versions(name: str [, stages: Optional[List[str]]]) -> Iterator[ModelVersion]
 
         Get the latest version.
-        
-        Gets the latest version of a registered model.
-        
-        :param name: str
-          Registered model unique name identifier.
-        :param stages: List[str] (optional)
-          List of stages.
-        
-        :returns: Iterator over :class:`ModelVersion`
-        
+
+Gets the latest version of a registered model.
+
+:param name: str
+  Registered model unique name identifier.
+:param stages: List[str] (optional)
+  List of stages.
+
+:returns: Iterator over :class:`ModelVersion`
+
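A sketch using the `None`/`Staging`/`Production`/`Archived` stage names listed elsewhere on this page; the model name is a placeholder and the `ModelVersion` attribute names are assumed.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Latest version per requested stage.
for mv in w.model_registry.get_latest_versions(name="<registered-model-name>",
                                               stages=["Staging", "Production"]):
    print(mv.version, mv.current_stage)  # attribute names assumed
```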
 
     .. py:method:: get_model(name: str) -> GetModelResponse
 
@@ -393,71 +393,71 @@
             model = w.model_registry.get_model(name=created.registered_model.name)
 
         Get model.
-        
-        Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also
-        returns the model's Databricks workspace ID and the permission level of the requesting user on the
-        model.
-        
-        [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel
-        
-        :param name: str
-          Registered model unique name identifier.
-        
-        :returns: :class:`GetModelResponse`
-        
+
+Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also
+returns the model's Databricks workspace ID and the permission level of the requesting user on the
+model.
+
+[MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel
+
+:param name: str
+  Registered model unique name identifier.
+
+:returns: :class:`GetModelResponse`
+
 
     .. py:method:: get_model_version(name: str, version: str) -> GetModelVersionResponse
 
         Get a model version.
-        
-        Get a model version.
-        
-        :param name: str
-          Name of the registered model
-        :param version: str
-          Model version number
-        
-        :returns: :class:`GetModelVersionResponse`
-        
+
+Get a model version.
+
+:param name: str
+  Name of the registered model
+:param version: str
+  Model version number
+
+:returns: :class:`GetModelVersionResponse`
+
 
     .. py:method:: get_model_version_download_uri(name: str, version: str) -> GetModelVersionDownloadUriResponse
 
         Get a model version URI.
-        
-        Gets a URI to download the model version.
-        
-        :param name: str
-          Name of the registered model
-        :param version: str
-          Model version number
-        
-        :returns: :class:`GetModelVersionDownloadUriResponse`
-        
+
+Gets a URI to download the model version.
+
+:param name: str
+  Name of the registered model
+:param version: str
+  Model version number
+
+:returns: :class:`GetModelVersionDownloadUriResponse`
+
 
     .. py:method:: get_permission_levels(registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse
 
         Get registered model permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param registered_model_id: str
-          The registered model for which to get or manage permissions.
-        
-        :returns: :class:`GetRegisteredModelPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param registered_model_id: str
+  The registered model for which to get or manage permissions.
+
+:returns: :class:`GetRegisteredModelPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(registered_model_id: str) -> RegisteredModelPermissions
 
         Get registered model permissions.
-        
-        Gets the permissions of a registered model. Registered models can inherit permissions from their root
-        object.
-        
-        :param registered_model_id: str
-          The registered model for which to get or manage permissions.
-        
-        :returns: :class:`RegisteredModelPermissions`
-        
+
+Gets the permissions of a registered model. Registered models can inherit permissions from their root
+object.
+
+:param registered_model_id: str
+  The registered model for which to get or manage permissions.
+
+:returns: :class:`RegisteredModelPermissions`
+
 
     .. py:method:: list_models( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[Model]
 
@@ -474,30 +474,30 @@
             all = w.model_registry.list_models(ml.ListModelsRequest())
 
         List models.
-        
-        Lists all available registered models, up to the limit specified in __max_results__.
-        
-        :param max_results: int (optional)
-          Maximum number of registered models desired. Max threshold is 1000.
-        :param page_token: str (optional)
-          Pagination token to go to the next page based on a previous query.
-        
-        :returns: Iterator over :class:`Model`
-        
+
+Lists all available registered models, up to the limit specified in __max_results__.
+
+:param max_results: int (optional)
+  Maximum number of registered models desired. Max threshold is 1000.
+:param page_token: str (optional)
+  Pagination token to go to the next page based on a previous query.
+
+:returns: Iterator over :class:`Model`
+
 
     .. py:method:: list_transition_requests(name: str, version: str) -> Iterator[Activity]
 
         List transition requests.
-        
-        Gets a list of all open stage transition requests for the model version.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        
-        :returns: Iterator over :class:`Activity`
-        
+
+Gets a list of all open stage transition requests for the model version.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+
+:returns: Iterator over :class:`Activity`
+
 
     .. py:method:: list_webhooks( [, events: Optional[List[RegistryWebhookEvent]], model_name: Optional[str], page_token: Optional[str]]) -> Iterator[RegistryWebhook]
 
@@ -514,207 +514,207 @@
             all = w.model_registry.list_webhooks(ml.ListWebhooksRequest())
 
         List registry webhooks.
-        
-        **NOTE:** This endpoint is in Public Preview.
-        
-        Lists all registry webhooks.
-        
-        :param events: List[:class:`RegistryWebhookEvent`] (optional)
-          If `events` is specified, any webhook with one or more of the specified trigger events is included
-          in the output. If `events` is not specified, webhooks of all event types are included in the output.
-        :param model_name: str (optional)
-          If not specified, all webhooks associated with the specified events are listed, regardless of their
-          associated model.
-        :param page_token: str (optional)
-          Token indicating the page of artifact results to fetch
-        
-        :returns: Iterator over :class:`RegistryWebhook`
-        
+
+**NOTE:** This endpoint is in Public Preview.
+
+Lists all registry webhooks.
+
+:param events: List[:class:`RegistryWebhookEvent`] (optional)
+  If `events` is specified, any webhook with one or more of the specified trigger events is included
+  in the output. If `events` is not specified, webhooks of all event types are included in the output.
+:param model_name: str (optional)
+  If not specified, all webhooks associated with the specified events are listed, regardless of their
+  associated model.
+:param page_token: str (optional)
+  Token indicating the page of artifact results to fetch
+
+:returns: Iterator over :class:`RegistryWebhook`
+
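+A sketch of filtering webhooks by trigger event, using one of the
+:class:`RegistryWebhookEvent` values:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import ml
+
+    w = WorkspaceClient()
+    # Only webhooks that fire when a new model version is created
+    for hook in w.model_registry.list_webhooks(events=[ml.RegistryWebhookEvent.MODEL_VERSION_CREATED]):
+        print(hook.id, hook.status)
+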
 
     .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse
 
         Reject a transition request.
-        
-        Rejects a model version stage transition request.
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param stage: :class:`Stage`
-          Target stage of the transition. Valid values are:
-          
-          * `None`: The initial stage of a model version.
-          
-          * `Staging`: Staging or pre-production stage.
-          
-          * `Production`: Production stage.
-          
-          * `Archived`: Archived stage.
-        :param comment: str (optional)
-          User-provided comment on the action.
-        
-        :returns: :class:`RejectTransitionRequestResponse`
-        
+
+Rejects a model version stage transition request.
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param stage: :class:`Stage`
+  Target stage of the transition. Valid values are:
+  
+  * `None`: The initial stage of a model version.
+  
+  * `Staging`: Staging or pre-production stage.
+  
+  * `Production`: Production stage.
+  
+  * `Archived`: Archived stage.
+:param comment: str (optional)
+  User-provided comment on the action.
+
+:returns: :class:`RejectTransitionRequestResponse`
+
 
     .. py:method:: rename_model(name: str [, new_name: Optional[str]]) -> RenameModelResponse
 
         Rename a model.
-        
-        Renames a registered model.
-        
-        :param name: str
-          Registered model unique name identifier.
-        :param new_name: str (optional)
-          If provided, updates the name for this `registered_model`.
-        
-        :returns: :class:`RenameModelResponse`
-        
+
+Renames a registered model.
+
+:param name: str
+  Registered model unique name identifier.
+:param new_name: str (optional)
+  If provided, updates the name for this `registered_model`.
+
+:returns: :class:`RenameModelResponse`
+
 
     .. py:method:: search_model_versions( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[ModelVersion]
 
         Searches model versions.
-        
-        Searches for specific model versions based on the supplied __filter__.
-        
-        :param filter: str (optional)
-          String filter condition, like "name='my-model-name'". Must be a single boolean condition, with
-          string values wrapped in single quotes.
-        :param max_results: int (optional)
-          Maximum number of models desired. Max threshold is 10K.
-        :param order_by: List[str] (optional)
-          List of columns to be ordered by including model name, version, stage with an optional "DESC" or
-          "ASC" annotation, where "ASC" is the default. Tiebreaks are done by latest stage transition
-          timestamp, followed by name ASC, followed by version DESC.
-        :param page_token: str (optional)
-          Pagination token to go to next page based on previous search query.
-        
-        :returns: Iterator over :class:`ModelVersion`
-        
+
+Searches for specific model versions based on the supplied __filter__.
+
+:param filter: str (optional)
+  String filter condition, like "name='my-model-name'". Must be a single boolean condition, with
+  string values wrapped in single quotes.
+:param max_results: int (optional)
+  Maximum number of models desired. Max threshold is 10K.
+:param order_by: List[str] (optional)
+  List of columns to be ordered by including model name, version, stage with an optional "DESC" or
+  "ASC" annotation, where "ASC" is the default. Tiebreaks are done by latest stage transition
+  timestamp, followed by name ASC, followed by version DESC.
+:param page_token: str (optional)
+  Pagination token to go to next page based on previous search query.
+
+:returns: Iterator over :class:`ModelVersion`
+
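+For example, to list all versions of one model, newest first (a sketch; the model name
+is hypothetical and the filter follows the syntax described above):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for mv in w.model_registry.search_model_versions(filter="name='my-model'",
+                                                     order_by=["version DESC"]):
+        print(mv.version, mv.current_stage)
+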
 
     .. py:method:: search_models( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[Model]
 
         Search models.
-        
-        Search for registered models based on the specified __filter__.
-        
-        :param filter: str (optional)
-          String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically
-          as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single
-          quotes.
-        :param max_results: int (optional)
-          Maximum number of models desired. Default is 100. Max threshold is 1000.
-        :param order_by: List[str] (optional)
-          List of columns for ordering search results, which can include model name and last updated timestamp
-          with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by model
-          name ASC.
-        :param page_token: str (optional)
-          Pagination token to go to the next page based on a previous search query.
-        
-        :returns: Iterator over :class:`Model`
-        
+
+Search for registered models based on the specified __filter__.
+
+:param filter: str (optional)
+  String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically
+  as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single
+  quotes.
+:param max_results: int (optional)
+  Maximum number of models desired. Default is 100. Max threshold is 1000.
+:param order_by: List[str] (optional)
+  List of columns for ordering search results, which can include model name and last updated timestamp
+  with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by model
+  name ASC.
+:param page_token: str (optional)
+  Pagination token to go to the next page based on a previous search query.
+
+:returns: Iterator over :class:`Model`
+
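+A sketch of a substring search (per the note above, the backend expands the pattern to
+`%my-model-name%`):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for m in w.model_registry.search_models(filter="name LIKE 'my-model-name'", max_results=100):
+        print(m.name)
+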
 
     .. py:method:: set_model_tag(name: str, key: str, value: str)
 
         Set a tag.
-        
-        Sets a tag on a registered model.
-        
-        :param name: str
-          Unique name of the model.
-        :param key: str
-          Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
-          its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
-          to support key values up to 250 bytes in size.
-        :param value: str
-          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-          are guaranteed to support key values up to 5000 bytes in size.
-        
-        
-        
+
+Sets a tag on a registered model.
+
+:param name: str
+  Unique name of the model.
+:param key: str
+  Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
+  its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
+  to support key values up to 250 bytes in size.
+:param value: str
+  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+  are guaranteed to support key values up to 5000 bytes in size.
+
+
+
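+For instance (the tag key and value are illustrative; note the byte-size limits above):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Overwrites the tag value if the key already exists
+    w.model_registry.set_model_tag(name="my-model", key="team", value="data-science")
+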
 
     .. py:method:: set_model_version_tag(name: str, version: str, key: str, value: str)
 
         Set a version tag.
-        
-        Sets a model version tag.
-        
-        :param name: str
-          Unique name of the model.
-        :param version: str
-          Model version number.
-        :param key: str
-          Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
-          its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
-          to support key values up to 250 bytes in size.
-        :param value: str
-          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-          are guaranteed to support key values up to 5000 bytes in size.
-        
-        
-        
+
+Sets a model version tag.
+
+:param name: str
+  Unique name of the model.
+:param version: str
+  Model version number.
+:param key: str
+  Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
+  its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
+  to support key values up to 250 bytes in size.
+:param value: str
+  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+  are guaranteed to support key values up to 5000 bytes in size.
+
+
+
 
     .. py:method:: set_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions
 
         Set registered model permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param registered_model_id: str
-          The registered model for which to get or manage permissions.
-        :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-        
-        :returns: :class:`RegisteredModelPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param registered_model_id: str
+  The registered model for which to get or manage permissions.
+:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
+
+:returns: :class:`RegisteredModelPermissions`
+
 
     .. py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse
 
         Test a webhook.
-        
-        **NOTE:** This endpoint is in Public Preview.
-        
-        Tests a registry webhook.
-        
-        :param id: str
-          Webhook ID
-        :param event: :class:`RegistryWebhookEvent` (optional)
-          If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the
-          test trigger uses a randomly chosen event associated with the webhook.
-        
-        :returns: :class:`TestRegistryWebhookResponse`
-        
+
+**NOTE:** This endpoint is in Public Preview.
+
+Tests a registry webhook.
+
+:param id: str
+  Webhook ID
+:param event: :class:`RegistryWebhookEvent` (optional)
+  If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the
+  test trigger uses a randomly chosen event associated with the webhook.
+
+:returns: :class:`TestRegistryWebhookResponse`
+
 
     .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse
 
         Transition a stage.
-        
-        Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint]
-        that also accepts a comment associated with the transition to be recorded.",
-        
-        [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
-        
-        :param name: str
-          Name of the model.
-        :param version: str
-          Version of the model.
-        :param stage: :class:`Stage`
-          Target stage of the transition. Valid values are:
-          
-          * `None`: The initial stage of a model version.
-          
-          * `Staging`: Staging or pre-production stage.
-          
-          * `Production`: Production stage.
-          
-          * `Archived`: Archived stage.
-        :param archive_existing_versions: bool
-          Specifies whether to archive all current model versions in the target stage.
-        :param comment: str (optional)
-          User-provided comment on the action.
-        
-        :returns: :class:`TransitionStageResponse`
-        
+
+Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint]
+that also accepts a comment associated with the transition to be recorded.
+
+[MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
+
+:param name: str
+  Name of the model.
+:param version: str
+  Version of the model.
+:param stage: :class:`Stage`
+  Target stage of the transition. Valid values are:
+  
+  * `None`: The initial stage of a model version.
+  
+  * `Staging`: Staging or pre-production stage.
+  
+  * `Production`: Production stage.
+  
+  * `Archived`: Archived stage.
+:param archive_existing_versions: bool
+  Specifies whether to archive all current model versions in the target stage.
+:param comment: str (optional)
+  User-provided comment on the action.
+
+:returns: :class:`TransitionStageResponse`
+
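+A sketch of promoting a version to production while archiving whatever currently holds
+that stage (model name, version, and comment are hypothetical):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import ml
+
+    w = WorkspaceClient()
+    w.model_registry.transition_stage(name="my-model",
+                                      version="2",
+                                      stage=ml.Stage.PRODUCTION,
+                                      archive_existing_versions=True,
+                                      comment="promoting after validation")
+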
 
     .. py:method:: update_comment(id: str, comment: str) -> UpdateCommentResponse
 
@@ -743,16 +743,16 @@
             w.model_registry.delete_comment(id=created.comment.id)
 
         Update a comment.
-        
-        Post an edit to a comment on a model version.
-        
-        :param id: str
-          Unique identifier of an activity
-        :param comment: str
-          User-provided comment on the action.
-        
-        :returns: :class:`UpdateCommentResponse`
-        
+
+Post an edit to a comment on a model version.
+
+:param id: str
+  Unique identifier of an activity
+:param comment: str
+  User-provided comment on the action.
+
+:returns: :class:`UpdateCommentResponse`
+
 
     .. py:method:: update_model(name: str [, description: Optional[str]])
 
@@ -776,16 +776,16 @@
                                                   version=created.model_version.version)
 
         Update model.
-        
-        Updates a registered model.
-        
-        :param name: str
-          Registered model unique name identifier.
-        :param description: str (optional)
-          If provided, updates the description for this `registered_model`.
-        
-        
-        
+
+Updates a registered model.
+
+:param name: str
+  Registered model unique name identifier.
+:param description: str (optional)
+  If provided, updates the description for this `registered_model`.
+
+
+
 
     .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]])
 
@@ -809,32 +809,32 @@
                                                   version=created.model_version.version)
 
         Update model version.
-        
-        Updates the model version.
-        
-        :param name: str
-          Name of the registered model
-        :param version: str
-          Model version number
-        :param description: str (optional)
-          If provided, updates the description for this `registered_model`.
-        
-        
-        
+
+Updates the model version.
+
+:param name: str
+  Name of the registered model
+:param version: str
+  Model version number
+:param description: str (optional)
+  If provided, updates the description for this `registered_model`.
+
+
+
 
     .. py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions
 
         Update registered model permissions.
-        
-        Updates the permissions on a registered model. Registered models can inherit permissions from their
-        root object.
-        
-        :param registered_model_id: str
-          The registered model for which to get or manage permissions.
-        :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-        
-        :returns: :class:`RegisteredModelPermissions`
-        
+
+Updates the permissions on a registered model. Registered models can inherit permissions from their
+root object.
+
+:param registered_model_id: str
+  The registered model for which to get or manage permissions.
+:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
+
+:returns: :class:`RegisteredModelPermissions`
+
 
     .. py:method:: update_webhook(id: str [, description: Optional[str], events: Optional[List[RegistryWebhookEvent]], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], status: Optional[RegistryWebhookStatus]])
 
@@ -860,54 +860,53 @@
             w.model_registry.delete_webhook(id=created.webhook.id)
 
         Update a webhook.
-        
-        **NOTE:** This endpoint is in Public Preview.
-        
-        Updates a registry webhook.
-        
-        :param id: str
-          Webhook ID
-        :param description: str (optional)
-          User-specified description for the webhook.
-        :param events: List[:class:`RegistryWebhookEvent`] (optional)
-          Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
-          created for the associated model.
-          
-          * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-          
-          * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-          
-          * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-          
-          * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
-          specified for a registry-wide webhook, which can be created by not specifying a model name in the
-          create request.
-          
-          * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-          
-          * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-          
-          * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
-          staging.
-          
-          * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
-          production.
-          
-          * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
-        :param http_url_spec: :class:`HttpUrlSpec` (optional)
-        :param job_spec: :class:`JobSpec` (optional)
-        :param status: :class:`RegistryWebhookStatus` (optional)
-          Enable or disable triggering the webhook, or put the webhook into test mode. The default is
-          `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-          
-          * `DISABLED`: Webhook is not triggered.
-          
-          * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
-          event.
-        
-        
-        
\ No newline at end of file
+
+**NOTE:** This endpoint is in Public Preview.
+
+Updates a registry webhook.
+
+:param id: str
+  Webhook ID
+:param description: str (optional)
+  User-specified description for the webhook.
+:param events: List[:class:`RegistryWebhookEvent`] (optional)
+  Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
+  created for the associated model.
+  
+  * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
+  
+  * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
+  
+  * `COMMENT_CREATED`: A user wrote a comment on a registered model.
+  
+  * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
+  specified for a registry-wide webhook, which can be created by not specifying a model name in the
+  create request.
+  
+  * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
+  
+  * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
+  
+  * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
+  staging.
+  
+  * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
+  production.
+  
+  * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
+:param http_url_spec: :class:`HttpUrlSpec` (optional)
+:param job_spec: :class:`JobSpec` (optional)
+:param status: :class:`RegistryWebhookStatus` (optional)
+  Enable or disable triggering the webhook, or put the webhook into test mode. The default is
+  `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
+  
+  * `DISABLED`: Webhook is not triggered.
+  
+  * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
+  event.
+
+
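+A sketch of the common case of toggling a webhook off without deleting it (the ID is a
+placeholder):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import ml
+
+    w = WorkspaceClient()
+    # DISABLED keeps the webhook registered but stops it from firing
+    w.model_registry.update_webhook(id="<webhook-id>", status=ml.RegistryWebhookStatus.DISABLED)
+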
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index ec31991ef..4d261b0b4 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -5,15 +5,15 @@
 .. py:class:: PipelinesAPI
 
     The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
-    
-    Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
-    pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
-    orchestration, cluster management, monitoring, data quality, and error handling.
-    
-    Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
-    manages how your data is transformed based on a target schema you define for each processing step. You can
-    also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
-    data quality and specify how to handle records that fail those expectations.
+
+Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
+pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
+orchestration, cluster management, monitoring, data quality, and error handling.
+
+Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
+manages how your data is transformed based on a target schema you define for each processing step. You can
+also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
+data quality and specify how to handle records that fail those expectations.
 
     .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
@@ -49,84 +49,84 @@
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Create a pipeline.
-        
-        Creates a new data processing pipeline based on the requested configuration. If successful, this
-        method returns the ID of the new pipeline.
-        
-        :param allow_duplicate_names: bool (optional)
-          If false, deployment will fail if name conflicts with that of another pipeline.
-        :param budget_policy_id: str (optional)
-          Budget policy of this pipeline.
-        :param catalog: str (optional)
-          A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
-          in this pipeline are published to a `target` schema inside `catalog` (for example,
-          `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
-        :param channel: str (optional)
-          DLT Release Channel that specifies which version to use.
-        :param clusters: List[:class:`PipelineCluster`] (optional)
-          Cluster settings for this pipeline deployment.
-        :param configuration: Dict[str,str] (optional)
-          String-String configuration for this pipeline execution.
-        :param continuous: bool (optional)
-          Whether the pipeline is continuous or triggered. This replaces `trigger`.
-        :param deployment: :class:`PipelineDeployment` (optional)
-          Deployment type of this pipeline.
-        :param development: bool (optional)
-          Whether the pipeline is in Development mode. Defaults to false.
-        :param dry_run: bool (optional)
-        :param edition: str (optional)
-          Pipeline product edition.
-        :param filters: :class:`Filters` (optional)
-          Filters on which Pipeline packages to include in the deployed graph.
-        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support change data capture.
-        :param id: str (optional)
-          Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
-          The configuration for a managed ingestion pipeline. These settings cannot be used with the
-          'libraries', 'target' or 'catalog' settings.
-        :param libraries: List[:class:`PipelineLibrary`] (optional)
-          Libraries or code needed by this deployment.
-        :param name: str (optional)
-          Friendly identifier for this pipeline.
-        :param notifications: List[:class:`Notifications`] (optional)
-          List of notification settings for this pipeline.
-        :param photon: bool (optional)
-          Whether Photon is enabled for this pipeline.
-        :param restart_window: :class:`RestartWindow` (optional)
-          Restart window of this pipeline.
-        :param run_as: :class:`RunAs` (optional)
-          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-          
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
-        :param schema: str (optional)
-          The default schema (database) where tables are read from or published to. The presence of this field
-          implies that the pipeline is in direct publishing mode.
-        :param serverless: bool (optional)
-          Whether serverless compute is enabled for this pipeline.
-        :param storage: str (optional)
-          DBFS root directory for storing checkpoints and tables.
-        :param target: str (optional)
-          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
-        :param trigger: :class:`PipelineTrigger` (optional)
-          Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-        
-        :returns: :class:`CreatePipelineResponse`
-        
+
+Creates a new data processing pipeline based on the requested configuration. If successful, this
+method returns the ID of the new pipeline.
+
+:param allow_duplicate_names: bool (optional)
+  If false, deployment will fail if the name conflicts with that of another pipeline.
+:param budget_policy_id: str (optional)
+  Budget policy of this pipeline.
+:param catalog: str (optional)
+  A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
+  in this pipeline are published to a `target` schema inside `catalog` (for example,
+  `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
+:param channel: str (optional)
+  DLT Release Channel that specifies which version to use.
+:param clusters: List[:class:`PipelineCluster`] (optional)
+  Cluster settings for this pipeline deployment.
+:param configuration: Dict[str,str] (optional)
+  String-String configuration for this pipeline execution.
+:param continuous: bool (optional)
+  Whether the pipeline is continuous or triggered. This replaces `trigger`.
+:param deployment: :class:`PipelineDeployment` (optional)
+  Deployment type of this pipeline.
+:param development: bool (optional)
+  Whether the pipeline is in Development mode. Defaults to false.
+:param dry_run: bool (optional)
+:param edition: str (optional)
+  Pipeline product edition.
+:param filters: :class:`Filters` (optional)
+  Filters on which Pipeline packages to include in the deployed graph.
+:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+  The definition of a gateway pipeline to support change data capture.
+:param id: str (optional)
+  Unique identifier for this pipeline.
+:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
+  The configuration for a managed ingestion pipeline. These settings cannot be used with the
+  'libraries', 'target' or 'catalog' settings.
+:param libraries: List[:class:`PipelineLibrary`] (optional)
+  Libraries or code needed by this deployment.
+:param name: str (optional)
+  Friendly identifier for this pipeline.
+:param notifications: List[:class:`Notifications`] (optional)
+  List of notification settings for this pipeline.
+:param photon: bool (optional)
+  Whether Photon is enabled for this pipeline.
+:param restart_window: :class:`RestartWindow` (optional)
+  Restart window of this pipeline.
+:param run_as: :class:`RunAs` (optional)
+  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+  
+  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+  thrown.
+:param schema: str (optional)
+  The default schema (database) where tables are read from or published to. The presence of this field
+  implies that the pipeline is in direct publishing mode.
+:param serverless: bool (optional)
+  Whether serverless compute is enabled for this pipeline.
+:param storage: str (optional)
+  DBFS root directory for storing checkpoints and tables.
+:param target: str (optional)
+  Target schema (database) to add tables in this pipeline to. If not specified, no data is published
+  to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+:param trigger: :class:`PipelineTrigger` (optional)
+  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
+
+:returns: :class:`CreatePipelineResponse`
+
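+A minimal creation sketch (the pipeline name and notebook path are hypothetical; most
+parameters can be omitted):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import pipelines
+
+    w = WorkspaceClient()
+    created = w.pipelines.create(
+        name="my-pipeline",
+        libraries=[pipelines.PipelineLibrary(notebook=pipelines.NotebookLibrary(path="/Users/me/dlt-notebook"))],
+        development=True,
+        serverless=True,
+    )
+    print(created.pipeline_id)
+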
 
     .. py:method:: delete(pipeline_id: str)
 
         Delete a pipeline.
-        
-        Deletes a pipeline.
-        
-        :param pipeline_id: str
-        
-        
-        
+
+Deletes a pipeline.
+
+:param pipeline_id: str
+
+
+
 
     .. py:method:: get(pipeline_id: str) -> GetPipelineResponse
 
@@ -164,49 +164,49 @@
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Get a pipeline.
-        
-        :param pipeline_id: str
-        
-        :returns: :class:`GetPipelineResponse`
-        
+
+:param pipeline_id: str
+
+:returns: :class:`GetPipelineResponse`
+
 
     .. py:method:: get_permission_levels(pipeline_id: str) -> GetPipelinePermissionLevelsResponse
 
         Get pipeline permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param pipeline_id: str
-          The pipeline for which to get or manage permissions.
-        
-        :returns: :class:`GetPipelinePermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param pipeline_id: str
+  The pipeline for which to get or manage permissions.
+
+:returns: :class:`GetPipelinePermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(pipeline_id: str) -> PipelinePermissions
 
         Get pipeline permissions.
-        
-        Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
-        
-        :param pipeline_id: str
-          The pipeline for which to get or manage permissions.
-        
-        :returns: :class:`PipelinePermissions`
-        
+
+Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
+
+:param pipeline_id: str
+  The pipeline for which to get or manage permissions.
+
+:returns: :class:`PipelinePermissions`
+
 
     .. py:method:: get_update(pipeline_id: str, update_id: str) -> GetUpdateResponse
 
         Get a pipeline update.
-        
-        Gets an update from an active pipeline.
-        
-        :param pipeline_id: str
-          The ID of the pipeline.
-        :param update_id: str
-          The ID of the update.
-        
-        :returns: :class:`GetUpdateResponse`
-        
+
+Gets an update from an active pipeline.
+
+:param pipeline_id: str
+  The ID of the pipeline.
+:param update_id: str
+  The ID of the update.
+
+:returns: :class:`GetUpdateResponse`
+
 
     .. py:method:: list_pipeline_events(pipeline_id: str [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[PipelineEvent]
 
@@ -244,31 +244,31 @@
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         List pipeline events.
-        
-        Retrieves events for a pipeline.
-        
-        :param pipeline_id: str
-        :param filter: str (optional)
-          Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
-          are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
-          > 'TIMESTAMP' (or >=,<,<=,=)
-          
-          Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp>
-          '2021-07-22T06:37:33.083Z'
-        :param max_results: int (optional)
-          Max number of entries to return in a single page. The system may return fewer than max_results
-          events in a response, even if there are more events available.
-        :param order_by: List[str] (optional)
-          A string indicating a sort order by timestamp for the results, for example, ["timestamp asc"]. The
-          sort order can be ascending or descending. By default, events are returned in descending order by
-          timestamp.
-        :param page_token: str (optional)
-          Page token returned by previous call. This field is mutually exclusive with all fields in this
-          request except max_results. An error is returned if any fields other than max_results are set when
-          this field is set.
-        
-        :returns: Iterator over :class:`PipelineEvent`
-        
+
+Retrieves events for a pipeline.
+
+:param pipeline_id: str
+:param filter: str (optional)
+  Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
+  are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
+  > 'TIMESTAMP' (or >=,<,<=,=)
+  
+  Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp>
+  '2021-07-22T06:37:33.083Z'
+:param max_results: int (optional)
+  Max number of entries to return in a single page. The system may return fewer than max_results
+  events in a response, even if there are more events available.
+:param order_by: List[str] (optional)
+  A string indicating a sort order by timestamp for the results, for example, ["timestamp asc"]. The
+  sort order can be ascending or descending. By default, events are returned in descending order by
+  timestamp.
+:param page_token: str (optional)
+  Page token returned by previous call. This field is mutually exclusive with all fields in this
+  request except max_results. An error is returned if any fields other than max_results are set when
+  this field is set.
+
+:returns: Iterator over :class:`PipelineEvent`
+
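+For example, to pull only errors using the filter syntax described above (the pipeline
+ID is a placeholder):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for event in w.pipelines.list_pipeline_events(pipeline_id="<pipeline-id>", filter="level='ERROR'"):
+        print(event.timestamp, event.message)
+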
 
     .. py:method:: list_pipelines( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[PipelineStateInfo]
 
@@ -285,102 +285,102 @@
             all = w.pipelines.list_pipelines(pipelines.ListPipelinesRequest())
 
         List pipelines.
-        
-        Lists pipelines defined in the Delta Live Tables system.
-        
-        :param filter: str (optional)
-          Select a subset of results based on the specified criteria. The supported filters are:
-          
-          * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE
-          '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for
-          example: `name LIKE '%shopping%'`
-          
-          Composite filters are not supported. This field is optional.
-        :param max_results: int (optional)
-          The maximum number of entries to return in a single page. The system may return fewer than
-          max_results events in a response, even if there are more events available. This field is optional.
-          The default value is 25. The maximum value is 100. An error is returned if the value of max_results
-          is greater than 100.
-        :param order_by: List[str] (optional)
-          A list of strings specifying the order of results. Supported order_by fields are id and name. The
-          default is id asc. This field is optional.
-        :param page_token: str (optional)
-          Page token returned by previous call
-        
-        :returns: Iterator over :class:`PipelineStateInfo`
-        
+
+Lists pipelines defined in the Delta Live Tables system.
+
+:param filter: str (optional)
+  Select a subset of results based on the specified criteria. The supported filters are:
+  
+  * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE
+  '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for
+  example: `name LIKE '%shopping%'`
+  
+  Composite filters are not supported. This field is optional.
+:param max_results: int (optional)
+  The maximum number of entries to return in a single page. The system may return fewer than
+  max_results events in a response, even if there are more events available. This field is optional.
+  The default value is 25. The maximum value is 100. An error is returned if the value of max_results
+  is greater than 100.
+:param order_by: List[str] (optional)
+  A list of strings specifying the order of results. Supported order_by fields are id and name. The
+  default is id asc. This field is optional.
+:param page_token: str (optional)
+  Page token returned by previous call
+
+:returns: Iterator over :class:`PipelineStateInfo`
+
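+A sketch of the name-pattern filter described above, sorted by one of the supported
+order_by fields:
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for p in w.pipelines.list_pipelines(filter="name LIKE '%shopping%'", order_by=["name asc"]):
+        print(p.pipeline_id, p.name)
+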
 
     .. py:method:: list_updates(pipeline_id: str [, max_results: Optional[int], page_token: Optional[str], until_update_id: Optional[str]]) -> ListUpdatesResponse
 
         List pipeline updates.
-        
-        List updates for an active pipeline.
-        
-        :param pipeline_id: str
-          The pipeline to return updates for.
-        :param max_results: int (optional)
-          Max number of entries to return in a single page.
-        :param page_token: str (optional)
-          Page token returned by previous call
-        :param until_update_id: str (optional)
-          If present, returns updates until and including this update_id.
-        
-        :returns: :class:`ListUpdatesResponse`
-        
+
+List updates for an active pipeline.
+
+:param pipeline_id: str
+  The pipeline to return updates for.
+:param max_results: int (optional)
+  Max number of entries to return in a single page.
+:param page_token: str (optional)
+  Page token returned by previous call
+:param until_update_id: str (optional)
+  If present, returns updates until and including this update_id.
+
+:returns: :class:`ListUpdatesResponse`
+
 
     .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions
 
         Set pipeline permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param pipeline_id: str
-          The pipeline for which to get or manage permissions.
-        :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-        
-        :returns: :class:`PipelinePermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param pipeline_id: str
+  The pipeline for which to get or manage permissions.
+:param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
+
+:returns: :class:`PipelinePermissions`
+
 
     .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse
 
         Start a pipeline.
-        
-        Starts a new update for the pipeline. If there is already an active update for the pipeline, the
-        request will fail and the active update will remain running.
-        
-        :param pipeline_id: str
-        :param cause: :class:`StartUpdateCause` (optional)
-        :param full_refresh: bool (optional)
-          If true, this update will reset all tables before running.
-        :param full_refresh_selection: List[str] (optional)
-          A list of tables to update with fullRefresh. If both refresh_selection and full_refresh_selection
-          are empty, this is a full graph update. Full Refresh on a table means that the states of the table
-          will be reset before the refresh.
-        :param refresh_selection: List[str] (optional)
-          A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection
-          are empty, this is a full graph update. Full Refresh on a table means that the states of the table
-          will be reset before the refresh.
-        :param validate_only: bool (optional)
-          If true, this update only validates the correctness of pipeline source code but does not materialize
-          or publish any datasets.
-        
-        :returns: :class:`StartUpdateResponse`
-        
+
+Starts a new update for the pipeline. If there is already an active update for the pipeline, the
+request will fail and the active update will remain running.
+
+:param pipeline_id: str
+:param cause: :class:`StartUpdateCause` (optional)
+:param full_refresh: bool (optional)
+  If true, this update will reset all tables before running.
+:param full_refresh_selection: List[str] (optional)
+  A list of tables to update with fullRefresh. If both refresh_selection and full_refresh_selection
+  are empty, this is a full graph update. Full Refresh on a table means that the states of the table
+  will be reset before the refresh.
+:param refresh_selection: List[str] (optional)
+  A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection
+  are empty, this is a full graph update. Full Refresh on a table means that the states of the table
+  will be reset before the refresh.
+:param validate_only: bool (optional)
+  If true, this update only validates the correctness of pipeline source code but does not materialize
+  or publish any datasets.
+
+:returns: :class:`StartUpdateResponse`
+
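+For example, to kick off a full refresh of every table in the pipeline graph (the
+pipeline ID is a placeholder):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    resp = w.pipelines.start_update(pipeline_id="<pipeline-id>", full_refresh=True)
+    print(resp.update_id)
+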
 
     .. py:method:: stop(pipeline_id: str) -> Wait[GetPipelineResponse]
 
         Stop a pipeline.
-        
-        Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
-        request is a no-op.
-        
-        :param pipeline_id: str
-        
-        :returns:
-          Long-running operation waiter for :class:`GetPipelineResponse`.
-          See :method:wait_get_pipeline_idle for more details.
-        
+
+Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
+request is a no-op.
+
+:param pipeline_id: str
+
+:returns:
+  Long-running operation waiter for :class:`GetPipelineResponse`.
+  See :method:wait_get_pipeline_idle for more details.
+
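+Because this is a long-running operation, the returned waiter can block until the
+pipeline reaches an idle state (the pipeline ID is a placeholder):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # .result() polls until the pipeline is idle (or the default 20-minute timeout expires)
+    pipeline = w.pipelines.stop(pipeline_id="<pipeline-id>").result()
+    print(pipeline.state)
+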
 
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
@@ -432,89 +432,89 @@
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Edit a pipeline.
-        
-        Updates a pipeline with the supplied configuration.
-        
-        :param pipeline_id: str
-          Unique identifier for this pipeline.
-        :param allow_duplicate_names: bool (optional)
-          If false, deployment will fail if name has changed and conflicts the name of another pipeline.
-        :param budget_policy_id: str (optional)
-          Budget policy of this pipeline.
-        :param catalog: str (optional)
-          A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
-          in this pipeline are published to a `target` schema inside `catalog` (for example,
-          `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
-        :param channel: str (optional)
-          DLT Release Channel that specifies which version to use.
-        :param clusters: List[:class:`PipelineCluster`] (optional)
-          Cluster settings for this pipeline deployment.
-        :param configuration: Dict[str,str] (optional)
-          String-String configuration for this pipeline execution.
-        :param continuous: bool (optional)
-          Whether the pipeline is continuous or triggered. This replaces `trigger`.
-        :param deployment: :class:`PipelineDeployment` (optional)
-          Deployment type of this pipeline.
-        :param development: bool (optional)
-          Whether the pipeline is in Development mode. Defaults to false.
-        :param edition: str (optional)
-          Pipeline product edition.
-        :param expected_last_modified: int (optional)
-          If present, the last-modified time of the pipeline settings before the edit. If the settings were
-          modified after that time, then the request will fail with a conflict.
-        :param filters: :class:`Filters` (optional)
-          Filters on which Pipeline packages to include in the deployed graph.
-        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support change data capture.
-        :param id: str (optional)
-          Unique identifier for this pipeline.
-        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
-          The configuration for a managed ingestion pipeline. These settings cannot be used with the
-          'libraries', 'target' or 'catalog' settings.
-        :param libraries: List[:class:`PipelineLibrary`] (optional)
-          Libraries or code needed by this deployment.
-        :param name: str (optional)
-          Friendly identifier for this pipeline.
-        :param notifications: List[:class:`Notifications`] (optional)
-          List of notification settings for this pipeline.
-        :param photon: bool (optional)
-          Whether Photon is enabled for this pipeline.
-        :param restart_window: :class:`RestartWindow` (optional)
-          Restart window of this pipeline.
-        :param run_as: :class:`RunAs` (optional)
-          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-          
-          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-          thrown.
-        :param schema: str (optional)
-          The default schema (database) where tables are read from or published to. The presence of this field
-          implies that the pipeline is in direct publishing mode.
-        :param serverless: bool (optional)
-          Whether serverless compute is enabled for this pipeline.
-        :param storage: str (optional)
-          DBFS root directory for storing checkpoints and tables.
-        :param target: str (optional)
-          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
-        :param trigger: :class:`PipelineTrigger` (optional)
-          Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-        
-        
-        
+
+Updates a pipeline with the supplied configuration.
+
+:param pipeline_id: str
+  Unique identifier for this pipeline.
+:param allow_duplicate_names: bool (optional)
+  If false, deployment will fail if the name has changed and conflicts with the name of another pipeline.
+:param budget_policy_id: str (optional)
+  Budget policy of this pipeline.
+:param catalog: str (optional)
+  A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
+  in this pipeline are published to a `target` schema inside `catalog` (for example,
+  `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
+:param channel: str (optional)
+  DLT Release Channel that specifies which version to use.
+:param clusters: List[:class:`PipelineCluster`] (optional)
+  Cluster settings for this pipeline deployment.
+:param configuration: Dict[str,str] (optional)
+  String-String configuration for this pipeline execution.
+:param continuous: bool (optional)
+  Whether the pipeline is continuous or triggered. This replaces `trigger`.
+:param deployment: :class:`PipelineDeployment` (optional)
+  Deployment type of this pipeline.
+:param development: bool (optional)
+  Whether the pipeline is in Development mode. Defaults to false.
+:param edition: str (optional)
+  Pipeline product edition.
+:param expected_last_modified: int (optional)
+  If present, the last-modified time of the pipeline settings before the edit. If the settings were
+  modified after that time, then the request will fail with a conflict.
+:param filters: :class:`Filters` (optional)
+  Filters on which Pipeline packages to include in the deployed graph.
+:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+  The definition of a gateway pipeline to support change data capture.
+:param id: str (optional)
+  Unique identifier for this pipeline.
+:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
+  The configuration for a managed ingestion pipeline. These settings cannot be used with the
+  'libraries', 'target' or 'catalog' settings.
+:param libraries: List[:class:`PipelineLibrary`] (optional)
+  Libraries or code needed by this deployment.
+:param name: str (optional)
+  Friendly identifier for this pipeline.
+:param notifications: List[:class:`Notifications`] (optional)
+  List of notification settings for this pipeline.
+:param photon: bool (optional)
+  Whether Photon is enabled for this pipeline.
+:param restart_window: :class:`RestartWindow` (optional)
+  Restart window of this pipeline.
+:param run_as: :class:`RunAs` (optional)
+  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+  
+  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+  thrown.
+:param schema: str (optional)
+  The default schema (database) where tables are read from or published to. The presence of this field
+  implies that the pipeline is in direct publishing mode.
+:param serverless: bool (optional)
+  Whether serverless compute is enabled for this pipeline.
+:param storage: str (optional)
+  DBFS root directory for storing checkpoints and tables.
+:param target: str (optional)
+  Target schema (database) to add tables in this pipeline to. If not specified, no data is published
+  to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+:param trigger: :class:`PipelineTrigger` (optional)
+  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
+
+
+
 
     .. py:method:: update_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions
 
         Update pipeline permissions.
-        
-        Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
-        
-        :param pipeline_id: str
-          The pipeline for which to get or manage permissions.
-        :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-        
-        :returns: :class:`PipelinePermissions`
-        
+
+Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
+
+:param pipeline_id: str
+  The pipeline for which to get or manage permissions.
+:param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
+
+:returns: :class:`PipelinePermissions`
+
 
     .. py:method:: wait_get_pipeline_idle(pipeline_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GetPipelineResponse], None]]) -> GetPipelineResponse
 
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 687976f5d..ce88af34b 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -5,54 +5,54 @@
 .. py:class:: ServingEndpointsExt
 
     The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
-    
-    You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog.
-    Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means
-    the endpoints and associated compute resources are fully managed by Databricks and will not appear in your
-    cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model
-    Registry, called served entities. A serving endpoint can have at most ten served entities. You can
-    configure traffic settings to define how requests should be routed to your served entities behind an
-    endpoint. Additionally, you can configure the scale of resources that should be applied to each served
-    entity.
+
+You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog.
+Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means
+the endpoints and associated compute resources are fully managed by Databricks and will not appear in your
+cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model
+Registry, called served entities. A serving endpoint can have at most ten served entities. You can
+configure traffic settings to define how requests should be routed to your served entities behind an
+endpoint. Additionally, you can configure the scale of resources that should be applied to each served
+entity.
 
     .. py:method:: build_logs(name: str, served_model_name: str) -> BuildLogsResponse
 
         Get build logs for a served model.
-        
-        Retrieves the build logs associated with the provided served model.
-        
-        :param name: str
-          The name of the serving endpoint that the served model belongs to. This field is required.
-        :param served_model_name: str
-          The name of the served model that build logs will be retrieved for. This field is required.
-        
-        :returns: :class:`BuildLogsResponse`
-        
+
+Retrieves the build logs associated with the provided served model.
+
+:param name: str
+  The name of the serving endpoint that the served model belongs to. This field is required.
+:param served_model_name: str
+  The name of the served model that build logs will be retrieved for. This field is required.
+
+:returns: :class:`BuildLogsResponse`
+
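A minimal usage sketch (assuming a configured `WorkspaceClient`; the endpoint and served model names are hypothetical, and the `logs` attribute on the response is an assumption):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Fetch the container build logs for one served model.
resp = w.serving_endpoints.build_logs(
    name="my-endpoint",
    served_model_name="my-model-1",
)
print(resp.logs)  # assumed field holding the raw log text
```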
 
     .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
-        
-        :param name: str
-          The name of the serving endpoint. This field is required and must be unique across a Databricks
-          workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-        :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
-          throughput endpoints are currently supported.
-        :param config: :class:`EndpointCoreConfigInput` (optional)
-          The core config of the serving endpoint.
-        :param rate_limits: List[:class:`RateLimit`] (optional)
-          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
-          Gateway to manage rate limits.
-        :param route_optimized: bool (optional)
-          Enable route optimization for the serving endpoint.
-        :param tags: List[:class:`EndpointTag`] (optional)
-          Tags to be attached to the serving endpoint and automatically propagated to billing logs.
-        
-        :returns:
-          Long-running operation waiter for :class:`ServingEndpointDetailed`.
-          See :method:wait_get_serving_endpoint_not_updating for more details.
-        
+
+:param name: str
+  The name of the serving endpoint. This field is required and must be unique across a Databricks
+  workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
+:param ai_gateway: :class:`AiGatewayConfig` (optional)
+  The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+  throughput endpoints are currently supported.
+:param config: :class:`EndpointCoreConfigInput` (optional)
+  The core config of the serving endpoint.
+:param rate_limits: List[:class:`RateLimit`] (optional)
+  Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+  Gateway to manage rate limits.
+:param route_optimized: bool (optional)
+  Enable route optimization for the serving endpoint.
+:param tags: List[:class:`EndpointTag`] (optional)
+  Tags to be attached to the serving endpoint and automatically propagated to billing logs.
+
+:returns:
+  Long-running operation waiter for :class:`ServingEndpointDetailed`.
+  See :method:`wait_get_serving_endpoint_not_updating` for more details.
+
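A minimal sketch of creating an endpoint and blocking until it is ready via `create_and_wait` (the Unity Catalog model name, version, and workload size are hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (EndpointCoreConfigInput,
                                            ServedEntityInput)

w = WorkspaceClient()

# One served entity behind the endpoint; scale-to-zero keeps idle cost down.
endpoint = w.serving_endpoints.create_and_wait(
    name="my-endpoint",
    config=EndpointCoreConfigInput(
        served_entities=[
            ServedEntityInput(
                entity_name="main.default.my_model",  # hypothetical UC model
                entity_version="1",
                workload_size="Small",
                scale_to_zero_enabled=True,
            )
        ]
    ),
)
```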
 
     .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
@@ -60,36 +60,36 @@
     .. py:method:: delete(name: str)
 
         Delete a serving endpoint.
-        
-        :param name: str
-        
-        
-        
+
+:param name: str
+
+
+
 
     .. py:method:: export_metrics(name: str) -> ExportMetricsResponse
 
         Get metrics of a serving endpoint.
-        
-        Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
-        OpenMetrics exposition format.
-        
-        :param name: str
-          The name of the serving endpoint to retrieve metrics for. This field is required.
-        
-        :returns: :class:`ExportMetricsResponse`
-        
+
+Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
+OpenMetrics exposition format.
+
+:param name: str
+  The name of the serving endpoint to retrieve metrics for. This field is required.
+
+:returns: :class:`ExportMetricsResponse`
+
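A minimal sketch (the endpoint name is hypothetical):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Returns metrics in Prometheus/OpenMetrics exposition format.
metrics = w.serving_endpoints.export_metrics(name="my-endpoint")
```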
 
     .. py:method:: get(name: str) -> ServingEndpointDetailed
 
         Get a single serving endpoint.
-        
-        Retrieves the details for a single serving endpoint.
-        
-        :param name: str
-          The name of the serving endpoint. This field is required.
-        
-        :returns: :class:`ServingEndpointDetailed`
-        
+
+Retrieves the details for a single serving endpoint.
+
+:param name: str
+  The name of the serving endpoint. This field is required.
+
+:returns: :class:`ServingEndpointDetailed`
+
 
     .. py:method:: get_langchain_chat_open_ai_client(model)
 
@@ -100,228 +100,228 @@
     .. py:method:: get_open_api(name: str) -> GetOpenApiResponse
 
         Get the schema for a serving endpoint.
-        
-        Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
-        the supported paths, input and output format and datatypes.
-        
-        :param name: str
-          The name of the serving endpoint that the served model belongs to. This field is required.
-        
-        :returns: :class:`GetOpenApiResponse`
-        
+
+Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
+the supported paths, input and output formats, and data types.
+
+:param name: str
+  The name of the serving endpoint that the served model belongs to. This field is required.
+
+:returns: :class:`GetOpenApiResponse`
+
 
     .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse
 
         Get serving endpoint permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param serving_endpoint_id: str
-          The serving endpoint for which to get or manage permissions.
-        
-        :returns: :class:`GetServingEndpointPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param serving_endpoint_id: str
+  The serving endpoint for which to get or manage permissions.
+
+:returns: :class:`GetServingEndpointPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(serving_endpoint_id: str) -> ServingEndpointPermissions
 
         Get serving endpoint permissions.
-        
-        Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
-        object.
-        
-        :param serving_endpoint_id: str
-          The serving endpoint for which to get or manage permissions.
-        
-        :returns: :class:`ServingEndpointPermissions`
-        
+
+Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
+object.
+
+:param serving_endpoint_id: str
+  The serving endpoint for which to get or manage permissions.
+
+:returns: :class:`ServingEndpointPermissions`
+
 
     .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> Response
 
        Make external service calls using the credentials stored in a UC Connection.
-        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
-        :param conn: str
-          The connection name to use. This is required to identify the external connection.
-        :param method: :class:`ExternalFunctionRequestHttpMethod`
-          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
-        :param path: str
-          The relative path for the API endpoint. This is required.
-        :param headers: Dict[str,str] (optional)
-          Additional headers for the request. If not provided, only auth headers from connections would be
-          passed.
-        :param json: Dict[str,str] (optional)
-          JSON payload for the request.
-        :param params: Dict[str,str] (optional)
-          Query parameters for the request.
-        :returns: :class:`Response`
-        
+**NOTE:** Experimental: This API may change or be removed in a future release without warning.
+:param conn: str
+  The connection name to use. This is required to identify the external connection.
+:param method: :class:`ExternalFunctionRequestHttpMethod`
+  The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+:param path: str
+  The relative path for the API endpoint. This is required.
+:param headers: Dict[str,str] (optional)
+  Additional headers for the request. If not provided, only auth headers from connections would be
+  passed.
+:param json: Dict[str,str] (optional)
+  JSON payload for the request.
+:param params: Dict[str,str] (optional)
+  Query parameters for the request.
+:returns: :class:`Response`
+
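A minimal sketch of calling an external API through a UC Connection (the connection name, path, and query parameter are hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()

# Only auth headers from the connection are sent unless `headers` is given.
resp = w.serving_endpoints.http_request(
    conn="my_connection",
    method=ExternalFunctionRequestHttpMethod.GET,
    path="/api/v1/status",
    params={"verbose": "true"},
)
```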
 
     .. py:method:: list() -> Iterator[ServingEndpoint]
 
         Get all serving endpoints.
-        
-        :returns: Iterator over :class:`ServingEndpoint`
-        
+
+:returns: Iterator over :class:`ServingEndpoint`
+
 
     .. py:method:: logs(name: str, served_model_name: str) -> ServerLogsResponse
 
         Get the latest logs for a served model.
-        
-        Retrieves the service logs associated with the provided served model.
-        
-        :param name: str
-          The name of the serving endpoint that the served model belongs to. This field is required.
-        :param served_model_name: str
-          The name of the served model that logs will be retrieved for. This field is required.
-        
-        :returns: :class:`ServerLogsResponse`
-        
+
+Retrieves the service logs associated with the provided served model.
+
+:param name: str
+  The name of the serving endpoint that the served model belongs to. This field is required.
+:param served_model_name: str
+  The name of the served model that logs will be retrieved for. This field is required.
+
+:returns: :class:`ServerLogsResponse`
+
 
     .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags
 
         Update tags of a serving endpoint.
-        
-        Used to batch add and delete tags from a serving endpoint with a single API call.
-        
-        :param name: str
-          The name of the serving endpoint who's tags to patch. This field is required.
-        :param add_tags: List[:class:`EndpointTag`] (optional)
-          List of endpoint tags to add
-        :param delete_tags: List[str] (optional)
-          List of tag keys to delete
-        
-        :returns: :class:`EndpointTags`
-        
+
+Used to batch add and delete tags from a serving endpoint with a single API call.
+
+:param name: str
+  The name of the serving endpoint whose tags are being patched. This field is required.
+:param add_tags: List[:class:`EndpointTag`] (optional)
+  List of endpoint tags to add.
+:param delete_tags: List[str] (optional)
+  List of tag keys to delete.
+
+:returns: :class:`EndpointTags`
+
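A minimal sketch of batching tag changes (tag keys and values are hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import EndpointTag

w = WorkspaceClient()

# Add one tag and remove another in a single API call.
tags = w.serving_endpoints.patch(
    name="my-endpoint",
    add_tags=[EndpointTag(key="team", value="ml-platform")],
    delete_tags=["deprecated-key"],
)
```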
 
     .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse
 
         Update rate limits of a serving endpoint.
-        
-        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
-        currently supported. For external models, use AI Gateway to manage rate limits.
-        
-        :param name: str
-          The name of the serving endpoint whose rate limits are being updated. This field is required.
-        :param rate_limits: List[:class:`RateLimit`] (optional)
-          The list of endpoint rate limits.
-        
-        :returns: :class:`PutResponse`
-        
+
+Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+currently supported. For external models, use AI Gateway to manage rate limits.
+
+:param name: str
+  The name of the serving endpoint whose rate limits are being updated. This field is required.
+:param rate_limits: List[:class:`RateLimit`] (optional)
+  The list of endpoint rate limits.
+
+:returns: :class:`PutResponse`
+
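A minimal sketch (the endpoint name is hypothetical, and the `RateLimitKey`/`RateLimitRenewalPeriod` enum members are assumptions):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (RateLimit, RateLimitKey,
                                            RateLimitRenewalPeriod)

w = WorkspaceClient()

# Cap a foundation model endpoint at 100 calls per user per minute.
w.serving_endpoints.put(
    name="my-endpoint",
    rate_limits=[
        RateLimit(
            calls=100,
            key=RateLimitKey.USER,  # assumed enum member
            renewal_period=RateLimitRenewalPeriod.MINUTE,
        )
    ],
)
```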
 
     .. py:method:: put_ai_gateway(name: str [, guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse
 
         Update AI Gateway of a serving endpoint.
-        
-        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
-        throughput endpoints are currently supported.
-        
-        :param name: str
-          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
-        :param guardrails: :class:`AiGatewayGuardrails` (optional)
-          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
-        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
-          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
-          being sent to and received from model APIs and to improve model quality.
-        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
-          Configuration for rate limits which can be set to limit endpoint traffic.
-        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
-          Configuration to enable usage tracking using system tables. These tables allow you to monitor
-          operational usage on endpoints and their associated costs.
-        
-        :returns: :class:`PutAiGatewayResponse`
-        
+
+Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+throughput endpoints are currently supported.
+
+:param name: str
+  The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+:param guardrails: :class:`AiGatewayGuardrails` (optional)
+  Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+:param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+  Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+  being sent to and received from model APIs and to improve model quality.
+:param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+  Configuration for rate limits which can be set to limit endpoint traffic.
+:param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+  Configuration to enable usage tracking using system tables. These tables allow you to monitor
+  operational usage on endpoints and their associated costs.
+
+:returns: :class:`PutAiGatewayResponse`
+
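A minimal sketch enabling usage tracking and a per-user rate limit (the endpoint name is hypothetical; the AI Gateway enum members are assumptions):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (AiGatewayRateLimit,
                                            AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod,
                                            AiGatewayUsageTrackingConfig)

w = WorkspaceClient()

# Usage tracking writes to system tables; the rate limit throttles traffic.
w.serving_endpoints.put_ai_gateway(
    name="my-endpoint",
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True),
    rate_limits=[
        AiGatewayRateLimit(
            calls=100,
            key=AiGatewayRateLimitKey.USER,  # assumed enum member
            renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE,
        )
    ],
)
```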
 
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
-        
-        :param name: str
-          The name of the serving endpoint. This field is required.
-        :param dataframe_records: List[Any] (optional)
-          Pandas Dataframe input in the records orientation.
-        :param dataframe_split: :class:`DataframeSplitInput` (optional)
-          Pandas Dataframe input in the split orientation.
-        :param extra_params: Dict[str,str] (optional)
-          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
-          foundation model__ serving endpoints. This is a map of strings and should only be used with other
-          external/foundation model query fields.
-        :param input: Any (optional)
-          The input string (or array of strings) field used ONLY for __embeddings external & foundation
-          model__ serving endpoints and is the only field (along with extra_params if needed) used by
-          embeddings queries.
-        :param inputs: Any (optional)
-          Tensor-based input in columnar format.
-        :param instances: List[Any] (optional)
-          Tensor-based input in row format.
-        :param max_tokens: int (optional)
-          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is an integer and should only be used with other chat/completions query fields.
-        :param messages: List[:class:`ChatMessage`] (optional)
-          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
-          map of strings and should only be used with other chat query fields.
-        :param n: int (optional)
-          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
-          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
-          used with other chat/completions query fields.
-        :param prompt: Any (optional)
-          The prompt string (or array of strings) field used ONLY for __completions external & foundation
-          model__ serving endpoints and should only be used with other completions query fields.
-        :param stop: List[str] (optional)
-          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
-          serving endpoints. This is a list of strings and should only be used with other chat/completions
-          query fields.
-        :param stream: bool (optional)
-          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
-          query fields.
-        :param temperature: float (optional)
-          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
-          other chat/completions query fields.
-        
-        :returns: :class:`QueryEndpointResponse`
-        
+
+:param name: str
+  The name of the serving endpoint. This field is required.
+:param dataframe_records: List[Any] (optional)
+  Pandas DataFrame input in the records orientation.
+:param dataframe_split: :class:`DataframeSplitInput` (optional)
+  Pandas DataFrame input in the split orientation.
+:param extra_params: Dict[str,str] (optional)
+  The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+  foundation model__ serving endpoints. This is a map of strings and should only be used with other
+  external/foundation model query fields.
+:param input: Any (optional)
+  The input string (or array of strings) field used ONLY for __embeddings external & foundation
+  model__ serving endpoints and is the only field (along with extra_params if needed) used by
+  embeddings queries.
+:param inputs: Any (optional)
+  Tensor-based input in columnar format.
+:param instances: List[Any] (optional)
+  Tensor-based input in row format.
+:param max_tokens: int (optional)
+  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is an integer and should only be used with other chat/completions query fields.
+:param messages: List[:class:`ChatMessage`] (optional)
+  The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+  map of strings and should only be used with other chat query fields.
+:param n: int (optional)
+  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+  used with other chat/completions query fields.
+:param prompt: Any (optional)
+  The prompt string (or array of strings) field used ONLY for __completions external & foundation
+  model__ serving endpoints and should only be used with other completions query fields.
+:param stop: List[str] (optional)
+  The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+  serving endpoints. This is a list of strings and should only be used with other chat/completions
+  query fields.
+:param stream: bool (optional)
+  The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+  query fields.
+:param temperature: float (optional)
+  The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+  other chat/completions query fields.
+
+:returns: :class:`QueryEndpointResponse`
+
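A minimal chat-style sketch against an external/foundation model endpoint (the endpoint name is hypothetical; `ChatMessageRole` is an assumed companion enum):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ChatMessage, ChatMessageRole

w = WorkspaceClient()

# messages, max_tokens and temperature are chat/completions-only fields.
response = w.serving_endpoints.query(
    name="my-chat-endpoint",
    messages=[ChatMessage(role=ChatMessageRole.USER,
                          content="What is Delta Lake?")],
    max_tokens=128,
    temperature=0.5,
)
```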
 
     .. py:method:: set_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions
 
         Set serving endpoint permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param serving_endpoint_id: str
-          The serving endpoint for which to get or manage permissions.
-        :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-        
-        :returns: :class:`ServingEndpointPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param serving_endpoint_id: str
+  The serving endpoint for which to get or manage permissions.
+:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
+
+:returns: :class:`ServingEndpointPermissions`
+
 
     .. py:method:: update_config(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig]]) -> Wait[ServingEndpointDetailed]
 
         Update config of a serving endpoint.
-        
-        Updates any combination of the serving endpoint's served entities, the compute configuration of those
-        served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
-        can not be updated until the current update completes or fails.
-        
-        :param name: str
-          The name of the serving endpoint to update. This field is required.
-        :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
-          Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
-          Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
-          existing provisioned throughput endpoints that never have inference table configured; in these cases
-          please use AI Gateway to manage inference tables.
-        :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          The list of served entities under the serving endpoint config.
-        :param served_models: List[:class:`ServedModelInput`] (optional)
-          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
-          config.
-        :param traffic_config: :class:`TrafficConfig` (optional)
-          The traffic configuration associated with the serving endpoint config.
-        
-        :returns:
-          Long-running operation waiter for :class:`ServingEndpointDetailed`.
-          See :method:wait_get_serving_endpoint_not_updating for more details.
-        
+
+Updates any combination of the serving endpoint's served entities, the compute configuration of those
+served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
+cannot be updated until the current update completes or fails.
+
+:param name: str
+  The name of the serving endpoint to update. This field is required.
+:param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
+  Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+  Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
+  existing provisioned throughput endpoints that have never had an inference table configured; in
+  these cases, please use AI Gateway to manage inference tables.
+:param served_entities: List[:class:`ServedEntityInput`] (optional)
+  The list of served entities under the serving endpoint config.
+:param served_models: List[:class:`ServedModelInput`] (optional)
+  (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+  config.
+:param traffic_config: :class:`TrafficConfig` (optional)
+  The traffic configuration associated with the serving endpoint config.
+
+:returns:
+  Long-running operation waiter for :class:`ServingEndpointDetailed`.
+  See :method:`wait_get_serving_endpoint_not_updating` for more details.
+
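A minimal sketch rolling the endpoint to a new model version with `update_config_and_wait` (entity name, version, and workload size are hypothetical):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ServedEntityInput

w = WorkspaceClient()

# This is rejected while another config update is still in progress.
endpoint = w.serving_endpoints.update_config_and_wait(
    name="my-endpoint",
    served_entities=[
        ServedEntityInput(
            entity_name="main.default.my_model",
            entity_version="2",
            workload_size="Small",
            scale_to_zero_enabled=True,
        )
    ],
)
```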
 
     .. py:method:: update_config_and_wait(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
@@ -329,15 +329,15 @@
     .. py:method:: update_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions
 
         Update serving endpoint permissions.
-        
-        Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their
-        root object.
-        
-        :param serving_endpoint_id: str
-          The serving endpoint for which to get or manage permissions.
-        :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-        
-        :returns: :class:`ServingEndpointPermissions`
-        
+
+Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their
+root object.
+
+:param serving_endpoint_id: str
+  The serving endpoint for which to get or manage permissions.
+:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
+
+:returns: :class:`ServingEndpointPermissions`
+
 
     .. py:method:: wait_get_serving_endpoint_not_updating(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[ServingEndpointDetailed], None]]) -> ServingEndpointDetailed
diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst
index 8fb09e7ff..b3501b121 100644
--- a/docs/workspace/serving/serving_endpoints_data_plane.rst
+++ b/docs/workspace/serving/serving_endpoints_data_plane.rst
@@ -5,55 +5,54 @@
 .. py:class:: ServingEndpointsDataPlaneAPI
 
     Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving
-    endpoints service.
+endpoints service.
 
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
-        
-        :param name: str
-          The name of the serving endpoint. This field is required.
-        :param dataframe_records: List[Any] (optional)
-          Pandas Dataframe input in the records orientation.
-        :param dataframe_split: :class:`DataframeSplitInput` (optional)
-          Pandas Dataframe input in the split orientation.
-        :param extra_params: Dict[str,str] (optional)
-          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
-          foundation model__ serving endpoints. This is a map of strings and should only be used with other
-          external/foundation model query fields.
-        :param input: Any (optional)
-          The input string (or array of strings) field used ONLY for __embeddings external & foundation
-          model__ serving endpoints and is the only field (along with extra_params if needed) used by
-          embeddings queries.
-        :param inputs: Any (optional)
-          Tensor-based input in columnar format.
-        :param instances: List[Any] (optional)
-          Tensor-based input in row format.
-        :param max_tokens: int (optional)
-          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is an integer and should only be used with other chat/completions query fields.
-        :param messages: List[:class:`ChatMessage`] (optional)
-          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
-          map of strings and should only be used with other chat query fields.
-        :param n: int (optional)
-          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
-          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
-          used with other chat/completions query fields.
-        :param prompt: Any (optional)
-          The prompt string (or array of strings) field used ONLY for __completions external & foundation
-          model__ serving endpoints and should only be used with other completions query fields.
-        :param stop: List[str] (optional)
-          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
-          serving endpoints. This is a list of strings and should only be used with other chat/completions
-          query fields.
-        :param stream: bool (optional)
-          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
-          query fields.
-        :param temperature: float (optional)
-          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
-          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
-          other chat/completions query fields.
-        
-        :returns: :class:`QueryEndpointResponse`
-        
\ No newline at end of file
+
+:param name: str
+  The name of the serving endpoint. This field is required.
+:param dataframe_records: List[Any] (optional)
+  Pandas DataFrame input in the records orientation.
+:param dataframe_split: :class:`DataframeSplitInput` (optional)
+  Pandas DataFrame input in the split orientation.
+:param extra_params: Dict[str,str] (optional)
+  The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+  foundation model__ serving endpoints. This is a map of strings and should only be used with other
+  external/foundation model query fields.
+:param input: Any (optional)
+  The input string (or array of strings) field used ONLY for __embeddings external & foundation
+  model__ serving endpoints and is the only field (along with extra_params if needed) used by
+  embeddings queries.
+:param inputs: Any (optional)
+  Tensor-based input in columnar format.
+:param instances: List[Any] (optional)
+  Tensor-based input in row format.
+:param max_tokens: int (optional)
+  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is an integer and should only be used with other chat/completions query fields.
+:param messages: List[:class:`ChatMessage`] (optional)
+  The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+  map of strings and should only be used with other chat query fields.
+:param n: int (optional)
+  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+  used with other chat/completions query fields.
+:param prompt: Any (optional)
+  The prompt string (or array of strings) field used ONLY for __completions external & foundation
+  model__ serving endpoints and should only be used with other completions query fields.
+:param stop: List[str] (optional)
+  The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+  serving endpoints. This is a list of strings and should only be used with other chat/completions
+  query fields.
+:param stream: bool (optional)
+  The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+  query fields.
+:param temperature: float (optional)
+  The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+  endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+  other chat/completions query fields.
+
+:returns: :class:`QueryEndpointResponse`
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
index 66c621997..587b94d11 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -5,60 +5,59 @@
 .. py:class:: AibiDashboardEmbeddingAccessPolicyAPI
 
     Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
-    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
 
         Delete the AI/BI dashboard embedding access policy.
-        
-        Delete the AI/BI dashboard embedding access policy, reverting back to the default.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
-        
+
+Delete the AI/BI dashboard embedding access policy, reverting back to the default.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+
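A minimal sketch of the read -> delete pattern described above (assuming the accessor lives at `w.settings.aibi_dashboard_embedding_access_policy` and the returned setting exposes an `etag` field):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Get a fresh etag first so a concurrent write surfaces as a conflict
# instead of being silently overwritten.
current = w.settings.aibi_dashboard_embedding_access_policy.get()
w.settings.aibi_dashboard_embedding_access_policy.delete(etag=current.etag)
```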
 
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting
 
         Retrieve the AI/BI dashboard embedding access policy.
-        
-        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
-        permitting AI/BI dashboards to be embedded on approved domains.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
-        
+
+Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+permitting AI/BI dashboards to be embedded on approved domains.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting
 
         Update the AI/BI dashboard embedding access policy.
-        
-        Updates the AI/BI dashboard embedding access policy at the workspace level.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
-        
\ No newline at end of file
+
+Updates the AI/BI dashboard embedding access policy at the workspace level.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
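A minimal sketch of a field-mask-scoped update (the nested setting and enum names are assumptions based on the generated `databricks.sdk.service.settings` types):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.settings import (
    AibiDashboardEmbeddingAccessPolicy,
    AibiDashboardEmbeddingAccessPolicyAccessPolicyType,
    AibiDashboardEmbeddingAccessPolicySetting)

w = WorkspaceClient()

# Only the single field named in field_mask is replaced.
w.settings.aibi_dashboard_embedding_access_policy.update(
    allow_missing=True,
    setting=AibiDashboardEmbeddingAccessPolicySetting(
        aibi_dashboard_embedding_access_policy=AibiDashboardEmbeddingAccessPolicy(
            access_policy_type=AibiDashboardEmbeddingAccessPolicyAccessPolicyType.DENY_ALL_DOMAINS)),
    field_mask="aibi_dashboard_embedding_access_policy.access_policy_type",
)
```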
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
index 0c9294130..53e9cdcca 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -5,61 +5,60 @@
 .. py:class:: AibiDashboardEmbeddingApprovedDomainsAPI
 
     Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
-    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
 
         Delete AI/BI dashboard embedding approved domains.
-        
-        Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
-        empty list.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
-        
+
+Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
+empty list.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting
 
         Retrieve the list of domains approved to host embedded AI/BI dashboards.
-        
-        Retrieves the list of domains approved to host embedded AI/BI dashboards.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
-        
+
+Retrieves the list of domains approved to host embedded AI/BI dashboards.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting
 
         Update the list of domains approved to host embedded AI/BI dashboards.
-        
-        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
-        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
-        
\ No newline at end of file
+
+Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst
index 350e0e713..94d110154 100644
--- a/docs/workspace/settings/automatic_cluster_update.rst
+++ b/docs/workspace/settings/automatic_cluster_update.rst
@@ -5,46 +5,45 @@
 .. py:class:: AutomaticClusterUpdateAPI
 
     Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
-    off.
+off.
 
     .. py:method:: get( [, etag: Optional[str]]) -> AutomaticClusterUpdateSetting
 
         Get the automatic cluster update setting.
-        
-        Gets the automatic cluster update setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`AutomaticClusterUpdateSetting`
-        
+
+Gets the automatic cluster update setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`AutomaticClusterUpdateSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str) -> AutomaticClusterUpdateSetting
 
         Update the automatic cluster update setting.
-        
-        Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in
-        `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request
-        before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
-        request must be retried by using the fresh etag in the 409 response.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`AutomaticClusterUpdateSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`AutomaticClusterUpdateSetting`
-        
\ No newline at end of file
+
+Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in
+`PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request
+before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
+request must be retried by using the fresh etag in the 409 response.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`AutomaticClusterUpdateSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`AutomaticClusterUpdateSetting`
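A minimal sketch of the etag-retry loop implied above (the `automatic_cluster_update_workspace.enabled` field path and the `ResourceConflict` mapping of HTTP 409 are assumptions):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.errors import ResourceConflict

w = WorkspaceClient()

# Re-read on conflict so each retry carries a fresh etag.
for _ in range(3):
    setting = w.settings.automatic_cluster_update.get()
    setting.automatic_cluster_update_workspace.enabled = True  # assumed field
    try:
        w.settings.automatic_cluster_update.update(
            allow_missing=True,
            setting=setting,
            field_mask="automatic_cluster_update_workspace.enabled",
        )
        break
    except ResourceConflict:
        continue  # stale etag; loop to re-read
```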
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
index 855451b82..3d6a4704c 100644
--- a/docs/workspace/settings/compliance_security_profile.rst
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -5,48 +5,47 @@
 .. py:class:: ComplianceSecurityProfileAPI
 
     Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
-    workspace is permanent. By default, it is turned off.
-    
-    This settings can NOT be disabled once it is enabled.
+workspace is permanent. By default, it is turned off.
+
+This setting can NOT be disabled once it is enabled.
 
     .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting
 
         Get the compliance security profile setting.
-        
-        Gets the compliance security profile setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`ComplianceSecurityProfileSetting`
-        
+
+Gets the compliance security profile setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`ComplianceSecurityProfileSetting`
+
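+A minimal read, assuming a configured :class:`WorkspaceClient` named ``w``:
+
+.. code-block::
+
+    profile = w.settings.compliance_security_profile.get()
+    # The returned setting carries the etag needed for any later update.
+    print(profile.etag)
+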
 
     .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting
 
         Update the compliance security profile setting.
-        
-        Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
-        in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
-        request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
-        the request must be retried by using the fresh etag in the 409 response.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`ComplianceSecurityProfileSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`ComplianceSecurityProfileSetting`
-        
\ No newline at end of file
+
+Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
+in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
+request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
+the request must be retried by using the fresh etag in the 409 response.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`ComplianceSecurityProfileSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`ComplianceSecurityProfileSetting`
diff --git a/docs/workspace/settings/credentials_manager.rst b/docs/workspace/settings/credentials_manager.rst
index c8bfa4f30..eacb47eec 100644
--- a/docs/workspace/settings/credentials_manager.rst
+++ b/docs/workspace/settings/credentials_manager.rst
@@ -5,21 +5,20 @@
 .. py:class:: CredentialsManagerAPI
 
     Credentials manager interacts with Identity Providers to perform token exchanges using stored
-    credentials and refresh tokens.
+credentials and refresh tokens.
 
     .. py:method:: exchange_token(partition_id: PartitionId, token_type: List[TokenType], scopes: List[str]) -> ExchangeTokenResponse
 
         Exchange token.
-        
-        Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
-        determine token permissions.
-        
-        :param partition_id: :class:`PartitionId`
-          The partition of Credentials store
-        :param token_type: List[:class:`TokenType`]
-          A list of token types being requested
-        :param scopes: List[str]
-          Array of scopes for the token request.
-        
-        :returns: :class:`ExchangeTokenResponse`
-        
\ No newline at end of file
+
+Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
+determine token permissions.
+
+:param partition_id: :class:`PartitionId`
+  The partition of Credentials store
+:param token_type: List[:class:`TokenType`]
+  A list of token types being requested
+:param scopes: List[str]
+  Array of scopes for the token request.
+
+:returns: :class:`ExchangeTokenResponse`
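+
+A hedged sketch of a token exchange; ``PartitionId`` and ``TokenType`` come from
+``databricks.sdk.service.settings``, and the workspace id, token type, and scope values below are
+placeholders:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.settings import PartitionId, TokenType
+
+    w = WorkspaceClient()
+
+    response = w.credentials_manager.exchange_token(
+        partition_id=PartitionId(workspace_id=1234567890),  # placeholder workspace id
+        token_type=[TokenType.AZURE_ACTIVE_DIRECTORY_TOKEN],  # illustrative token type
+        scopes=["clusters"],  # placeholder scope
+    )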
diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst
index 960949930..082011af0 100644
--- a/docs/workspace/settings/default_namespace.rst
+++ b/docs/workspace/settings/default_namespace.rst
@@ -5,82 +5,81 @@
 .. py:class:: DefaultNamespaceAPI
 
     The default namespace setting API allows users to configure the default namespace for a Databricks
-    workspace.
-    
-    Through this API, users can retrieve, set, or modify the default namespace used when queries do not
-    reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
-    default catalog, then a query 'SELECT * FROM myTable' would reference the object
-    'retail_prod.default.myTable' (the schema 'default' is always assumed).
-    
-    This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
-    namespace only applies when using Unity Catalog-enabled compute.
+workspace.
+
+Through this API, users can retrieve, set, or modify the default namespace used when queries do not
+reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
+default catalog, then a query 'SELECT * FROM myTable' would reference the object
+'retail_prod.default.myTable' (the schema 'default' is always assumed).
+
+This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
+namespace only applies when using Unity Catalog-enabled compute.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultNamespaceSettingResponse
 
         Delete the default namespace setting.
-        
-        Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE`
-        requests (as a query parameter). The etag can be retrieved by making a `GET` request before the
-        `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the
-        request must be retried by using the fresh etag in the 409 response.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteDefaultNamespaceSettingResponse`
-        
+
+Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE`
+requests (as a query parameter). The etag can be retrieved by making a `GET` request before the
+`DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the
+request must be retried by using the fresh etag in the 409 response.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteDefaultNamespaceSettingResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> DefaultNamespaceSetting
 
         Get the default namespace setting.
-        
-        Gets the default namespace setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DefaultNamespaceSetting`
-        
+
+Gets the default namespace setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DefaultNamespaceSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting
 
         Update the default namespace setting.
-        
-        Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH`
-        requests (as part of the setting field). The etag can be retrieved by making a `GET` request before
-        the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the
-        etag is present in the error response, which should be set in the `PATCH` request. If the setting is
-        updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag
-        in the 409 response.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`DefaultNamespaceSetting`
-          This represents the setting configuration for the default namespace in the Databricks workspace.
-          Setting the default catalog for the workspace determines the catalog that is used when queries do
-          not reference a fully qualified 3 level name. For example, if the default catalog is set to
-          'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
-          'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
-          restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
-          applies when using Unity Catalog-enabled compute.
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`DefaultNamespaceSetting`
-        
\ No newline at end of file
+
+Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH`
+requests (as part of the setting field). The etag can be retrieved by making a `GET` request before
+the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the
+etag is present in the error response, which should be set in the `PATCH` request. If the setting is
+updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag
+in the 409 response.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`DefaultNamespaceSetting`
+  This represents the setting configuration for the default namespace in the Databricks workspace.
+  Setting the default catalog for the workspace determines the catalog that is used when queries do
+  not reference a fully qualified 3 level name. For example, if the default catalog is set to
+  'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
+  'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
+  restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
+  applies when using Unity Catalog-enabled compute.
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`DefaultNamespaceSetting`
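+
+For example, setting 'retail_prod' as the default catalog, a sketch assuming
+``DefaultNamespaceSetting`` and ``StringMessage`` from ``databricks.sdk.service.settings``:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import settings
+
+    w = WorkspaceClient()
+
+    updated = w.settings.default_namespace.update(
+        allow_missing=True,
+        field_mask="namespace.value",
+        setting=settings.DefaultNamespaceSetting(
+            namespace=settings.StringMessage(value="retail_prod"),
+        ),
+    )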
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
index a015e777f..4e44a7891 100644
--- a/docs/workspace/settings/disable_legacy_access.rst
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -5,63 +5,62 @@
 .. py:class:: DisableLegacyAccessAPI
 
     'Disabling legacy access' has the following impacts:
-    
-    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
-    Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
-    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
-    Unity Catalog access on all path based access.
+
+1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through
+HMS Federation.
+2. Disables Fallback Mode (docs link) on any External Location access from the workspace.
+3. Alters DBFS path access to use External Location permissions in place of legacy credentials.
+4. Enforces Unity Catalog access on all path-based access.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse
 
         Delete Legacy Access Disablement Status.
-        
-        Deletes legacy access disablement status.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteDisableLegacyAccessResponse`
-        
+
+Deletes legacy access disablement status.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteDisableLegacyAccessResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess
 
         Retrieve Legacy Access Disablement Status.
-        
-        Retrieves legacy access disablement Status.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DisableLegacyAccess`
-        
+
+Retrieves legacy access disablement status.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DisableLegacyAccess`
+
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess
 
         Update Legacy Access Disablement Status.
-        
-        Updates legacy access disablement status.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`DisableLegacyAccess`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`DisableLegacyAccess`
-        
\ No newline at end of file
+
+Updates legacy access disablement status.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`DisableLegacyAccess`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`DisableLegacyAccess`
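+
+A minimal sketch of enabling the disablement; ``DisableLegacyAccess`` and ``BooleanMessage`` are
+assumed to come from ``databricks.sdk.service.settings``, and the field mask path is illustrative:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import settings
+
+    w = WorkspaceClient()
+
+    updated = w.settings.disable_legacy_access.update(
+        allow_missing=True,
+        field_mask="disable_legacy_access.value",  # illustrative field path
+        setting=settings.DisableLegacyAccess(
+            disable_legacy_access=settings.BooleanMessage(value=True),
+        ),
+    )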
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
index 502111fe4..8368bcc71 100644
--- a/docs/workspace/settings/disable_legacy_dbfs.rst
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -5,59 +5,58 @@
 .. py:class:: DisableLegacyDbfsAPI
 
     When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
-    mounts). When the setting is off, all DBFS functionality is enabled
+mounts). When the setting is off, all DBFS functionality is enabled.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse
 
         Delete the disable legacy DBFS setting.
-        
-        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteDisableLegacyDbfsResponse`
-        
+
+Deletes the disable legacy DBFS setting for a workspace, reverting to the default.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteDisableLegacyDbfsResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs
 
         Get the disable legacy DBFS setting.
-        
-        Gets the disable legacy DBFS setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DisableLegacyDbfs`
-        
+
+Gets the disable legacy DBFS setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DisableLegacyDbfs`
+
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs
 
         Update the disable legacy DBFS setting.
-        
-        Updates the disable legacy DBFS setting for the workspace.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`DisableLegacyDbfs`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`DisableLegacyDbfs`
-        
\ No newline at end of file
+
+Updates the disable legacy DBFS setting for the workspace.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`DisableLegacyDbfs`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`DisableLegacyDbfs`
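+
+Reverting the workspace to the default is a single call, assuming a configured client ``w``:
+
+.. code-block::
+
+    w.settings.disable_legacy_dbfs.delete()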
diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst
index c9dfb547d..2b1e6b5f5 100644
--- a/docs/workspace/settings/enhanced_security_monitoring.rst
+++ b/docs/workspace/settings/enhanced_security_monitoring.rst
@@ -5,50 +5,49 @@
 .. py:class:: EnhancedSecurityMonitoringAPI
 
     Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
-    security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
-    compliance security profile is enabled, this is automatically enabled.
-    
-    If the compliance security profile is disabled, you can enable or disable this setting and it is not
-    permanent.
+security profile is enabled, this is automatically enabled. Otherwise, it is disabled by default.
+
+If the compliance security profile is disabled, you can enable or disable this setting and it is not
+permanent.
 
     .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting
 
         Get the enhanced security monitoring setting.
-        
-        Gets the enhanced security monitoring setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`EnhancedSecurityMonitoringSetting`
-        
+
+Gets the enhanced security monitoring setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`EnhancedSecurityMonitoringSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting
 
         Update the enhanced security monitoring setting.
-        
-        Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided
-        in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
-        request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
-        the request must be retried by using the fresh etag in the 409 response.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`EnhancedSecurityMonitoringSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`EnhancedSecurityMonitoringSetting`
-        
\ No newline at end of file
+
+Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided
+in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
+request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
+the request must be retried by using the fresh etag in the 409 response.
+
+:param allow_missing: bool
+  This should always be set to true for Settings API. Added for AIP compliance.
+:param setting: :class:`EnhancedSecurityMonitoringSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`EnhancedSecurityMonitoringSetting`
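+
+The 409 retry protocol described above can be sketched as a small loop; ``ResourceConflict`` is the
+SDK's 409 error class, and the field mask below is illustrative:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.errors import ResourceConflict
+
+    w = WorkspaceClient()
+
+    for _ in range(3):
+        # Re-read on each attempt so the PATCH carries a fresh etag.
+        setting = w.settings.enhanced_security_monitoring.get()
+        # ... mutate fields on `setting` here ...
+        try:
+            w.settings.enhanced_security_monitoring.update(
+                allow_missing=True,
+                setting=setting,
+                field_mask="enhanced_security_monitoring_workspace",  # illustrative
+            )
+            break
+        except ResourceConflict:
+            continue  # concurrent update; retry with a fresh etag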
diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst
index a265c5943..162009162 100644
--- a/docs/workspace/settings/ip_access_lists.rst
+++ b/docs/workspace/settings/ip_access_lists.rst
@@ -5,22 +5,22 @@
 .. py:class:: IpAccessListsAPI
 
     IP Access List enables admins to configure IP access lists.
-    
-    IP access lists affect web application access and REST API access to this workspace only. If the feature
-    is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists
-    (inclusion) and block lists (exclusion).
-    
-    When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
-    matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
-    lists**, the IP address is compared with the allow lists.
-    
-    If there is at least one allow list for the workspace, the connection is allowed only if the IP address
-    matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed.
-    
-    For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values,
-    where one CIDR counts as a single value.
-    
-    After changes to the IP access list feature, it can take a few minutes for changes to take effect.
+
+IP access lists affect web application access and REST API access to this workspace only. If the feature
+is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists
+(inclusion) and block lists (exclusion).
+
+When a connection is attempted:
+
+1. **First, all block lists are checked.** If the connection IP address matches any block list, the
+connection is rejected.
+2. **If the connection was not rejected by block lists**, the IP address is compared with the allow
+lists.
+
+If there is at least one allow list for the workspace, the connection is allowed only if the IP address
+matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed.
+
+For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value.
+
+After changes to the IP access list feature, it can take a few minutes for changes to take effect.
 
     .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse
 
@@ -44,45 +44,45 @@
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Create access list.
-        
-        Creates an IP access list for this workspace.
-        
-        A list can be an allow list or a block list. See the top of this file for a description of how the
-        server treats allow lists and block lists at runtime.
-        
-        When creating or updating an IP access list:
-        
-        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-        `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
-        error 400 is returned with `error_code` value `INVALID_STATE`.
-        
-        It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no
-        effect until you enable the feature. See :method:workspaceconf/setStatus
-        
-        :param label: str
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType`
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        :param ip_addresses: List[str] (optional)
-        
-        :returns: :class:`CreateIpAccessListResponse`
-        
+
+Creates an IP access list for this workspace.
+
+A list can be an allow list or a block list. See the top of this file for a description of how the
+server treats allow lists and block lists at runtime.
+
+When creating or updating an IP access list:
+
+* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+`error_code` value `QUOTA_EXCEEDED`.
+* If the new list would block the calling user's current IP, error 400 is returned with `error_code`
+value `INVALID_STATE`.
+
+It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no
+effect until you enable the feature. See :method:workspaceconf/setStatus
+
+:param label: str
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType`
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range.
+  * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even
+  if they are included in an allow list.
+:param ip_addresses: List[str] (optional)
+
+:returns: :class:`CreateIpAccessListResponse`
+
 
     .. py:method:: delete(ip_access_list_id: str)
 
         Delete access list.
-        
-        Deletes an IP access list, specified by its list ID.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        
-        
-        
+
+Deletes an IP access list, specified by its list ID.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+
+
+
 
     .. py:method:: get(ip_access_list_id: str) -> FetchIpAccessListResponse
 
@@ -108,14 +108,14 @@
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Get access list.
-        
-        Gets an IP access list, specified by its list ID.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        
-        :returns: :class:`FetchIpAccessListResponse`
-        
+
+Gets an IP access list, specified by its list ID.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+
+:returns: :class:`FetchIpAccessListResponse`
+
 
     .. py:method:: list() -> Iterator[IpAccessListInfo]
 
@@ -131,11 +131,11 @@
             all = w.ip_access_lists.list()
 
         Get access lists.
-        
-        Gets all IP access lists for the specified workspace.
-        
-        :returns: Iterator over :class:`IpAccessListInfo`
-        
+
+Gets all IP access lists for the specified workspace.
+
+:returns: Iterator over :class:`IpAccessListInfo`
+
 
     .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]])
 
@@ -165,65 +165,64 @@
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Replace access list.
-        
-        Replaces an IP access list, specified by its ID.
-        
-        A list can include allow lists and block lists. See the top of this file for a description of how the
-        server treats allow lists and block lists at run time. When replacing an IP access list: * For all
-        allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
-        CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
-        `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
-        returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
-        effect. Note that your resulting IP access list has no effect until you enable the feature. See
-        :method:workspaceconf/setStatus.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        :param label: str
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType`
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        :param enabled: bool
-          Specifies whether this IP access list is enabled.
-        :param ip_addresses: List[str] (optional)
-        
-        
-        
+
+Replaces an IP access list, specified by its ID.
+
+A list can include allow lists and block lists. See the top of this file for a description of how the
+server treats allow lists and block lists at run time.
+
+When replacing an IP access list:
+
+* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+`error_code` value `QUOTA_EXCEEDED`.
+* If the resulting list would block the calling user's current IP, error 400 is returned with
+`error_code` value `INVALID_STATE`.
+
+It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
+no effect until you enable the feature. See :method:workspaceconf/setStatus.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+:param label: str
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType`
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range.
+  * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even
+  if they are included in an allow list.
+:param enabled: bool
+  Specifies whether this IP access list is enabled.
+:param ip_addresses: List[str] (optional)
+
+
+
 
     .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]])
 
         Update access list.
-        
-        Updates an existing IP access list, specified by its ID.
-        
-        A list can include allow lists and block lists. See the top of this file for a description of how the
-        server treats allow lists and block lists at run time.
-        
-        When updating an IP access list:
-        
-        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-        `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
-        error 400 is returned with `error_code` value `INVALID_STATE`.
-        
-        It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
-        no effect until you enable the feature. See :method:workspaceconf/setStatus.
-        
-        :param ip_access_list_id: str
-          The ID for the corresponding IP access list
-        :param enabled: bool (optional)
-          Specifies whether this IP access list is enabled.
-        :param ip_addresses: List[str] (optional)
-        :param label: str (optional)
-          Label for the IP access list. This **cannot** be empty.
-        :param list_type: :class:`ListType` (optional)
-          Type of IP access list. Valid values are as follows and are case-sensitive:
-          
-          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-          range. IP addresses in the block list are excluded even if they are included in an allow list.
-        
-        
-        
\ No newline at end of file
+
+Updates an existing IP access list, specified by its ID.
+
+A list can include allow lists and block lists. See the top of this file for a description of how the
+server treats allow lists and block lists at run time.
+
+When updating an IP access list:
+
+* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+`error_code` value `QUOTA_EXCEEDED`.
+* If the updated list would block the calling user's current IP, error 400 is returned with
+`error_code` value `INVALID_STATE`.
+
+It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
+no effect until you enable the feature. See :method:workspaceconf/setStatus.
+
+:param ip_access_list_id: str
+  The ID for the corresponding IP access list
+:param enabled: bool (optional)
+  Specifies whether this IP access list is enabled.
+:param ip_addresses: List[str] (optional)
+:param label: str (optional)
+  Label for the IP access list. This **cannot** be empty.
+:param list_type: :class:`ListType` (optional)
+  Type of IP access list. Valid values are as follows and are case-sensitive:
+  
+  * `ALLOW`: An allow list. Include this IP or range.
+  * `BLOCK`: A block list. Exclude this IP or range. IP addresses in the block list are excluded even
+  if they are included in an allow list.
+
+
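+For example, temporarily disabling a list you created earlier (``created`` as in the examples above):
+
+.. code-block::
+
+    w.ip_access_lists.update(
+        ip_access_list_id=created.ip_access_list.list_id,
+        enabled=False,
+    )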
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
index 8fb2d0c3c..e43383704 100644
--- a/docs/workspace/settings/notification_destinations.rst
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -5,71 +5,70 @@
 .. py:class:: NotificationDestinationsAPI
 
     The notification destinations API lets you programmatically manage a workspace's notification
-    destinations. Notification destinations are used to send notifications for query alerts and jobs to
-    destinations outside of Databricks. Only workspace admins can create, update, and delete notification
-    destinations.
+destinations. Notification destinations are used to send notifications for query alerts and jobs to
+destinations outside of Databricks. Only workspace admins can create, update, and delete notification
+destinations.
 
     .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
 
         Create a notification destination.
-        
-        Creates a notification destination. Requires workspace admin permissions.
-        
-        :param config: :class:`Config` (optional)
-          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
-        :param display_name: str (optional)
-          The display name for the notification destination.
-        
-        :returns: :class:`NotificationDestination`
-        
+
+Creates a notification destination. Requires workspace admin permissions.
+
+:param config: :class:`Config` (optional)
+  The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+:param display_name: str (optional)
+  The display name for the notification destination.
+
+:returns: :class:`NotificationDestination`
+
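+A sketch of creating an email destination; ``Config`` and ``EmailConfig`` are assumed to come from
+``databricks.sdk.service.settings``, and the name and address are placeholders:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import settings
+
+    w = WorkspaceClient()
+
+    dest = w.notification_destinations.create(
+        display_name="ops-alerts",  # placeholder name
+        config=settings.Config(
+            email=settings.EmailConfig(addresses=["ops@example.com"]),  # placeholder address
+        ),
+    )
+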
 
     .. py:method:: delete(id: str)
 
         Delete a notification destination.
-        
-        Deletes a notification destination. Requires workspace admin permissions.
-        
-        :param id: str
-        
-        
-        
+
+Deletes a notification destination. Requires workspace admin permissions.
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> NotificationDestination
 
         Get a notification destination.
-        
-        Gets a notification destination.
-        
-        :param id: str
-        
-        :returns: :class:`NotificationDestination`
-        
+
+Gets a notification destination.
+
+:param id: str
+
+:returns: :class:`NotificationDestination`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult]
 
         List notification destinations.
-        
-        Lists notification destinations.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ListNotificationDestinationsResult`
-        
+
+Lists notification destinations.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ListNotificationDestinationsResult`
+
 
     .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
 
         Update a notification destination.
-        
-        Updates a notification destination. Requires workspace admin permissions. At least one field is
-        required in the request body.
-        
-        :param id: str
-          UUID identifying notification destination.
-        :param config: :class:`Config` (optional)
-          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
-        :param display_name: str (optional)
-          The display name for the notification destination.
-        
-        :returns: :class:`NotificationDestination`
-        
\ No newline at end of file
+
+Updates a notification destination. Requires workspace admin permissions. At least one field is
+required in the request body.
+
+:param id: str
+  UUID identifying notification destination.
+:param config: :class:`Config` (optional)
+  The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+:param display_name: str (optional)
+  The display name for the notification destination.
+
+:returns: :class:`NotificationDestination`
diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst
index b025112cc..9d44b6189 100644
--- a/docs/workspace/settings/restrict_workspace_admins.rst
+++ b/docs/workspace/settings/restrict_workspace_admins.rst
@@ -5,72 +5,71 @@
 .. py:class:: RestrictWorkspaceAdminsAPI
 
     The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the
-    setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
-    behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
-    user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
-    service principal on which they have the Service Principal User role. With the setting status set to
-    RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
-    service principals they have the Service Principal User role on. They can also only change a job owner to
-    themselves. And they can change the job run_as setting to themselves or to a service principal on which
-    they have the Service Principal User role.
+setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
+behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
+user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
+service principal on which they have the Service Principal User role. With the setting status set to
+RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
+service principals they have the Service Principal User role on. They can also only change a job owner to
+themselves. And they can change the job run_as setting to themselves or to a service principal on which
+they have the Service Principal User role.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteRestrictWorkspaceAdminsSettingResponse
 
         Delete the restrict workspace admins setting.
-        
-        Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be
-        provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET`
-        request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with
-        409 and the request must be retried by using the fresh etag in the 409 response.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse`
-        
+
+Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be
+provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET`
+request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with
+409 and the request must be retried by using the fresh etag in the 409 response.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse`
+
 
     .. py:method:: get( [, etag: Optional[str]]) -> RestrictWorkspaceAdminsSetting
 
         Get the restrict workspace admins setting.
-        
-        Gets the restrict workspace admins setting.
-        
-        :param etag: str (optional)
-          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-          request, and pass it with the DELETE request to identify the rule set version you are deleting.
-        
-        :returns: :class:`RestrictWorkspaceAdminsSetting`
-        
+
+Gets the restrict workspace admins setting.
+
+:param etag: str (optional)
+  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+  request, and pass it with the DELETE request to identify the rule set version you are deleting.
+
+:returns: :class:`RestrictWorkspaceAdminsSetting`
+
 
     .. py:method:: update(allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str) -> RestrictWorkspaceAdminsSetting
 
         Update the restrict workspace admins setting.
-        
-        Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in
-        `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request
-        before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
-        request must be retried by using the fresh etag in the 409 response.
-        
-        :param allow_missing: bool
-          This should always be set to true for Settings API. Added for AIP compliance.
-        :param setting: :class:`RestrictWorkspaceAdminsSetting`
-        :param field_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        
-        :returns: :class:`RestrictWorkspaceAdminsSetting`
-        
\ No newline at end of file
+
+Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in
+`PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request
+before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
+request must be retried by using the fresh etag in the 409 response.
+
+:param allow_missing: bool
+  This should always be set to true for the Settings API. Added for AIP compliance.
+:param setting: :class:`RestrictWorkspaceAdminsSetting`
+:param field_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+
+:returns: :class:`RestrictWorkspaceAdminsSetting`
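
A sketch of the read-modify-write flow this implies; the message and enum shapes from
``databricks.sdk.service.settings`` and the mask value are assumptions based on the
models named here, and the mask lists only the field being changed, per the guidance above:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()
    current = w.settings.restrict_workspace_admins.get()  # carries a fresh etag
    current.restrict_workspace_admins.status = (
        settings.RestrictWorkspaceAdminsMessageStatus.RESTRICT_TOKENS_AND_JOB_RUN_AS)
    w.settings.restrict_workspace_admins.update(
        allow_missing=True,
        setting=current,
        field_mask="restrict_workspace_admins.status",
    )
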
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index aa806280e..8338866ec 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -10,77 +10,77 @@
         :type: AibiDashboardEmbeddingAccessPolicyAPI
 
         Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
-        workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
 
     .. py:property:: aibi_dashboard_embedding_approved_domains
         :type: AibiDashboardEmbeddingApprovedDomainsAPI
 
         Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
-        can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
 
     .. py:property:: automatic_cluster_update
         :type: AutomaticClusterUpdateAPI
 
         Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
-        off.
+    off.
 
     .. py:property:: compliance_security_profile
         :type: ComplianceSecurityProfileAPI
 
         Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
-        workspace is permanent. By default, it is turned off.
-        
-        This settings can NOT be disabled once it is enabled.
+    workspace is permanent. By default, it is turned off.
+    
+    This setting can NOT be disabled once it is enabled.
 
     .. py:property:: default_namespace
         :type: DefaultNamespaceAPI
 
         The default namespace setting API allows users to configure the default namespace for a Databricks
-        workspace.
-        
-        Through this API, users can retrieve, set, or modify the default namespace used when queries do not
-        reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
-        default catalog, then a query 'SELECT * FROM myTable' would reference the object
-        'retail_prod.default.myTable' (the schema 'default' is always assumed).
-        
-        This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
-        namespace only applies when using Unity Catalog-enabled compute.
+    workspace.
+    
+    Through this API, users can retrieve, set, or modify the default namespace used when queries do not
+    reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
+    default catalog, then a query 'SELECT * FROM myTable' would reference the object
+    'retail_prod.default.myTable' (the schema 'default' is always assumed).
+    
+    This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
+    namespace only applies when using Unity Catalog-enabled compute.
 
     .. py:property:: disable_legacy_access
         :type: DisableLegacyAccessAPI
 
         'Disabling legacy access' has the following impacts:
-        
-        1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
-        Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
-        Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
-        Unity Catalog access on all path based access.
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+    Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
+    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+    Unity Catalog access on all path-based access.
 
     .. py:property:: disable_legacy_dbfs
         :type: DisableLegacyDbfsAPI
 
         When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
-        mounts). When the setting is off, all DBFS functionality is enabled
+    mounts). When the setting is off, all DBFS functionality is enabled.
 
     .. py:property:: enhanced_security_monitoring
         :type: EnhancedSecurityMonitoringAPI
 
         Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
-        security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
-        compliance security profile is enabled, this is automatically enabled.
-        
-        If the compliance security profile is disabled, you can enable or disable this setting and it is not
-        permanent.
+    security profile is enabled, this is automatically enabled. By default, it is disabled.
+    
+    If the compliance security profile is disabled, you can enable or disable this setting and it is not
+    permanent.
 
     .. py:property:: restrict_workspace_admins
         :type: RestrictWorkspaceAdminsAPI
 
         The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the
-        setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
-        behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
-        user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
-        service principal on which they have the Service Principal User role. With the setting status set to
-        RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
-        service principals they have the Service Principal User role on. They can also only change a job owner to
-        themselves. And they can change the job run_as setting to themselves or to a service principal on which
-        they have the Service Principal User role.
\ No newline at end of file
+    setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
+    behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
+    user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
+    service principal on which they have the Service Principal User role. With the setting status set to
+    RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
+    service principals they have the Service Principal User role on. They can also only change a job owner to
+    themselves. And they can change the job run_as setting to themselves or to a service principal on which
+    they have the Service Principal User role.
\ No newline at end of file
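
Each property above exposes its own get/update/delete surface. A minimal sketch of
reading one of them (the ``namespace.value`` attribute path is an assumption based on
the ``DefaultNamespaceSetting`` model):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    ns = w.settings.default_namespace.get()
    print(ns.namespace.value)  # e.g. 'retail_prod'
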
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index 50dbe1328..a2fe7ddea 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -5,7 +5,7 @@
 .. py:class:: TokenManagementAPI
 
     Enables administrators to get all tokens and delete tokens for other users. Admins can either get every
-    token, get a specific token by ID, or get all tokens for a particular user.
+token, get a specific token by ID, or get all tokens for a particular user.
 
     .. py:method:: create_obo_token(application_id: str [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateOboTokenResponse
 
@@ -33,30 +33,30 @@
             w.token_management.delete(token_id=obo.token_info.token_id)
 
         Create on-behalf token.
-        
-        Creates a token on behalf of a service principal.
-        
-        :param application_id: str
-          Application ID of the service principal.
-        :param comment: str (optional)
-          Comment that describes the purpose of the token.
-        :param lifetime_seconds: int (optional)
-          The number of seconds before the token expires.
-        
-        :returns: :class:`CreateOboTokenResponse`
-        
+
+Creates a token on behalf of a service principal.
+
+:param application_id: str
+  Application ID of the service principal.
+:param comment: str (optional)
+  Comment that describes the purpose of the token.
+:param lifetime_seconds: int (optional)
+  The number of seconds before the token expires.
+
+:returns: :class:`CreateOboTokenResponse`
+
 
     .. py:method:: delete(token_id: str)
 
         Delete a token.
-        
-        Deletes a token, specified by its ID.
-        
-        :param token_id: str
-          The ID of the token to revoke.
-        
-        
-        
+
+Deletes a token, specified by its ID.
+
+:param token_id: str
+  The ID of the token to revoke.
+
+
+
 
     .. py:method:: get(token_id: str) -> GetTokenResponse
 
@@ -86,32 +86,32 @@
             w.token_management.delete(token_id=obo.token_info.token_id)
 
         Get token info.
-        
-        Gets information about a token, specified by its ID.
-        
-        :param token_id: str
-          The ID of the token to get.
-        
-        :returns: :class:`GetTokenResponse`
-        
+
+Gets information about a token, specified by its ID.
+
+:param token_id: str
+  The ID of the token to get.
+
+:returns: :class:`GetTokenResponse`
+
 
     .. py:method:: get_permission_levels() -> GetTokenPermissionLevelsResponse
 
         Get token permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :returns: :class:`GetTokenPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:returns: :class:`GetTokenPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions() -> TokenPermissions
 
         Get token permissions.
-        
-        Gets the permissions of all tokens. Tokens can inherit permissions from their root object.
-        
-        :returns: :class:`TokenPermissions`
-        
+
+Gets the permissions of all tokens. Tokens can inherit permissions from their root object.
+
+:returns: :class:`TokenPermissions`
+
 
     .. py:method:: list( [, created_by_id: Optional[int], created_by_username: Optional[str]]) -> Iterator[TokenInfo]
 
@@ -128,36 +128,35 @@
             all = w.token_management.list(settings.ListTokenManagementRequest())
 
         List all tokens.
-        
-        Lists all tokens associated with the specified workspace or user.
-        
-        :param created_by_id: int (optional)
-          User ID of the user that created the token.
-        :param created_by_username: str (optional)
-          Username of the user that created the token.
-        
-        :returns: Iterator over :class:`TokenInfo`
-        
+
+Lists all tokens associated with the specified workspace or user.
+
+:param created_by_id: int (optional)
+  User ID of the user that created the token.
+:param created_by_username: str (optional)
+  Username of the user that created the token.
+
+:returns: Iterator over :class:`TokenInfo`
+
 
     .. py:method:: set_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions
 
         Set token permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-        
-        :returns: :class:`TokenPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
+
+:returns: :class:`TokenPermissions`
+
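
Since ``set_permissions`` replaces direct grants wholesale, callers typically pass the
full desired list in one call. A sketch, with a hypothetical group name and the
request/enum types assumed from ``databricks.sdk.service.settings``:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()
    w.token_management.set_permissions(access_control_list=[
        settings.TokenAccessControlRequest(
            group_name="data-engineers",  # hypothetical group
            permission_level=settings.TokenPermissionLevel.CAN_USE,
        ),
    ])
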
 
     .. py:method:: update_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions
 
         Update token permissions.
-        
-        Updates the permissions on all tokens. Tokens can inherit permissions from their root object.
-        
-        :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-        
-        :returns: :class:`TokenPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on all tokens. Tokens can inherit permissions from their root object.
+
+:param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
+
+:returns: :class:`TokenPermissions`
diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst
index 899db00d1..273909746 100644
--- a/docs/workspace/settings/tokens.rst
+++ b/docs/workspace/settings/tokens.rst
@@ -5,7 +5,7 @@
 .. py:class:: TokensAPI
 
     The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access
-    Databricks REST APIs.
+Databricks REST APIs.
 
     .. py:method:: create( [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateTokenResponse
 
@@ -26,34 +26,34 @@
             w.tokens.delete(token_id=token.token_info.token_id)
 
         Create a user token.
-        
-        Creates and returns a token for a user. If this call is made through token authentication, it creates
-        a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
-        this call returns an error **QUOTA_EXCEEDED**.
-        
-        :param comment: str (optional)
-          Optional description to attach to the token.
-        :param lifetime_seconds: int (optional)
-          The lifetime of the token, in seconds.
-          
-          If the lifetime is not specified, this token remains valid indefinitely.
-        
-        :returns: :class:`CreateTokenResponse`
-        
+
+Creates and returns a token for a user. If this call is made through token authentication, it creates
+a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
+this call returns an error **QUOTA_EXCEEDED**.
+
+:param comment: str (optional)
+  Optional description to attach to the token.
+:param lifetime_seconds: int (optional)
+  The lifetime of the token, in seconds.
+  
+  If the lifetime is not specified, this token remains valid indefinitely.
+
+:returns: :class:`CreateTokenResponse`
+
 
     .. py:method:: delete(token_id: str)
 
         Revoke token.
-        
-        Revokes an access token.
-        
-        If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.
-        
-        :param token_id: str
-          The ID of the token to be revoked.
-        
-        
-        
+
+Revokes an access token.
+
+If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.
+
+:param token_id: str
+  The ID of the token to be revoked.
+
+
+
 
     .. py:method:: list() -> Iterator[PublicTokenInfo]
 
@@ -69,8 +69,7 @@
             all = w.tokens.list()
 
         List tokens.
-        
-        Lists all the valid tokens for a user-workspace pair.
-        
-        :returns: Iterator over :class:`PublicTokenInfo`
-        
\ No newline at end of file
+
+Lists all the valid tokens for a user-workspace pair.
+
+:returns: Iterator over :class:`PublicTokenInfo`
diff --git a/docs/workspace/settings/workspace_conf.rst b/docs/workspace/settings/workspace_conf.rst
index 3759de043..a3533f564 100644
--- a/docs/workspace/settings/workspace_conf.rst
+++ b/docs/workspace/settings/workspace_conf.rst
@@ -20,20 +20,19 @@
             conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem")
 
         Check configuration status.
-        
-        Gets the configuration status for a workspace.
-        
-        :param keys: str
-        
-        :returns: Dict[str,str]
-        
+
+Gets the configuration status for a workspace.
+
+:param keys: str
+
+:returns: Dict[str,str]
+
 
     .. py:method:: set_status(contents: Dict[str, str])
 
         Enable/disable features.
-        
-        Sets the configuration status for a workspace, including enabling or disabling it.
-        
-        
-        
-        
\ No newline at end of file
+
+Sets the configuration status for a workspace, including enabling or disabling it.
+
+
+
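
A sketch of flipping a configuration key and reading it back, reusing the
``enableWorkspaceFilesystem`` key from the ``get_status`` example above:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    w.workspace_conf.set_status({"enableWorkspaceFilesystem": "true"})
    conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem")
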
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 7d27acc3d..13d7e037b 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -5,7 +5,7 @@
 .. py:class:: ProvidersAPI
 
     A data provider is an object representing the organization in the real world who shares the data. A
-    provider contains shares which further contain the shared data.
+provider contains shares which further contain the shared data.
 
     .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], recipient_profile_str: Optional[str]]) -> ProviderInfo
 
@@ -33,35 +33,35 @@
             w.providers.delete(name=created.name)
 
         Create an auth provider.
-        
-        Creates a new authentication provider minimally based on a name and authentication type. The caller
-        must be an admin on the metastore.
-        
-        :param name: str
-          The name of the Provider.
-        :param authentication_type: :class:`AuthenticationType`
-          The delta sharing authentication type.
-        :param comment: str (optional)
-          Description about the provider.
-        :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
-          or not provided.
-        
-        :returns: :class:`ProviderInfo`
-        
+
+Creates a new authentication provider minimally based on a name and authentication type. The caller
+must be an admin on the metastore.
+
+:param name: str
+  The name of the Provider.
+:param authentication_type: :class:`AuthenticationType`
+  The delta sharing authentication type.
+:param comment: str (optional)
+  Description about the provider.
+:param recipient_profile_str: str (optional)
+  This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+  or not provided.
+
+:returns: :class:`ProviderInfo`
+
 
     .. py:method:: delete(name: str)
 
         Delete a provider.
-        
-        Deletes an authentication provider, if the caller is a metastore admin or is the owner of the
-        provider.
-        
-        :param name: str
-          Name of the provider.
-        
-        
-        
+
+Deletes an authentication provider, if the caller is a metastore admin or is the owner of the
+provider.
+
+:param name: str
+  Name of the provider.
+
+
+
 
     .. py:method:: get(name: str) -> ProviderInfo
 
@@ -91,15 +91,15 @@
             w.providers.delete(name=created.name)
 
         Get a provider.
-        
-        Gets a specific authentication provider. The caller must supply the name of the provider, and must
-        either be a metastore admin or the owner of the provider.
-        
-        :param name: str
-          Name of the provider.
-        
-        :returns: :class:`ProviderInfo`
-        
+
+Gets a specific authentication provider. The caller must supply the name of the provider, and must
+either be a metastore admin or the owner of the provider.
+
+:param name: str
+  Name of the provider.
+
+:returns: :class:`ProviderInfo`
+
 
     .. py:method:: list( [, data_provider_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
@@ -116,27 +116,27 @@
             all = w.providers.list(sharing.ListProvidersRequest())
 
         List providers.
-        
-        Gets an array of available authentication providers. The caller must either be a metastore admin or
-        the owner of the providers. Providers not owned by the caller are not included in the response. There
-        is no guarantee of a specific ordering of the elements in the array.
-        
-        :param data_provider_global_metastore_id: str (optional)
-          If not provided, all providers will be returned. If no providers exist with this ID, no results will
-          be returned.
-        :param max_results: int (optional)
-          Maximum number of providers to return. - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
-          number of returned providers might be less than the specified max_results size, even zero. The only
-          definitive indication that no further providers can be fetched is when the next_page_token is unset
-          from the response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ProviderInfo`
-        
+
+Gets an array of available authentication providers. The caller must either be a metastore admin or
+the owner of the providers. Providers not owned by the caller are not included in the response. There
+is no guarantee of a specific ordering of the elements in the array.
+
+:param data_provider_global_metastore_id: str (optional)
+  If not provided, all providers will be returned. If no providers exist with this ID, no results will
+  be returned.
+:param max_results: int (optional)
+  Maximum number of providers to return. - when set to 0, the page length is set to a server
+  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+  error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
+  number of returned providers might be less than the specified max_results size, even zero. The only
+  definitive indication that no further providers can be fetched is when the next_page_token is unset
+  from the response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ProviderInfo`
+
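
In the Python SDK the returned iterator follows ``next_page_token`` internally, so
callers rarely page by hand; a sketch:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # max_results=0 requests the server-configured page length (recommended above);
    # the iterator keeps fetching pages until next_page_token is unset.
    for p in w.providers.list(max_results=0):
        print(p.name)
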
 
     .. py:method:: list_shares(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderShare]
 
@@ -166,26 +166,26 @@
             w.providers.delete(name=created.name)
 
         List shares by Provider.
-        
-        Gets an array of a specified provider's shares within the metastore where:
-        
-        * the caller is a metastore admin, or * the caller is the owner.
-        
-        :param name: str
-          Name of the provider in which to list shares.
-        :param max_results: int (optional)
-          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
-          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-          value and a server configured value; - when set to a value less than 0, an invalid parameter error
-          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
-          returned shares might be less than the specified max_results size, even zero. The only definitive
-          indication that no further shares can be fetched is when the next_page_token is unset from the
-          response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ProviderShare`
-        
+
+Gets an array of a specified provider's shares within the metastore where:
+
+* the caller is a metastore admin, or
+* the caller is the owner.
+
+:param name: str
+  Name of the provider in which to list shares.
+:param max_results: int (optional)
+  Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+  value and a server configured value; - when set to a value less than 0, an invalid parameter error
+  is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+  returned shares might be less than the specified max_results size, even zero. The only definitive
+  indication that no further shares can be fetched is when the next_page_token is unset from the
+  response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ProviderShare`
+
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], recipient_profile_str: Optional[str]]) -> ProviderInfo
 
@@ -215,22 +215,21 @@
             w.providers.delete(name=created.name)
 
         Update a provider.
-        
-        Updates the information for an authentication provider, if the caller is a metastore admin or is the
-        owner of the provider. If the update changes the provider name, the caller must be both a metastore
-        admin and the owner of the provider.
-        
-        :param name: str
-          Name of the provider.
-        :param comment: str (optional)
-          Description about the provider.
-        :param new_name: str (optional)
-          New name for the provider.
-        :param owner: str (optional)
-          Username of Provider owner.
-        :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
-          or not provided.
-        
-        :returns: :class:`ProviderInfo`
-        
\ No newline at end of file
+
+Updates the information for an authentication provider, if the caller is a metastore admin or is the
+owner of the provider. If the update changes the provider name, the caller must be both a metastore
+admin and the owner of the provider.
+
+:param name: str
+  Name of the provider.
+:param comment: str (optional)
+  Description about the provider.
+:param new_name: str (optional)
+  New name for the provider.
+:param owner: str (optional)
+  Username of Provider owner.
+:param recipient_profile_str: str (optional)
+  This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+  or not provided.
+
+:returns: :class:`ProviderInfo`
diff --git a/docs/workspace/sharing/recipient_activation.rst b/docs/workspace/sharing/recipient_activation.rst
index 2c214d9c0..4ac315098 100644
--- a/docs/workspace/sharing/recipient_activation.rst
+++ b/docs/workspace/sharing/recipient_activation.rst
@@ -5,33 +5,32 @@
 .. py:class:: RecipientActivationAPI
 
     The Recipient Activation API is only applicable in the open sharing model where the recipient object has
-    the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data
-    provider to download the credential file that includes the access token. The recipient will then use the
-    credential file to establish a secure connection with the provider to receive the shared data.
-    
-    Note that you can download the credential file only once. Recipients should treat the downloaded
-    credential as a secret and must not share it outside of their organization.
+the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data
+provider to download the credential file that includes the access token. The recipient will then use the
+credential file to establish a secure connection with the provider to receive the shared data.
+
+Note that you can download the credential file only once. Recipients should treat the downloaded
+credential as a secret and must not share it outside of their organization.
 
     .. py:method:: get_activation_url_info(activation_url: str)
 
         Get a share activation URL.
-        
-        Gets an activation URL for a share.
-        
-        :param activation_url: str
-          The one time activation url. It also accepts activation token.
-        
-        
-        
+
+Gets an activation URL for a share.
+
+:param activation_url: str
+  The one-time activation URL. It also accepts an activation token.
+
+
+
 
     .. py:method:: retrieve_token(activation_url: str) -> RetrieveTokenResponse
 
         Get an access token.
-        
-        Retrieve access token with an activation url. This is a public API without any authentication.
-        
-        :param activation_url: str
-          The one time activation url. It also accepts activation token.
-        
-        :returns: :class:`RetrieveTokenResponse`
-        
\ No newline at end of file
+
+Retrieves an access token using an activation URL. This is a public API that requires no authentication.
+
+:param activation_url: str
+  The one-time activation URL. It also accepts an activation token.
+
+:returns: :class:`RetrieveTokenResponse`
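
A sketch of the recipient side of the flow; the placeholder stands in for the one-time
activation link shared by the provider, and ``bearer_token`` is the field assumed on
``RetrieveTokenResponse``:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.recipient_activation.retrieve_token(
        activation_url="<one-time-activation-url>")
    print(resp.bearer_token)
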
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index 76e1da171..b98291571 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -5,18 +5,18 @@
 .. py:class:: RecipientsAPI
 
     A recipient is an object you create using :method:recipients/create to represent an organization which you
-    want to allow access shares. The way how sharing works differs depending on whether or not your recipient
-    has access to a Databricks workspace that is enabled for Unity Catalog:
-    
-    - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a
-    recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier
-    is the key identifier that enables the secure connection. This sharing mode is called
-    **Databricks-to-Databricks sharing**.
-    
-    - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you
-    create a recipient object, Databricks generates an activation link you can send to the recipient. The
-    recipient follows the activation link to download the credential file, and then uses the credential file
-    to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
+want to allow to access shares. How sharing works differs depending on whether or not your recipient
+has access to a Databricks workspace that is enabled for Unity Catalog:
+
+- For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a
+recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier
+is the key identifier that enables the secure connection. This sharing mode is called
+**Databricks-to-Databricks sharing**.
+
+- For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you
+create a recipient object, Databricks generates an activation link you can send to the recipient. The
+recipient follows the activation link to download the credential file, and then uses the credential file
+to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
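
A sketch of the open sharing mode described above: creating a recipient with ``TOKEN``
authentication makes Databricks generate the activation link (the recipient name is
hypothetical, and ``tokens[0].activation_url`` is the assumed shape for that link):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()
    r = w.recipients.create(name="external-partner",
                            authentication_type=sharing.AuthenticationType.TOKEN)
    print(r.tokens[0].activation_url)  # send this link to the recipient
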
 
     .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo
 
@@ -37,48 +37,48 @@
             w.recipients.delete(name=created.name)
 
         Create a share recipient.
-        
-        Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
-        
-        :param name: str
-          Name of Recipient.
-        :param authentication_type: :class:`AuthenticationType`
-          The delta sharing authentication type.
-        :param comment: str (optional)
-          Description about the recipient.
-        :param data_recipient_global_metastore_id: str (optional)
-          The global Unity Catalog metastore id provided by the data recipient. This field is only present
-          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
-          __cloud__:__region__:__metastore-uuid__.
-        :param expiration_time: int (optional)
-          Expiration timestamp of the token, in epoch milliseconds.
-        :param ip_access_list: :class:`IpAccessList` (optional)
-          IP Access List
-        :param owner: str (optional)
-          Username of the recipient owner.
-        :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs. When provided in update request, the
-          specified properties will override the existing properties. To add and remove properties, one would
-          need to perform a read-modify-write.
-        :param sharing_code: str (optional)
-          The one-time sharing code provided by the data recipient. This field is only present when the
-          __authentication_type__ is **DATABRICKS**.
-        
-        :returns: :class:`RecipientInfo`
-        
+
+Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
+be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
+
+:param name: str
+  Name of Recipient.
+:param authentication_type: :class:`AuthenticationType`
+  The delta sharing authentication type.
+:param comment: str (optional)
+  Description about the recipient.
+:param data_recipient_global_metastore_id: str (optional)
+  The global Unity Catalog metastore id provided by the data recipient. This field is only present
+  when the __authentication_type__ is **DATABRICKS**. The identifier is of format
+  __cloud__:__region__:__metastore-uuid__.
+:param expiration_time: int (optional)
+  Expiration timestamp of the token, in epoch milliseconds.
+:param ip_access_list: :class:`IpAccessList` (optional)
+  IP Access List
+:param owner: str (optional)
+  Username of the recipient owner.
+:param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
+  Recipient properties as a map of string key-value pairs. When provided in an update request, the
+  specified properties will override the existing properties. To add and remove properties, one would
+  need to perform a read-modify-write.
+:param sharing_code: str (optional)
+  The one-time sharing code provided by the data recipient. This field is only present when the
+  __authentication_type__ is **DATABRICKS**.
+
+:returns: :class:`RecipientInfo`
+
 
     .. py:method:: delete(name: str)
 
         Delete a share recipient.
-        
-        Deletes the specified recipient from the metastore. The caller must be the owner of the recipient.
-        
-        :param name: str
-          Name of the recipient.
-        
-        
-        
+
+Deletes the specified recipient from the metastore. The caller must be the owner of the recipient.
+
+:param name: str
+  Name of the recipient.
+
+
+
 
     .. py:method:: get(name: str) -> RecipientInfo
 
@@ -101,16 +101,16 @@
             w.recipients.delete(name=created.name)
 
         Get a share recipient.
-        
-        Gets a share recipient from the metastore if:
-        
-        * the caller is the owner of the share recipient, or: * is a metastore admin
-        
-        :param name: str
-          Name of the recipient.
-        
-        :returns: :class:`RecipientInfo`
-        
+
+Gets a share recipient from the metastore if:
+
+* the caller is the owner of the share recipient, or
+* the caller is a metastore admin
+
+:param name: str
+  Name of the recipient.
+
+:returns: :class:`RecipientInfo`
+
 
     .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[RecipientInfo]
 
@@ -127,28 +127,28 @@
             all = w.recipients.list(sharing.ListRecipientsRequest())
 
         List share recipients.
-        
-        Gets an array of all share recipients within the current metastore where:
-        
-        * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific
-        ordering of the elements in the array.
-        
-        :param data_recipient_global_metastore_id: str (optional)
-          If not provided, all recipients will be returned. If no recipients exist with this ID, no results
-          will be returned.
-        :param max_results: int (optional)
-          Maximum number of recipients to return. - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
-          number of returned recipients might be less than the specified max_results size, even zero. The only
-          definitive indication that no further recipients can be fetched is when the next_page_token is unset
-          from the response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`RecipientInfo`
-        
+
+Gets an array of all share recipients within the current metastore where:
+
+* the caller is a metastore admin, or
+* the caller is the owner.
+
+There is no guarantee of a specific ordering of the elements in the array.
+
+:param data_recipient_global_metastore_id: str (optional)
+  If not provided, all recipients will be returned. If no recipients exist with this ID, no results
+  will be returned.
+:param max_results: int (optional)
+  Maximum number of recipients to return. - when set to 0, the page length is set to a server
+  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+  error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
+  number of returned recipients might be less than the specified max_results size, even zero. The only
+  definitive indication that no further recipients can be fetched is when the next_page_token is unset
+  from the response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`RecipientInfo`
+
 
     .. py:method:: rotate_token(name: str, existing_token_expire_in_seconds: int) -> RecipientInfo
 
@@ -171,19 +171,19 @@
             w.recipients.delete(name=created.name)
 
         Rotate a token.
-        
-        Refreshes the specified recipient's delta sharing authentication token with the provided token info.
-        The caller must be the owner of the recipient.
-        
-        :param name: str
-          The name of the Recipient.
-        :param existing_token_expire_in_seconds: int
-          The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of
-          existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire
-          the existing token immediately, negative number will return an error.
-        
-        :returns: :class:`RecipientInfo`
-        
+
+Refreshes the specified recipient's delta sharing authentication token with the provided token info.
+The caller must be the owner of the recipient.
+
+:param name: str
+  The name of the Recipient.
+:param existing_token_expire_in_seconds: int
+  The expiration time of the bearer token in ISO 8601 format. This can only set the expiration_time of
+  the existing token to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire
+  the existing token immediately; a negative number will return an error.
+
+:returns: :class:`RecipientInfo`
+
 
     .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetRecipientSharePermissionsResponse
 
@@ -206,25 +206,25 @@
             w.recipients.delete(name=created.name)
 
         Get recipient share permissions.
-        
-        Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
-        owner of the Recipient.
-        
-        :param name: str
-          The name of the Recipient.
-        :param max_results: int (optional)
-          Maximum number of permissions to return. - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-          number of returned permissions might be less than the specified max_results size, even zero. The
-          only definitive indication that no further permissions can be fetched is when the next_page_token is
-          unset from the response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: :class:`GetRecipientSharePermissionsResponse`
-        
+
+Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
+owner of the Recipient.
+
+:param name: str
+  The name of the Recipient.
+:param max_results: int (optional)
+  Maximum number of permissions to return. - when set to 0, the page length is set to a server
+  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+  error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+  number of returned permissions might be less than the specified max_results size, even zero. The
+  only definitive indication that no further permissions can be fetched is when the next_page_token is
+  unset from the response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: :class:`GetRecipientSharePermissionsResponse`
+
 
     .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) -> RecipientInfo
 
@@ -247,27 +247,26 @@
             w.recipients.delete(name=created.name)
 
         Update a share recipient.
-        
-        Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
-        the recipient. If the recipient name will be updated, the user must be both a metastore admin and the
-        owner of the recipient.
-        
-        :param name: str
-          Name of the recipient.
-        :param comment: str (optional)
-          Description about the recipient.
-        :param expiration_time: int (optional)
-          Expiration timestamp of the token, in epoch milliseconds.
-        :param ip_access_list: :class:`IpAccessList` (optional)
-          IP Access List
-        :param new_name: str (optional)
-          New name for the recipient. .
-        :param owner: str (optional)
-          Username of the recipient owner.
-        :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs. When provided in update request, the
-          specified properties will override the existing properties. To add and remove properties, one would
-          need to perform a read-modify-write.
-        
-        :returns: :class:`RecipientInfo`
-        
\ No newline at end of file
+
+Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
+the recipient. If the recipient name is updated, the user must be both a metastore admin and the
+owner of the recipient.
+
+:param name: str
+  Name of the recipient.
+:param comment: str (optional)
+  Description about the recipient.
+:param expiration_time: int (optional)
+  Expiration timestamp of the token, in epoch milliseconds.
+:param ip_access_list: :class:`IpAccessList` (optional)
+  IP Access List
+:param new_name: str (optional)
+  New name for the recipient.
+:param owner: str (optional)
+  Username of the recipient owner.
+:param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
+  Recipient properties as a map of string key-value pairs. When provided in an update request, the
+  specified properties will override the existing properties. To add and remove properties, one would
+  need to perform a read-modify-write.
+
+:returns: :class:`RecipientInfo`
diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst
index 4d14b811d..f1c10ebeb 100644
--- a/docs/workspace/sharing/shares.rst
+++ b/docs/workspace/sharing/shares.rst
@@ -5,9 +5,9 @@
 .. py:class:: SharesAPI
 
     A share is a container instantiated with :method:shares/create. Once created you can iteratively register
-    a collection of existing data assets defined within the metastore using :method:shares/update. You can
-    register data assets under their original name, qualified by their original schema, or provide alternate
-    exposed names.
+a collection of existing data assets defined within the metastore using :method:shares/update. You can
+register data assets under their original name, qualified by their original schema, or provide alternate
+exposed names.
 
     .. py:method:: create(name: str [, comment: Optional[str], storage_root: Optional[str]]) -> ShareInfo
 
@@ -28,31 +28,31 @@
             w.shares.delete(name=created_share.name)
 
         Create a share.
-        
-        Creates a new share for data objects. Data objects can be added after creation with **update**. The
-        caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.
-        
-        :param name: str
-          Name of the share.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param storage_root: str (optional)
-          Storage root URL for the share.
-        
-        :returns: :class:`ShareInfo`
-        
+
+Creates a new share for data objects. Data objects can be added after creation with **update**. The
+caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.
+
+:param name: str
+  Name of the share.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param storage_root: str (optional)
+  Storage root URL for the share.
+
+:returns: :class:`ShareInfo`
+
 
     .. py:method:: delete(name: str)
 
         Delete a share.
-        
-        Deletes a data object share from the metastore. The caller must be an owner of the share.
-        
-        :param name: str
-          The name of the share.
-        
-        
-        
+
+Deletes a data object share from the metastore. The caller must be an owner of the share.
+
+:param name: str
+  The name of the share.
+
+
+
 
     .. py:method:: get(name: str [, include_shared_data: Optional[bool]]) -> ShareInfo
 
@@ -75,17 +75,17 @@
             w.shares.delete(name=created_share.name)
 
         Get a share.
-        
-        Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the
-        share.
-        
-        :param name: str
-          The name of the share.
-        :param include_shared_data: bool (optional)
-          Query for data to include in the share.
-        
-        :returns: :class:`ShareInfo`
-        
+
+Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the
+share.
+
+:param name: str
+  The name of the share.
+:param include_shared_data: bool (optional)
+  Query for data to include in the share.
+
+:returns: :class:`ShareInfo`
+
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ShareInfo]
 
@@ -102,46 +102,46 @@
             all = w.shares.list(sharing.ListSharesRequest())
 
         List shares.
-        
-        Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
-        owner of the share. There is no guarantee of a specific ordering of the elements in the array.
-        
-        :param max_results: int (optional)
-          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
-          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-          value and a server configured value; - when set to a value less than 0, an invalid parameter error
-          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
-          returned shares might be less than the specified max_results size, even zero. The only definitive
-          indication that no further shares can be fetched is when the next_page_token is unset from the
-          response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`ShareInfo`
-        
+
+Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
+owner of the share. There is no guarantee of a specific ordering of the elements in the array.
+
+:param max_results: int (optional)
+  Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+  value and a server configured value; - when set to a value less than 0, an invalid parameter error
+  is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+  returned shares might be less than the specified max_results size, even zero. The only definitive
+  indication that no further shares can be fetched is when the next_page_token is unset from the
+  response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: Iterator over :class:`ShareInfo`
+
 
     .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> catalog.PermissionsList
 
         Get permissions.
-        
-        Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
-        owner of the share.
-        
-        :param name: str
-          The name of the share.
-        :param max_results: int (optional)
-          Maximum number of permissions to return. - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-          number of returned permissions might be less than the specified max_results size, even zero. The
-          only definitive indication that no further permissions can be fetched is when the next_page_token is
-          unset from the response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: :class:`PermissionsList`
-        
+
+Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
+owner of the share.
+
+:param name: str
+  The name of the share.
+:param max_results: int (optional)
+  Maximum number of permissions to return. - when set to 0, the page length is set to a server
+  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+  error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+  number of returned permissions might be less than the specified max_results size, even zero. The
+  only definitive indication that no further permissions can be fetched is when the next_page_token is
+  unset from the response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+:returns: :class:`PermissionsList`
+
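
A short sketch of listing the current grants on a share (the share name is
hypothetical; ``privilege_assignments`` is the assumed field on the returned
``PermissionsList``):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    perms = w.shares.share_permissions(name="my_share")
    for a in perms.privilege_assignments:
        print(a.principal, a.privileges)
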
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], storage_root: Optional[str], updates: Optional[List[SharedDataObjectUpdate]]]) -> ShareInfo
 
@@ -189,63 +189,62 @@
             w.shares.delete(name=created_share.name)
 
         Update a share.
-        
-        Updates the share with the changes and data objects in the request. The caller must be the owner of
-        the share or a metastore admin.
-        
-        When the caller is a metastore admin, only the __owner__ field can be updated.
-        
-        In the case that the share name is changed, **updateShare** requires that the caller is both the share
-        owner and a metastore admin.
-        
-        If there are notebook files in the share, the __storage_root__ field cannot be updated.
-        
-        For each table that is added through this method, the share owner must also have **SELECT** privilege
-        on the table. This privilege must be maintained indefinitely for recipients to be able to access the
-        table. Typically, you should use a group as the share owner.
-        
-        Table removals through **update** do not require additional privileges.
-        
-        :param name: str
-          The name of the share.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param new_name: str (optional)
-          New name for the share.
-        :param owner: str (optional)
-          Username of current owner of share.
-        :param storage_root: str (optional)
-          Storage root URL for the share.
-        :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
-          Array of shared data object updates.
-        
-        :returns: :class:`ShareInfo`
-        
+
+Updates the share with the changes and data objects in the request. The caller must be the owner of
+the share or a metastore admin.
+
+When the caller is a metastore admin, only the __owner__ field can be updated.
+
+If the share name is changed, **updateShare** requires that the caller is both the share owner and a
+metastore admin.
+
+If there are notebook files in the share, the __storage_root__ field cannot be updated.
+
+For each table that is added through this method, the share owner must also have **SELECT** privilege
+on the table. This privilege must be maintained indefinitely for recipients to be able to access the
+table. Typically, you should use a group as the share owner.
+
+Table removals through **update** do not require additional privileges.
+
+:param name: str
+  The name of the share.
+:param comment: str (optional)
+  User-provided free-form text description.
+:param new_name: str (optional)
+  New name for the share.
+:param owner: str (optional)
+  Username of current owner of share.
+:param storage_root: str (optional)
+  Storage root URL for the share.
+:param updates: List[:class:`SharedDataObjectUpdate`] (optional)
+  Array of shared data object updates.
+
+:returns: :class:`ShareInfo`
+
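
For example, an owner might update the comment and hand the share to a group in one call (a sketch;
the share and group names are hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Renaming would additionally require the caller to be a metastore admin.
    updated = w.shares.update(name="my-share",
                              comment="Quarterly data for partners",
                              owner="data-sharing-admins")
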
 
     .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]], max_results: Optional[int], page_token: Optional[str]])
 
         Update permissions.
-        
-        Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
-        owner of the share.
-        
-        For new recipient grants, the user must also be the owner of the recipients. recipient revocations do
-        not require additional privileges.
-        
-        :param name: str
-          The name of the share.
-        :param changes: List[:class:`PermissionsChange`] (optional)
-          Array of permission changes.
-        :param max_results: int (optional)
-          Maximum number of permissions to return. - when set to 0, the page length is set to a server
-          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-          number of returned permissions might be less than the specified max_results size, even zero. The
-          only definitive indication that no further permissions can be fetched is when the next_page_token is
-          unset from the response.
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        
-        
\ No newline at end of file
+
+Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
+owner of the share.
+
+For new recipient grants, the user must also be the owner of the recipients. Recipient revocations do
+not require additional privileges.
+
+:param name: str
+  The name of the share.
+:param changes: List[:class:`PermissionsChange`] (optional)
+  Array of permission changes.
+:param max_results: int (optional)
+  Maximum number of permissions to return. - when set to 0, the page length is set to a
+  server-configured value (recommended); - when set to a value greater than 0, the page length is the
+  minimum of this value and a server-configured value; - when set to a value less than 0, an invalid
+  parameter error is returned; - if not set, all valid permissions are returned (not recommended). -
+  Note: the number of returned permissions might be less than the specified max_results size, even
+  zero. The only definitive indication that no further permissions can be fetched is when the
+  next_page_token is unset in the response.
+:param page_token: str (optional)
+  Opaque pagination token to go to next page based on previous query.
+
+
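
A sketch granting **SELECT** on a share to a recipient; note that the change types come from the
catalog service, and the share and recipient names are hypothetical:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    w.shares.update_permissions(name="my-share",
                                changes=[
                                    catalog.PermissionsChange(principal="my-recipient",
                                                              add=[catalog.Privilege.SELECT])
                                ])
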
diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst
index c8d9c31ab..b209b1047 100644
--- a/docs/workspace/sql/alerts.rst
+++ b/docs/workspace/sql/alerts.rst
@@ -5,9 +5,9 @@
 .. py:class:: AlertsAPI
 
     The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
-    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
-    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
-    the Jobs API, e.g. :method:jobs/create.
+periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+the Jobs API, e.g. :method:jobs/create.
 
     .. py:method:: create( [, alert: Optional[CreateAlertRequestAlert]]) -> Alert
 
@@ -45,26 +45,26 @@
             w.alerts.delete(id=alert.id)
 
         Create an alert.
-        
-        Creates an alert.
-        
-        :param alert: :class:`CreateAlertRequestAlert` (optional)
-        
-        :returns: :class:`Alert`
-        
+
+Creates an alert.
+
+:param alert: :class:`CreateAlertRequestAlert` (optional)
+
+:returns: :class:`Alert`
+
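
A condensed sketch creating an alert that fires when the `count` column of the query result exceeds
100 (the query ID is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    alert = w.alerts.create(
        alert=sql.CreateAlertRequestAlert(
            display_name="row-count-alert",
            query_id="12345",  # hypothetical query ID
            condition=sql.AlertCondition(
                operand=sql.AlertConditionOperand(column=sql.AlertOperandColumn(name="count")),
                op=sql.AlertOperator.GREATER_THAN,
                threshold=sql.AlertConditionThreshold(value=sql.AlertOperandValue(double_value=100)))))
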
 
     .. py:method:: delete(id: str)
 
         Delete an alert.
-        
-        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
-        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
-        deleted after 30 days.
-        
-        :param id: str
-        
-        
-        
+
+Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
+can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
+deleted after 30 days.
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> Alert
 
@@ -104,13 +104,13 @@
             w.alerts.delete(id=alert.id)
 
         Get an alert.
-        
-        Gets an alert.
-        
-        :param id: str
-        
-        :returns: :class:`Alert`
-        
+
+Gets an alert.
+
+:param id: str
+
+:returns: :class:`Alert`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListAlertsResponseAlert]
 
@@ -127,15 +127,15 @@
             all = w.alerts.list(sql.ListAlertsRequest())
 
         List alerts.
-        
-        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
-        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ListAlertsResponseAlert`
-        
+
+Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
+concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ListAlertsResponseAlert`
+
 
     .. py:method:: update(id: str, update_mask: str [, alert: Optional[UpdateAlertRequestAlert]]) -> Alert
 
@@ -177,21 +177,20 @@
             w.alerts.delete(id=alert.id)
 
         Update an alert.
-        
-        Updates an alert.
-        
-        :param id: str
-        :param update_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        :param alert: :class:`UpdateAlertRequestAlert` (optional)
-        
-        :returns: :class:`Alert`
-        
\ No newline at end of file
+
+Updates an alert.
+
+:param id: str
+:param update_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+:param alert: :class:`UpdateAlertRequestAlert` (optional)
+
+:returns: :class:`Alert`
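
For instance, to rename an alert while leaving every other field untouched, list only
`display_name` in the mask (a sketch; the alert ID is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    alert = w.alerts.update(id="abcd1234",  # hypothetical alert ID
                            update_mask="display_name",
                            alert=sql.UpdateAlertRequestAlert(display_name="renamed-alert"))
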
diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst
index 6dfd96128..e5f11673e 100644
--- a/docs/workspace/sql/alerts_legacy.rst
+++ b/docs/workspace/sql/alerts_legacy.rst
@@ -5,110 +5,109 @@
 .. py:class:: AlertsLegacyAPI
 
     The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
-    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
-    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
-    the Jobs API, e.g. :method:jobs/create.
-    
-    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-    more]
-    
-    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+the Jobs API, e.g. :method:jobs/create.
+
+**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert
 
         Create an alert.
-        
-        Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
-        condition of its result, and notifies users or notification destinations if the condition was met.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param name: str
-          Name of the alert.
-        :param options: :class:`AlertOptions`
-          Alert configuration options.
-        :param query_id: str
-          Query ID.
-        :param parent: str (optional)
-          The identifier of the workspace folder containing the object.
-        :param rearm: int (optional)
-          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-          If `null`, alert will never be triggered again.
-        
-        :returns: :class:`LegacyAlert`
-        
+
+Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
+condition of its result, and notifies users or notification destinations if the condition was met.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param name: str
+  Name of the alert.
+:param options: :class:`AlertOptions`
+  Alert configuration options.
+:param query_id: str
+  Query ID.
+:param parent: str (optional)
+  The identifier of the workspace folder containing the object.
+:param rearm: int (optional)
+  Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+  If `null`, the alert will never be triggered again.
+
+:returns: :class:`LegacyAlert`
+
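
A sketch of the legacy call, pairing a simple threshold condition with a one-hour rearm (the query
ID is hypothetical; prefer :method:alerts/create for new code):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    alert = w.alerts_legacy.create(name="row-count-alert",
                                   options=sql.AlertOptions(column="count", op=">", value="100"),
                                   query_id="12345",  # hypothetical query ID
                                   rearm=3600)  # retrigger at most once an hour
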
 
     .. py:method:: delete(alert_id: str)
 
         Delete an alert.
-        
-        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
-        queries and dashboards, alerts cannot be moved to the trash.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param alert_id: str
-        
-        
-        
+
+Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
+queries and dashboards, alerts cannot be moved to the trash.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param alert_id: str
+
+
+
 
     .. py:method:: get(alert_id: str) -> LegacyAlert
 
         Get an alert.
-        
-        Gets an alert.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param alert_id: str
-        
-        :returns: :class:`LegacyAlert`
-        
+
+Gets an alert.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param alert_id: str
+
+:returns: :class:`LegacyAlert`
+
 
     .. py:method:: list() -> Iterator[LegacyAlert]
 
         Get alerts.
-        
-        Gets a list of alerts.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :returns: Iterator over :class:`LegacyAlert`
-        
+
+Gets a list of alerts.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:returns: Iterator over :class:`LegacyAlert`
+
 
     .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]])
 
         Update an alert.
-        
-        Updates an alert.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param alert_id: str
-        :param name: str
-          Name of the alert.
-        :param options: :class:`AlertOptions`
-          Alert configuration options.
-        :param query_id: str
-          Query ID.
-        :param rearm: int (optional)
-          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-          If `null`, alert will never be triggered again.
-        
-        
-        
\ No newline at end of file
+
+Updates an alert.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param alert_id: str
+:param name: str
+  Name of the alert.
+:param options: :class:`AlertOptions`
+  Alert configuration options.
+:param query_id: str
+  Query ID.
+:param rearm: int (optional)
+  Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+  If `null`, the alert will never be triggered again.
+
+
diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst
index d4bbcde1d..df6a37f35 100644
--- a/docs/workspace/sql/dashboard_widgets.rst
+++ b/docs/workspace/sql/dashboard_widgets.rst
@@ -5,52 +5,51 @@
 .. py:class:: DashboardWidgetsAPI
 
     This is an evolving API that facilitates the addition and removal of widgets from existing dashboards
-    within the Databricks Workspace. Data structures may change over time.
+within the Databricks Workspace. Data structures may change over time.
 
     .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
 
         Add widget to a dashboard.
-        
-        :param dashboard_id: str
-          Dashboard ID returned by :method:dashboards/create.
-        :param options: :class:`WidgetOptions`
-        :param width: int
-          Width of a widget
-        :param text: str (optional)
-          If this is a textbox widget, the application displays this text. This field is ignored if the widget
-          contains a visualization in the `visualization` field.
-        :param visualization_id: str (optional)
-          Query Vizualization ID returned by :method:queryvisualizations/create.
-        
-        :returns: :class:`Widget`
-        
+
+:param dashboard_id: str
+  Dashboard ID returned by :method:dashboards/create.
+:param options: :class:`WidgetOptions`
+:param width: int
+  Width of a widget
+:param text: str (optional)
+  If this is a textbox widget, the application displays this text. This field is ignored if the widget
+  contains a visualization in the `visualization` field.
+:param visualization_id: str (optional)
+  Query Visualization ID returned by :method:queryvisualizations/create.
+
+:returns: :class:`Widget`
+
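
For example, a textbox widget needs only the dashboard ID, empty options, a width, and the text to
display (a sketch; the dashboard ID is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    widget = w.dashboard_widgets.create(dashboard_id="abcd1234",  # hypothetical dashboard ID
                                        options=sql.WidgetOptions(),
                                        width=1,
                                        text="**Sales overview**")
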
 
     .. py:method:: delete(id: str)
 
         Remove widget.
-        
-        :param id: str
-          Widget ID returned by :method:dashboardwidgets/create
-        
-        
-        
+
+:param id: str
+  Widget ID returned by :method:dashboardwidgets/create
+
+
+
 
     .. py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
 
         Update existing widget.
-        
-        :param id: str
-          Widget ID returned by :method:dashboardwidgets/create
-        :param dashboard_id: str
-          Dashboard ID returned by :method:dashboards/create.
-        :param options: :class:`WidgetOptions`
-        :param width: int
-          Width of a widget
-        :param text: str (optional)
-          If this is a textbox widget, the application displays this text. This field is ignored if the widget
-          contains a visualization in the `visualization` field.
-        :param visualization_id: str (optional)
-          Query Vizualization ID returned by :method:queryvisualizations/create.
-        
-        :returns: :class:`Widget`
-        
\ No newline at end of file
+
+:param id: str
+  Widget ID returned by :method:dashboardwidgets/create
+:param dashboard_id: str
+  Dashboard ID returned by :method:dashboards/create.
+:param options: :class:`WidgetOptions`
+:param width: int
+  Width of a widget
+:param text: str (optional)
+  If this is a textbox widget, the application displays this text. This field is ignored if the widget
+  contains a visualization in the `visualization` field.
+:param visualization_id: str (optional)
+  Query Visualization ID returned by :method:queryvisualizations/create.
+
+:returns: :class:`Widget`
diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst
index f22c7c96b..2b44a1edd 100644
--- a/docs/workspace/sql/dashboards.rst
+++ b/docs/workspace/sql/dashboards.rst
@@ -5,10 +5,10 @@
 .. py:class:: DashboardsAPI
 
     In general, there is little need to modify dashboards using the API. However, it can be useful to use
-    dashboard objects to look-up a collection of related query IDs. The API can also be used to duplicate
-    multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it
-    to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g.
-    :method:jobs/create.
+dashboard objects to look up a collection of related query IDs. The API can also be used to duplicate
+multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it
+to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g.
+:method:jobs/create.
 
     .. py:method:: create(name: str [, dashboard_filters_enabled: Optional[bool], is_favorite: Optional[bool], parent: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard
 
@@ -29,22 +29,22 @@
             w.dashboards.delete(dashboard_id=created.id)
 
         Create a dashboard object.
-        
-        :param name: str
-          The title of this dashboard that appears in list views and at the top of the dashboard page.
-        :param dashboard_filters_enabled: bool (optional)
-          Indicates whether the dashboard filters are enabled
-        :param is_favorite: bool (optional)
-          Indicates whether this dashboard object should appear in the current user's favorites list.
-        :param parent: str (optional)
-          The identifier of the workspace folder containing the object.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
-        
-        :returns: :class:`Dashboard`
-        
+
+:param name: str
+  The title of this dashboard that appears in list views and at the top of the dashboard page.
+:param dashboard_filters_enabled: bool (optional)
+  Indicates whether the dashboard filters are enabled
+:param is_favorite: bool (optional)
+  Indicates whether this dashboard object should appear in the current user's favorites list.
+:param parent: str (optional)
+  The identifier of the workspace folder containing the object.
+:param run_as_role: :class:`RunAsRole` (optional)
+  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+:param tags: List[str] (optional)
+
+:returns: :class:`Dashboard`
+
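
A minimal creation call needs only a title; everything else is optional (a sketch):

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    dashboard = w.dashboards.create(name=f"sdk-example-{time.time_ns()}", is_favorite=True)
    print(dashboard.id)
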
 
     .. py:method:: delete(dashboard_id: str)
 
@@ -67,14 +67,14 @@
             w.dashboards.delete(dashboard_id=created.id)
 
         Remove a dashboard.
-        
-        Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot
-        be shared.
-        
-        :param dashboard_id: str
-        
-        
-        
+
+Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot
+be shared.
+
+:param dashboard_id: str
+
+
+
 
     .. py:method:: get(dashboard_id: str) -> Dashboard
 
@@ -97,13 +97,13 @@
             w.dashboards.delete(dashboard_id=created.id)
 
         Retrieve a definition.
-        
-        Returns a JSON representation of a dashboard object, including its visualization and query objects.
-        
-        :param dashboard_id: str
-        
-        :returns: :class:`Dashboard`
-        
+
+Returns a JSON representation of a dashboard object, including its visualization and query objects.
+
+:param dashboard_id: str
+
+:returns: :class:`Dashboard`
+
 
     .. py:method:: list( [, order: Optional[ListOrder], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[Dashboard]
 
@@ -120,23 +120,23 @@
             all = w.dashboards.list(sql.ListDashboardsRequest())
 
         Get dashboard objects.
-        
-        Fetch a paginated list of dashboard objects.
-        
-        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
-        degradation, or a temporary ban.
-        
-        :param order: :class:`ListOrder` (optional)
-          Name of dashboard attribute to order by.
-        :param page: int (optional)
-          Page number to retrieve.
-        :param page_size: int (optional)
-          Number of dashboards to return per page.
-        :param q: str (optional)
-          Full text search term.
-        
-        :returns: Iterator over :class:`Dashboard`
-        
+
+Fetch a paginated list of dashboard objects.
+
+**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+degradation, or a temporary ban.
+
+:param order: :class:`ListOrder` (optional)
+  Name of dashboard attribute to order by.
+:param page: int (optional)
+  Page number to retrieve.
+:param page_size: int (optional)
+  Number of dashboards to return per page.
+:param q: str (optional)
+  Full text search term.
+
+:returns: Iterator over :class:`Dashboard`
+
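
For example, to page through dashboards matching a search term, ordered by creation time (a sketch;
the `ListOrder` value is assumed from the sql service):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # The iterator requests one page at a time; keep concurrency low to avoid throttling.
    for dashboard in w.dashboards.list(order=sql.ListOrder.CREATED_AT, page_size=25, q="sales"):
        print(dashboard.name)
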
 
     .. py:method:: restore(dashboard_id: str)
 
@@ -159,30 +159,29 @@
             w.dashboards.delete(dashboard_id=created.id)
 
         Restore a dashboard.
-        
-        A restored dashboard appears in list views and searches and can be shared.
-        
-        :param dashboard_id: str
-        
-        
-        
+
+A restored dashboard appears in list views and searches and can be shared.
+
+:param dashboard_id: str
+
+
+
 
     .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard
 
         Change a dashboard definition.
-        
-        Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
-        does not add, modify, or remove widgets.
-        
-        **Note**: You cannot undo this operation.
-        
-        :param dashboard_id: str
-        :param name: str (optional)
-          The title of this dashboard that appears in list views and at the top of the dashboard page.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
-        
-        :returns: :class:`Dashboard`
-        
\ No newline at end of file
+
+Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
+does not add, modify, or remove widgets.
+
+**Note**: You cannot undo this operation.
+
+:param dashboard_id: str
+:param name: str (optional)
+  The title of this dashboard that appears in list views and at the top of the dashboard page.
+:param run_as_role: :class:`RunAsRole` (optional)
+  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+:param tags: List[str] (optional)
+
+:returns: :class:`Dashboard`
diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst
index 8f7321fa0..4b05ce137 100644
--- a/docs/workspace/sql/data_sources.rst
+++ b/docs/workspace/sql/data_sources.rst
@@ -5,16 +5,16 @@
 .. py:class:: DataSourcesAPI
 
     This API is provided to assist you in making new query objects. When creating a query object, you may
-    optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't
-    already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
-    
-    This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
-    advise you to use any text editor, REST client, or `grep` to search the response from this API for the
-    name of your SQL warehouse as it appears in Databricks SQL.
-    
-    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't
+already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
+
+This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
+advise you to use any text editor, REST client, or `grep` to search the response from this API for the
+name of your SQL warehouse as it appears in Databricks SQL.
+
+**Note**: A new version of the Databricks SQL API is now available. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: list() -> Iterator[DataSource]
 
@@ -30,15 +30,14 @@
             srcs = w.data_sources.list()
 
         Get a list of SQL warehouses.
-        
-        Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
-        API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
-        queries against it.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :returns: Iterator over :class:`DataSource`
-        
\ No newline at end of file
+
+Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
+API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
+queries against it.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:returns: Iterator over :class:`DataSource`
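
Since the endpoint offers no server-side search, filtering the full listing client-side is the
usual pattern (the warehouse name is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Find the data_source_id for a warehouse as it is named in Databricks SQL.
    src = next(s for s in w.data_sources.list() if s.name == "Shared Warehouse")
    print(src.id)
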
diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst
index 7f9e5d19c..18da30ff0 100644
--- a/docs/workspace/sql/dbsql_permissions.rst
+++ b/docs/workspace/sql/dbsql_permissions.rst
@@ -5,78 +5,77 @@
 .. py:class:: DbsqlPermissionsAPI
 
     The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes
-    only one endpoint, which gets the Access Control List for a given object. You cannot modify any
-    permissions using this API.
-    
-    There are three levels of permission:
-    
-    - `CAN_VIEW`: Allows read-only access
-    
-    - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
-    
-    - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
-    
-    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+only one endpoint, which gets the Access Control List for a given object. You cannot modify any
+permissions using this API.
+
+There are three levels of permission:
+
+- `CAN_VIEW`: Allows read-only access
+
+- `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
+
+- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
+
+**Note**: A new version of the Databricks SQL API is now available. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse
 
         Get object ACL.
-        
-        Gets a JSON representation of the access control list (ACL) for a specified object.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use
-        :method:workspace/getpermissions instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param object_type: :class:`ObjectTypePlural`
-          The type of object permissions to check.
-        :param object_id: str
-          Object ID. An ACL is returned for the object with this UUID.
-        
-        :returns: :class:`GetResponse`
-        
+
+Gets a JSON representation of the access control list (ACL) for a specified object.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use
+:method:workspace/getpermissions instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param object_type: :class:`ObjectTypePlural`
+  The type of object permissions to check.
+:param object_id: str
+  Object ID. An ACL is returned for the object with this UUID.
+
+:returns: :class:`GetResponse`
+
 
     .. py:method:: set(object_type: ObjectTypePlural, object_id: str [, access_control_list: Optional[List[AccessControl]]]) -> SetResponse
 
         Set object ACL.
-        
-        Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
-        ACL.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use
-        :method:workspace/setpermissions instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param object_type: :class:`ObjectTypePlural`
-          The type of object permission to set.
-        :param object_id: str
-          Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
-        :param access_control_list: List[:class:`AccessControl`] (optional)
-        
-        :returns: :class:`SetResponse`
-        
+
+Sets the access control list (ACL) for a specified object. This operation completely rewrites the
+ACL.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use
+:method:workspace/setpermissions instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param object_type: :class:`ObjectTypePlural`
+  The type of object permission to set.
+:param object_id: str
+  Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
+:param access_control_list: List[:class:`AccessControl`] (optional)
+
+:returns: :class:`SetResponse`
+
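
Because `set` rewrites the whole ACL, pass the complete list of entries that should remain, not
just the delta (a sketch; the query UUID and user are hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    w.dbsql_permissions.set(object_type=sql.ObjectTypePlural.QUERIES,
                            object_id="abcd1234",  # hypothetical query UUID
                            access_control_list=[
                                sql.AccessControl(user_name="user@example.com",
                                                  permission_level=sql.PermissionLevel.CAN_RUN)
                            ])
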
 
     .. py:method:: transfer_ownership(object_type: OwnableObjectType, object_id: TransferOwnershipObjectId [, new_owner: Optional[str]]) -> Success
 
         Transfer object ownership.
-        
-        Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
-        
-        **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
-        :method:queries/update and :method:alerts/update respectively instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param object_type: :class:`OwnableObjectType`
-          The type of object on which to change ownership.
-        :param object_id: :class:`TransferOwnershipObjectId`
-          The ID of the object on which to change ownership.
-        :param new_owner: str (optional)
-          Email address for the new owner, who must exist in the workspace.
-        
-        :returns: :class:`Success`
-        
\ No newline at end of file
+
+Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
+
+**Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
+:method:queries/update and :method:alerts/update respectively instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param object_type: :class:`OwnableObjectType`
+  The type of object on which to change ownership.
+:param object_id: :class:`TransferOwnershipObjectId`
+  The ID of the object on which to change ownership.
+:param new_owner: str (optional)
+  Email address for the new owner, who must exist in the workspace.
+
+:returns: :class:`Success`
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index 959552850..8c7b356e2 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -5,8 +5,8 @@
 .. py:class:: QueriesAPI
 
     The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
-    includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
-    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
+scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
 
     .. py:method:: create( [, query: Optional[CreateQueryRequestQuery]]) -> Query
 
@@ -33,26 +33,26 @@
             w.queries.delete(id=query.id)
 
         Create a query.
-        
-        Creates a query.
-        
-        :param query: :class:`CreateQueryRequestQuery` (optional)
-        
-        :returns: :class:`Query`
-        
+
+Creates a query.
+
+:param query: :class:`CreateQueryRequestQuery` (optional)
+
+:returns: :class:`Query`
+
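
A condensed sketch of query creation; the warehouse ID is looked up through the Data Sources API:

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    src = next(iter(w.data_sources.list()))
    query = w.queries.create(query=sql.CreateQueryRequestQuery(
        display_name=f"sdk-example-{time.time_ns()}",
        warehouse_id=src.warehouse_id,
        query_text="SELECT 1"))
    print(query.id)
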
 
     .. py:method:: delete(id: str)
 
         Delete a query.
-        
-        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
-        cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
-        permanently deleted after 30 days.
-        
-        :param id: str
-        
-        
-        
+
+Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
+permanently deleted after 30 days.
+
+:param id: str
+
+
+
 
     .. py:method:: get(id: str) -> Query
 
@@ -81,39 +81,39 @@
             w.queries.delete(id=query.id)
 
         Get a query.
-        
-        Gets a query.
-        
-        :param id: str
-        
-        :returns: :class:`Query`
-        
+
+Gets a query.
+
+:param id: str
+
+:returns: :class:`Query`
+
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery]
 
         List queries.
-        
-        Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
-        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-        
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
-        
+
+Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
+concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`ListQueryObjectsResponseQuery`
+
 
     .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization]
 
         List visualizations on a query.
-        
-        Gets a list of visualizations on a query.
-        
-        :param id: str
-        :param page_size: int (optional)
-        :param page_token: str (optional)
-        
-        :returns: Iterator over :class:`Visualization`
-        
+
+Gets a list of visualizations on a query.
+
+:param id: str
+:param page_size: int (optional)
+:param page_token: str (optional)
+
+:returns: Iterator over :class:`Visualization`
+
 
     .. py:method:: update(id: str, update_mask: str [, query: Optional[UpdateQueryRequestQuery]]) -> Query
 
@@ -146,21 +146,20 @@
             w.queries.delete(id=query.id)
 
         Update a query.
-        
-        Updates a query.
-        
-        :param id: str
-        :param update_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        :param query: :class:`UpdateQueryRequestQuery` (optional)
-        
-        :returns: :class:`Query`
-        
\ No newline at end of file
+
+Updates a query.
+
+:param id: str
+:param update_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+:param query: :class:`UpdateQueryRequestQuery` (optional)
+
+:returns: :class:`Query`
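
As with alerts, restrict the mask to the fields actually being changed; `*` replaces the whole
object (a sketch; the query ID is hypothetical):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    query = w.queries.update(id="abcd1234",  # hypothetical query ID
                             update_mask="display_name,query_text",
                             query=sql.UpdateQueryRequestQuery(display_name="renamed-query",
                                                               query_text="SELECT 2"))
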
diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst
index a7ab56836..694be0946 100644
--- a/docs/workspace/sql/queries_legacy.rst
+++ b/docs/workspace/sql/queries_legacy.rst
@@ -5,179 +5,178 @@
 .. py:class:: QueriesLegacyAPI
 
     These endpoints are used for CRUD operations on query definitions. Query definitions include the target
-    SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
-    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
-    
-    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-    more]
-    
-    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
+scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+
+**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
 
         Create a new query definition.
-        
-        Creates a new query definition. Queries created with this endpoint belong to the authenticated user
-        making the request.
-        
-        The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
-        use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
-        `data_source_id` from an existing query.
-        
-        **Note**: You cannot add a visualization until you create the query.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param data_source_id: str (optional)
-          Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
-          
-          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-        :param description: str (optional)
-          General description that conveys additional information about this query such as usage notes.
-        :param name: str (optional)
-          The title of this query that appears in list views, widget headings, and on the query page.
-        :param options: Any (optional)
-          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-          overridden at runtime.
-        :param parent: str (optional)
-          The identifier of the workspace folder containing the object.
-        :param query: str (optional)
-          The text of the query to be run.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
-        
-        :returns: :class:`LegacyQuery`
-        
+
+Creates a new query definition. Queries created with this endpoint belong to the authenticated user
+making the request.
+
+The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
+use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
+`data_source_id` from an existing query.
+
+**Note**: You cannot add a visualization until you create the query.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param data_source_id: str (optional)
+  Data source ID maps to the ID of the data source used by the resource and is distinct from the
+  warehouse ID. [Learn more]
+  
+  [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+:param description: str (optional)
+  General description that conveys additional information about this query such as usage notes.
+:param name: str (optional)
+  The title of this query that appears in list views, widget headings, and on the query page.
+:param options: Any (optional)
+  Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`,
+  `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+  overridden at runtime.
+:param parent: str (optional)
+  The identifier of the workspace folder containing the object.
+:param query: str (optional)
+  The text of the query to be run.
+:param run_as_role: :class:`RunAsRole` (optional)
+  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+:param tags: List[str] (optional)
+
+:returns: :class:`LegacyQuery`
+
 
     .. py:method:: delete(query_id: str)
 
         Delete a query.
-        
-        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
-        they cannot be used for alerts. The trash is deleted after 30 days.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param query_id: str
-        
-        
-        
+
+Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+they cannot be used for alerts. The trash is deleted after 30 days.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param query_id: str
+
+
+
 
     .. py:method:: get(query_id: str) -> LegacyQuery
 
         Get a query definition.
-        
-        Retrieve a query object definition along with contextual permissions information about the currently
-        authenticated user.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param query_id: str
-        
-        :returns: :class:`LegacyQuery`
-        
+
+Retrieve a query object definition along with contextual permissions information about the currently
+authenticated user.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param query_id: str
+
+:returns: :class:`LegacyQuery`
+
 
     .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery]
 
         Get a list of queries.
-        
-        Gets a list of queries. Optionally, this list can be filtered by a search term.
-        
-        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
-        degradation, or a temporary ban.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param order: str (optional)
-          Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
-          descending instead.
-          
-          - `name`: The name of the query.
-          
-          - `created_at`: The timestamp the query was created.
-          
-          - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
-          value is treated as the highest value for sorting.
-          
-          - `executed_at`: The timestamp when the query was last run.
-          
-          - `created_by`: The user name of the user that created the query.
-        :param page: int (optional)
-          Page number to retrieve.
-        :param page_size: int (optional)
-          Number of queries to return per page.
-        :param q: str (optional)
-          Full text search term
-        
-        :returns: Iterator over :class:`LegacyQuery`
-        
+
+Gets a list of queries. Optionally, this list can be filtered by a search term.
+
+**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+degradation, or a temporary ban.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param order: str (optional)
+  Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
+  descending instead.
+  
+  - `name`: The name of the query.
+  
+  - `created_at`: The timestamp the query was created.
+  
+  - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
+  value is treated as the highest value for sorting.
+  
+  - `executed_at`: The timestamp when the query was last run.
+  
+  - `created_by`: The user name of the user that created the query.
+:param page: int (optional)
+  Page number to retrieve.
+:param page_size: int (optional)
+  Number of queries to return per page.
+:param q: str (optional)
+  Full text search term.
+
+:returns: Iterator over :class:`LegacyQuery`
+
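
For example, to fetch the most recently executed queries first, prefix the attribute with a dash
(a sketch; prefer :method:queries/list for new code):

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    for q in w.queries_legacy.list(order="-executed_at", page_size=50):
        print(q.name)
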
 
     .. py:method:: restore(query_id: str)
 
         Restore a query.
-        
-        Restore a query that has been moved to the trash. A restored query appears in list views and searches.
-        You can use restored queries for alerts.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
-        [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param query_id: str
-        
-        
-        
+
+Restore a query that has been moved to the trash. A restored query appears in list views and searches.
+You can use restored queries for alerts.
+
+**Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
+[Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param query_id: str
+
+
+
 
     .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
 
         Change a query definition.
-        
-        Modify this query definition.
-        
-        **Note**: You cannot undo this operation.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
-        instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param query_id: str
-        :param data_source_id: str (optional)
-          Data source ID maps to the ID of the data source used by the resource and is distinct from the
-          warehouse ID. [Learn more]
-          
-          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-        :param description: str (optional)
-          General description that conveys additional information about this query such as usage notes.
-        :param name: str (optional)
-          The title of this query that appears in list views, widget headings, and on the query page.
-        :param options: Any (optional)
-          Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-          overridden at runtime.
-        :param query: str (optional)
-          The text of the query to be run.
-        :param run_as_role: :class:`RunAsRole` (optional)
-          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-        :param tags: List[str] (optional)
-        
-        :returns: :class:`LegacyQuery`
-        
\ No newline at end of file
+
+Modify this query definition.
+
+**Note**: You cannot undo this operation.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
+instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param query_id: str
+:param data_source_id: str (optional)
+  Data source ID maps to the ID of the data source used by the resource and is distinct from the
+  warehouse ID. [Learn more]
+  
+  [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+:param description: str (optional)
+  General description that conveys additional information about this query such as usage notes.
+:param name: str (optional)
+  The title of this query that appears in list views, widget headings, and on the query page.
+:param options: Any (optional)
+  Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`,
+  `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+  overridden at runtime.
+:param query: str (optional)
+  The text of the query to be run.
+:param run_as_role: :class:`RunAsRole` (optional)
+  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+:param tags: List[str] (optional)
+
+:returns: :class:`LegacyQuery`
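+
+A minimal sketch (assuming a configured `WorkspaceClient` bound to `w`; the query ID and the new
+definition are illustrative):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+
+    # Rename a legacy query and replace its SQL text. Note that this
+    # operation cannot be undone.
+    updated = w.queries_legacy.update(
+        query_id="12345",
+        name="Daily revenue",
+        query="SELECT sum(revenue) FROM sales WHERE sale_date = current_date()",
+    )
+    print(updated.name)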
diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst
index 2f5520cdf..30d1db6ce 100644
--- a/docs/workspace/sql/query_history.rst
+++ b/docs/workspace/sql/query_history.rst
@@ -5,7 +5,7 @@
 .. py:class:: QueryHistoryAPI
 
     A service responsible for storing and retrieving the list of queries run against SQL endpoints and
-    serverless compute.
+serverless compute.
 
     .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse
 
@@ -23,24 +23,23 @@
                 query_start_time_range=sql.TimeRange(start_time_ms=1690243200000, end_time_ms=1690329600000)))
 
         List Queries.
-        
-        List the history of queries through SQL warehouses, and serverless compute.
-        
-        You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
-        returned first (up to max_results in request). The pagination token returned in response can be used
-        to list subsequent query statuses.
-        
-        :param filter_by: :class:`QueryFilter` (optional)
-          A filter to limit query history results. This field is optional.
-        :param include_metrics: bool (optional)
-          Whether to include the query metrics with each query. Only use this for a small subset of queries
-          (max_results). Defaults to false.
-        :param max_results: int (optional)
-          Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
-        :param page_token: str (optional)
-          A token that can be used to get the next page of results. The token can contains characters that
-          need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
-          %2B. This field is optional.
-        
-        :returns: :class:`ListQueriesResponse`
-        
\ No newline at end of file
+
+List the history of queries through SQL warehouses, and serverless compute.
+
+You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
+returned first (up to max_results in request). The pagination token returned in response can be used
+to list subsequent query statuses.
+
+:param filter_by: :class:`QueryFilter` (optional)
+  A filter to limit query history results. This field is optional.
+:param include_metrics: bool (optional)
+  Whether to include the query metrics with each query. Only use this for a small subset of queries
+  (max_results). Defaults to false.
+:param max_results: int (optional)
+  Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
+:param page_token: str (optional)
+  A token that can be used to get the next page of results. The token can contain characters that
+  need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
+  %2B. This field is optional.
+
+:returns: :class:`ListQueriesResponse`
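+
+A sketch of manual pagination using `page_token` and the `next_page_token` field of
+:class:`ListQueriesResponse` (assuming a configured `WorkspaceClient` bound to `w`):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+
+    page_token = None
+    while True:
+        resp = w.query_history.list(max_results=100, page_token=page_token)
+        for query_info in resp.res or []:
+            print(query_info.query_id, query_info.status)
+        page_token = resp.next_page_token
+        if not page_token:
+            break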
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst
index ac3d6c565..6011fbeb0 100644
--- a/docs/workspace/sql/query_visualizations.rst
+++ b/docs/workspace/sql/query_visualizations.rst
@@ -5,48 +5,47 @@
 .. py:class:: QueryVisualizationsAPI
 
     This is an evolving API that facilitates the addition and removal of visualizations from existing queries
-    in the Databricks Workspace. Data structures can change over time.
+in the Databricks Workspace. Data structures can change over time.
 
     .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization
 
         Add a visualization to a query.
-        
-        Adds a visualization to a query.
-        
-        :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
-        
-        :returns: :class:`Visualization`
-        
+
+Adds a visualization to a query.
+
+:param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
+
+:returns: :class:`Visualization`
+
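+A hedged sketch (assuming a configured `WorkspaceClient` bound to `w`; the
+:class:`CreateVisualizationRequestVisualization` field names used here are assumptions drawn
+from the request type, not confirmed by this page):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import sql
+
+    w = WorkspaceClient()
+
+    viz = w.query_visualizations.create(
+        visualization=sql.CreateVisualizationRequestVisualization(
+            query_id="12345",                  # assumed field: the parent query
+            type="chart",                      # assumed field: visualization type
+            display_name="Revenue by region",  # assumed field: name shown in the UI
+            serialized_options="{}",           # assumed field: JSON-encoded options
+        ))
+    print(viz.id)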
 
     .. py:method:: delete(id: str)
 
         Remove a visualization.
-        
-        Removes a visualization.
-        
-        :param id: str
-        
-        
-        
+
+Removes a visualization.
+
+:param id: str
+
+
+
 
     .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization
 
         Update a visualization.
-        
-        Updates a visualization.
-        
-        :param id: str
-        :param update_mask: str
-          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-          the entire collection field can be specified. Field names must exactly match the resource field
-          names.
-          
-          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-          changes in the future.
-        :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
-        
-        :returns: :class:`Visualization`
-        
\ No newline at end of file
+
+Updates a visualization.
+
+:param id: str
+:param update_mask: str
+  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+  the entire collection field can be specified. Field names must exactly match the resource field
+  names.
+  
+  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+  changes in the future.
+:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
+
+:returns: :class:`Visualization`
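+
+A hedged sketch of a partial update (assuming a configured `WorkspaceClient` bound to `w`, and
+that `display_name` is a valid field on :class:`UpdateVisualizationRequestVisualization`):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import sql
+
+    w = WorkspaceClient()
+
+    # Only the fields named in update_mask are replaced; all other fields
+    # of the visualization are left untouched.
+    viz = w.query_visualizations.update(
+        id="abcd-1234",
+        update_mask="display_name",
+        visualization=sql.UpdateVisualizationRequestVisualization(
+            display_name="Revenue by region (v2)"))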
diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst
index f56f78a5f..aca56b516 100644
--- a/docs/workspace/sql/query_visualizations_legacy.rst
+++ b/docs/workspace/sql/query_visualizations_legacy.rst
@@ -5,81 +5,80 @@
 .. py:class:: QueryVisualizationsLegacyAPI
 
     This is an evolving API that facilitates the addition and removal of visualizations from existing queries
-    within the Databricks Workspace. Data structures may change over time.
-    
-    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-    more]
-    
-    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+within the Databricks Workspace. Data structures may change over time.
+
+**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization
 
         Add visualization to a query.
-        
-        Creates visualization in the query.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use
-        :method:queryvisualizations/create instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param query_id: str
-          The identifier returned by :method:queries/create
-        :param type: str
-          The type of visualization: chart, table, pivot table, and so on.
-        :param options: Any
-          The options object varies widely from one visualization type to the next and is unsupported.
-          Databricks does not recommend modifying visualization settings in JSON.
-        :param description: str (optional)
-          A short description of this visualization. This is not displayed in the UI.
-        :param name: str (optional)
-          The name of the visualization that appears on dashboards and the query screen.
-        
-        :returns: :class:`LegacyVisualization`
-        
+
+Creates visualization in the query.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use
+:method:queryvisualizations/create instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param query_id: str
+  The identifier returned by :method:queries/create
+:param type: str
+  The type of visualization: chart, table, pivot table, and so on.
+:param options: Any
+  The options object varies widely from one visualization type to the next and is unsupported.
+  Databricks does not recommend modifying visualization settings in JSON.
+:param description: str (optional)
+  A short description of this visualization. This is not displayed in the UI.
+:param name: str (optional)
+  The name of the visualization that appears on dashboards and the query screen.
+
+:returns: :class:`LegacyVisualization`
+
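+A minimal sketch (assuming a configured `WorkspaceClient` bound to `w`; the options payload is
+illustrative, since its shape varies by visualization type):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+
+    viz = w.query_visualizations_legacy.create(
+        query_id="12345",  # identifier of an existing legacy query
+        type="chart",
+        options={},        # opaque, type-specific settings; see the note above
+        name="Revenue by region",
+    )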
 
     .. py:method:: delete(id: str)
 
         Remove visualization.
-        
-        Removes a visualization from the query.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use
-        :method:queryvisualizations/delete instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param id: str
-          Widget ID returned by :method:queryvizualisations/create
-        
-        
-        
+
+Removes a visualization from the query.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use
+:method:queryvisualizations/delete instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param id: str
+  Widget ID returned by :method:queryvisualizations/create
+
+
+
 
     .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization
 
         Edit existing visualization.
-        
-        Updates visualization in the query.
-        
-        **Note**: A new version of the Databricks SQL API is now available. Please use
-        :method:queryvisualizations/update instead. [Learn more]
-        
-        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-        
-        :param id: str
-          The UUID for this visualization.
-        :param created_at: str (optional)
-        :param description: str (optional)
-          A short description of this visualization. This is not displayed in the UI.
-        :param name: str (optional)
-          The name of the visualization that appears on dashboards and the query screen.
-        :param options: Any (optional)
-          The options object varies widely from one visualization type to the next and is unsupported.
-          Databricks does not recommend modifying visualization settings in JSON.
-        :param query: :class:`LegacyQuery` (optional)
-        :param type: str (optional)
-          The type of visualization: chart, table, pivot table, and so on.
-        :param updated_at: str (optional)
-        
-        :returns: :class:`LegacyVisualization`
-        
\ No newline at end of file
+
+Updates visualization in the query.
+
+**Note**: A new version of the Databricks SQL API is now available. Please use
+:method:queryvisualizations/update instead. [Learn more]
+
+[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+
+:param id: str
+  The UUID for this visualization.
+:param created_at: str (optional)
+:param description: str (optional)
+  A short description of this visualization. This is not displayed in the UI.
+:param name: str (optional)
+  The name of the visualization that appears on dashboards and the query screen.
+:param options: Any (optional)
+  The options object varies widely from one visualization type to the next and is unsupported.
+  Databricks does not recommend modifying visualization settings in JSON.
+:param query: :class:`LegacyQuery` (optional)
+:param type: str (optional)
+  The type of visualization: chart, table, pivot table, and so on.
+:param updated_at: str (optional)
+
+:returns: :class:`LegacyVisualization`
diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst
index 9b4382dd5..2ab3de7ea 100644
--- a/docs/workspace/sql/redash_config.rst
+++ b/docs/workspace/sql/redash_config.rst
@@ -9,6 +9,5 @@
     .. py:method:: get_config() -> ClientConfig
 
         Read workspace configuration for Redash-v2.
-        
-        :returns: :class:`ClientConfig`
-        
\ No newline at end of file
+
+:returns: :class:`ClientConfig`
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 44f64b512..1fd0d3407 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -5,248 +5,247 @@
 .. py:class:: StatementExecutionAPI
 
     The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
-    fetch the result.
-    
-    **Getting started**
-    
-    We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
-    
-    **Overview of statement execution and result fetching**
-    
-    Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
-    SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format.
-    If no other parameters are specified, the server will wait for up to 10s before returning a response. If
-    the statement has completed within this timespan, the response will include the result data as a JSON
-    array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will
-    provide the statement ID that can be used to poll for results by using a
-    :method:statementexecution/getStatement request.
-    
-    You can specify whether the call should behave synchronously, asynchronously or start synchronously with a
-    fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
-    settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to
-    the specified timeout; when set to `0s`, the call is asynchronous and responds immediately with a
-    statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while
-    the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to
-    asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
-    
-    In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30
-    seconds; if the statement execution finishes within this time, the result data is returned directly in the
-    response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns
-    with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call
-    doesn't wait for the statement to finish but returns directly with a statement ID. The status of the
-    statement execution can be polled by issuing :method:statementexecution/getStatement with the statement
-    ID. Once the execution has succeeded, this call also returns the result and metadata in the response. -
-    Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10
-    seconds; if the statement execution finishes within this time, the result data is returned directly in the
-    response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can
-    be used to fetch status and results in the same way as in the asynchronous mode.
-    
-    Depending on the size, the result can be split into multiple chunks. If the statement execution is
-    successful, the statement response contains a manifest and the first chunk of the result. The manifest
-    contains schema information and provides metadata for each chunk in the result. Result chunks can be
-    retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any
-    order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a
-    `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk.
-    
-    A statement can be canceled with :method:statementexecution/cancelExecution.
-    
-    **Fetching result data: format and disposition**
-    
-    To specify the format of the result data, use the `format` field, which can be set to one of the following
-    options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`.
-    
-    There are two ways to receive statement results, controlled by the `disposition` setting, which can be
-    either `INLINE` or `EXTERNAL_LINKS`:
-    
-    - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for
-    smaller results. This mode can only be used with the `JSON_ARRAY` format.
-    
-    - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data
-    in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode
-    can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`.
-    
-    By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`.
-    
-    **Limits and limitations**
-    
-    Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly
-    match the byte count of the actual payload.
-    
-    - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. -
-    Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit
-    will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum
-    query text size is 16 MiB. - Cancelation might silently fail. A successful response from a cancel request
-    indicates that the cancel request was successfully received and sent to the processing engine. However, an
-    outstanding statement might have already completed execution when the cancel request arrives. Polling for
-    status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
-    are approximate, occur server-side, and cannot account for things such as caller delays and network
-    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
-    once every 15 minutes. - The results are only available for one hour after success; polling does not
-    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
-    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
-    
-    [Apache Arrow Columnar]: https://arrow.apache.org/overview/
-    [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
+fetch the result.
+
+**Getting started**
+
+We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
+
+**Overview of statement execution and result fetching**
+
+Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
+SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format.
+If no other parameters are specified, the server will wait for up to 10s before returning a response. If
+the statement has completed within this timespan, the response will include the result data as a JSON
+array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will
+provide the statement ID that can be used to poll for results by using a
+:method:statementexecution/getStatement request.
+
+You can specify whether the call should behave synchronously, asynchronously or start synchronously with a
+fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
+settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to
+the specified timeout; when set to `0s`, the call is asynchronous and responds immediately with a
+statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while
+the statement execution has not yet finished. This can be set to either `CONTINUE`, to fall back to
+asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
+
+In summary (the sketch at the end of this overview shows these modes in code):
+
+- Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30
+  seconds; if the statement execution finishes within this time, the result data is returned
+  directly in the response. If the execution takes longer than 30 seconds, the execution is
+  canceled and the call returns with a `CANCELED` state.
+- Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call doesn't wait for
+  the statement to finish but returns directly with a statement ID. The status of the statement
+  execution can be polled by issuing :method:statementexecution/getStatement with the statement ID.
+  Once the execution has succeeded, this call also returns the result and metadata in the response.
+- Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up
+  to 10 seconds; if the statement execution finishes within this time, the result data is returned
+  directly in the response. If the execution takes longer than 10 seconds, a statement ID is
+  returned. The statement ID can be used to fetch status and results in the same way as in the
+  asynchronous mode.
+
+Depending on the size, the result can be split into multiple chunks. If the statement execution is
+successful, the statement response contains a manifest and the first chunk of the result. The manifest
+contains schema information and provides metadata for each chunk in the result. Result chunks can be
+retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any
+order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a
+`next_chunk_index` and `next_chunk_internal_link` that point to the next chunk.
+
+A statement can be canceled with :method:statementexecution/cancelExecution.
+
+**Fetching result data: format and disposition**
+
+To specify the format of the result data, use the `format` field, which can be set to one of the following
+options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`.
+
+There are two ways to receive statement results, controlled by the `disposition` setting, which can be
+either `INLINE` or `EXTERNAL_LINKS`:
+
+- `INLINE`: In this mode, the result data is directly included in the response. It's best suited for
+smaller results. This mode can only be used with the `JSON_ARRAY` format.
+
+- `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data
+in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode
+can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`.
+
+By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`.
+
+**Limits and limitations**
+
+Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly
+match the byte count of the actual payload.
+
+- Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is
+  exceeded.
+- Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this
+  limit will be truncated. Truncation is indicated by the `truncated` field in the result manifest.
+- The maximum query text size is 16 MiB.
+- Cancelation might silently fail. A successful response from a cancel request indicates that the
+  cancel request was successfully received and sent to the processing engine. However, an
+  outstanding statement might have already completed execution when the cancel request arrives.
+  Polling for status until a terminal state is reached is a reliable way to determine the final
+  state.
+- Wait timeouts are approximate, occur server-side, and cannot account for things such as caller
+  delays and network latency from caller to service.
+- To guarantee that the statement is kept alive, you must poll at least once every 15 minutes.
+- The results are only available for one hour after success; polling does not extend this.
+- The SQL Execution API must be used for the entire lifecycle of the statement. For example, you
+  cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
+
+[Apache Arrow Columnar]: https://arrow.apache.org/overview/
+[Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
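+
+The three modes above map onto the SDK as follows (a sketch, assuming a configured
+`WorkspaceClient` bound to `w` and an illustrative warehouse ID):
+
+.. code-block:: python
+
+    import time
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.sql import (ExecuteStatementRequestOnWaitTimeout,
+                                            StatementState)
+
+    w = WorkspaceClient()
+
+    # Hybrid mode: wait up to 10 seconds, then fall back to asynchronous polling.
+    resp = w.statement_execution.execute_statement(
+        statement="SELECT 1",
+        warehouse_id="abcdef1234567890",
+        wait_timeout="10s",
+        on_wait_timeout=ExecuteStatementRequestOnWaitTimeout.CONTINUE)
+
+    while resp.status.state in (StatementState.PENDING, StatementState.RUNNING):
+        time.sleep(5)
+        resp = w.statement_execution.get_statement(resp.statement_id)
+
+    if resp.status.state == StatementState.SUCCEEDED:
+        print(resp.result.data_array)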
 
     .. py:method:: cancel_execution(statement_id: str)
 
         Cancel statement execution.
-        
-        Requests that an executing statement be canceled. Callers must poll for status to see the terminal
-        state.
-        
-        :param statement_id: str
-          The statement ID is returned upon successfully submitting a SQL statement, and is a required
-          reference for all subsequent calls.
-        
-        
-        
+
+Requests that an executing statement be canceled. Callers must poll for status to see the terminal
+state.
+
+:param statement_id: str
+  The statement ID is returned upon successfully submitting a SQL statement, and is a required
+  reference for all subsequent calls.
+
+
+
 
     .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse
 
         Execute a SQL statement.
-        
-        :param statement: str
-          The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
-        :param warehouse_id: str
-          Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
-          
-          [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
-        :param byte_limit: int (optional)
-          Applies the given byte limit to the statement's result size. Byte counts are based on internal data
-          representations and might not match the final size in the requested `format`. If the result was
-          truncated due to the byte limit, then `truncated` in the response is set to `true`. When using
-          `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not
-          explcitly set.
-        :param catalog: str (optional)
-          Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
-          
-          [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
-        :param disposition: :class:`Disposition` (optional)
-        :param format: :class:`Format` (optional)
-          Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
-          `CSV`.
-          
-          Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
-          disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
-          
-          When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
-          where each value is either the *string representation* of a value, or `null`. For example, the
-          output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would
-          look like this:
-          
-          ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ```
-          
-          When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result
-          contains compact JSON with no indentation or extra whitespace.
-          
-          When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result
-          will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format].
-          
-          When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a
-          CSV according to [RFC 4180] standard. All the columns values will have *string representation*
-          similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first
-          chunk in the result would contain a header row with column names. For example, the output of `SELECT
-          concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this:
-          
-          ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ```
-          
-          [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format
-          [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180
-        :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional)
-          When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution
-          doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue
-          or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the
-          call returns a statement ID which can be used for polling with
-          :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled
-          and the call returns with a `CANCELED` state.
-        :param parameters: List[:class:`StatementParameterListItem`] (optional)
-          A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists
-          of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be
-          omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a
-          string.
-          
-          If the type is given, parameters will be checked for type correctness according to the given type. A
-          value is correct if the provided string can be converted to the requested type using the `cast`
-          function. The exact semantics are described in the section [`cast` function] of the SQL language
-          reference.
-          
-          For example, the following statement contains two parameters, `my_name` and `my_date`:
-          
-          SELECT * FROM my_table WHERE name = :my_name AND date = :my_date
-          
-          The parameters can be passed in the request body as follows:
-          
-          { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
-          "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value":
-          "2020-01-01", "type": "DATE" } ] }
-          
-          Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL
-          Statement Execution API.
-          
-          Also see the section [Parameter markers] of the SQL language reference.
-          
-          [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html
-          [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html
-        :param row_limit: int (optional)
-          Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it
-          also sets the `truncated` field in the response to indicate whether the result was trimmed due to
-          the limit or not.
-        :param schema: str (optional)
-          Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL.
-          
-          [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html
-        :param wait_timeout: str (optional)
-          The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set
-          to 0 or to a value between 5 and 50.
-          
-          When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the
-          execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID
-          which can be used for polling with :method:statementexecution/getStatement.
-          
-          When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait
-          for the statement execution to finish. If the execution finishes within this time, the call returns
-          immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If
-          the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
-          timeout is reached.
-        
-        :returns: :class:`StatementResponse`
-        
+
+:param statement: str
+  The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
+:param warehouse_id: str
+  Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
+  
+  [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
+:param byte_limit: int (optional)
+  Applies the given byte limit to the statement's result size. Byte counts are based on internal data
+  representations and might not match the final size in the requested `format`. If the result was
+  truncated due to the byte limit, then `truncated` in the response is set to `true`. When using
+  `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not
+  explicitly set.
+:param catalog: str (optional)
+  Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
+  
+  [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
+:param disposition: :class:`Disposition` (optional)
+:param format: :class:`Format` (optional)
+  Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
+  `CSV`.
+  
+  Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
+  disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
+  
+  When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
+  where each value is either the *string representation* of a value, or `null`. For example, the
+  output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would
+  look like this:
+  
+  ```
+  [ [ "id-1", "1", null ],
+    [ "id-2", "2", null ],
+    [ "id-3", "3", null ] ]
+  ```
+  
+  When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result
+  contains compact JSON with no indentation or extra whitespace.
+  
+  When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result
+  will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format].
+  
+  When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a
+  CSV according to the [RFC 4180] standard. All column values will have a *string representation*
+  similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first
+  chunk in the result would contain a header row with column names. For example, the output of `SELECT
+  concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this:
+  
+  ```
+  strCol,intCol,nullCol
+  id-1,1,null
+  id-2,2,null
+  id-3,3,null
+  ```
+  
+  [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format
+  [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180
+:param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional)
+  When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution
+  doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue
+  or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the
+  call returns a statement ID which can be used for polling with
+  :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled
+  and the call returns with a `CANCELED` state.
+:param parameters: List[:class:`StatementParameterListItem`] (optional)
+  A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists
+  of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be
+  omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a
+  string.
+  
+  If the type is given, parameters will be checked for type correctness according to the given type. A
+  value is correct if the provided string can be converted to the requested type using the `cast`
+  function. The exact semantics are described in the section [`cast` function] of the SQL language
+  reference.
+  
+  For example, the following statement contains two parameters, `my_name` and `my_date`:
+  
+  SELECT * FROM my_table WHERE name = :my_name AND date = :my_date
+  
+  The parameters can be passed in the request body as follows:
+  
+  { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
+  "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value":
+  "2020-01-01", "type": "DATE" } ] }
+  
+  Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL
+  Statement Execution API.
+  
+  Also see the section [Parameter markers] of the SQL language reference.
+  
+  [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html
+  [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html
+:param row_limit: int (optional)
+  Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it
+  also sets the `truncated` field in the response to indicate whether the result was trimmed due to
+  the limit or not.
+:param schema: str (optional)
+  Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL.
+  
+  [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html
+:param wait_timeout: str (optional)
+  The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set
+  to 0 or to a value between 5 and 50.
+  
+  When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the
+  execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID
+  which can be used for polling with :method:statementexecution/getStatement.
+  
+  When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait
+  for the statement execution to finish. If the execution finishes within this time, the call returns
+  immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If
+  the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
+  timeout is reached.
+
+:returns: :class:`StatementResponse`
+
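+A sketch of the parameterized form described above (assuming a configured `WorkspaceClient`
+bound to `w`; the warehouse ID is illustrative):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.sql import StatementParameterListItem
+
+    w = WorkspaceClient()
+
+    resp = w.statement_execution.execute_statement(
+        statement="SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
+        warehouse_id="abcdef1234567890",
+        parameters=[
+            StatementParameterListItem(name="my_name", value="the name"),
+            # With an explicit type, the string value is checked and cast server-side.
+            StatementParameterListItem(name="my_date", value="2020-01-01", type="DATE"),
+        ])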
 
     .. py:method:: get_statement(statement_id: str) -> StatementResponse
 
         Get status, manifest, and result first chunk.
-        
-        This request can be used to poll for the statement's status. When the `status.state` field is
-        `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the
-        statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the
-        state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and
-        further calls will receive an HTTP 404 response.
-        
-        **NOTE** This call currently might take up to 5 seconds to get the latest status and result.
-        
-        :param statement_id: str
-          The statement ID is returned upon successfully submitting a SQL statement, and is a required
-          reference for all subsequent calls.
-        
-        :returns: :class:`StatementResponse`
-        
+
+This request can be used to poll for the statement's status. When the `status.state` field is
+`SUCCEEDED`, it will also return the result manifest and the first chunk of the result data. When the
+statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the
+state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and
+further calls will receive an HTTP 404 response.
+
+**NOTE** This call currently might take up to 5 seconds to get the latest status and result.
+
+:param statement_id: str
+  The statement ID is returned upon successfully submitting a SQL statement, and is a required
+  reference for all subsequent calls.
+
+:returns: :class:`StatementResponse`
+
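+A small polling helper matching the terminal states listed above (a sketch; `w` is a configured
+`WorkspaceClient`):
+
+.. code-block:: python
+
+    import time
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.sql import StatementResponse, StatementState
+
+    TERMINAL_STATES = {StatementState.SUCCEEDED, StatementState.FAILED,
+                       StatementState.CANCELED, StatementState.CLOSED}
+
+    def wait_for_statement(w: WorkspaceClient, statement_id: str,
+                           poll_interval: float = 5.0) -> StatementResponse:
+        # Poll get_statement until the statement reaches a terminal state.
+        while True:
+            resp = w.statement_execution.get_statement(statement_id)
+            if resp.status.state in TERMINAL_STATES:
+                return resp
+            time.sleep(poll_interval)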
 
     .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData
 
         Get result chunk by index.
-        
-        After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index.
-        Whereas the first chunk with `chunk_index=0` is typically fetched with
-        :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request
-        can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
-        element described in the :method:statementexecution/getStatement request, and similarly includes the
-        `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
-        
-        :param statement_id: str
-          The statement ID is returned upon successfully submitting a SQL statement, and is a required
-          reference for all subsequent calls.
-        :param chunk_index: int
-        
-        :returns: :class:`ResultData`
-        
\ No newline at end of file
+
+After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index.
+Whereas the first chunk with `chunk_index=0` is typically fetched with
+:method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request
+can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
+element described in the :method:statementexecution/getStatement request, and similarly includes the
+`next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
+
+:param statement_id: str
+  The statement ID is returned upon successfully submitting a SQL statement, and is a required
+  reference for all subsequent calls.
+:param chunk_index: int
+
+:returns: :class:`ResultData`
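+
+A sketch of sequential chunk fetching via `next_chunk_index` (assuming `resp` is a `SUCCEEDED`
+:class:`StatementResponse` obtained from the calls above):
+
+.. code-block:: python
+
+    chunk = resp.result
+    while chunk is not None:
+        for row in chunk.data_array or []:
+            print(row)
+        if chunk.next_chunk_index is None:
+            break
+        chunk = w.statement_execution.get_statement_result_chunk_n(
+            resp.statement_id, chunk.next_chunk_index)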
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index fd55d5b0c..e5afd9419 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -5,7 +5,7 @@
 .. py:class:: WarehousesAPI
 
     A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks
-    SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
+SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
 
     .. py:method:: create( [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[CreateWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse]
 
@@ -34,69 +34,69 @@
             w.warehouses.delete(id=created.id)
 
         Create a warehouse.
-        
-        Creates a new SQL warehouse.
-        
-        :param auto_stop_mins: int (optional)
-          The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
-          is automatically stopped.
-          
-          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
-          non-serverless warehouses - 0 indicates no autostop.
-          
-          Defaults to 120 mins
-        :param channel: :class:`Channel` (optional)
-          Channel Details
-        :param cluster_size: str (optional)
-          Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
-          to run larger queries on it. If you want to increase the number of concurrent queries, please tune
-          max_num_clusters.
-          
-          Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
-          4X-Large
-        :param creator_name: str (optional)
-          warehouse creator name
-        :param enable_photon: bool (optional)
-          Configures whether the warehouse should use Photon optimized clusters.
-          
-          Defaults to false.
-        :param enable_serverless_compute: bool (optional)
-          Configures whether the warehouse should use serverless compute
-        :param instance_profile_arn: str (optional)
-          Deprecated. Instance profile used to pass IAM role to the cluster
-        :param max_num_clusters: int (optional)
-          Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-          
-          Supported values: - Must be >= min_num_clusters - Must be <= 30.
-          
-          Defaults to min_clusters if unset.
-        :param min_num_clusters: int (optional)
-          Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
-          will ensure that a larger number of clusters are always running and therefore may reduce the cold
-          start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-          
-          Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-          
-          Defaults to 1
-        :param name: str (optional)
-          Logical name for the cluster.
-          
-          Supported values: - Must be unique within an org. - Must be less than 100 characters.
-        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
-          Configurations whether the warehouse should use spot instances.
-        :param tags: :class:`EndpointTags` (optional)
-          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
-          associated with this SQL warehouse.
-          
-          Supported values: - Number of tags < 45.
-        :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
-          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
-          also set the field `enable_serverless_compute` to `true`.
-        
-        :returns:
-          Long-running operation waiter for :class:`GetWarehouseResponse`.
-          See :method:wait_get_warehouse_running for more details.
-        
+
+Creates a new SQL warehouse.
+
+:param auto_stop_mins: int (optional)
+  The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
+  is automatically stopped.
+  
+  Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+  non-serverless warehouses - 0 indicates no autostop.
+  
+  Defaults to 120 mins
+:param channel: :class:`Channel` (optional)
+  Channel Details
+:param cluster_size: str (optional)
+  Size of the clusters allocated for this warehouse. Increasing the size of a Spark cluster allows you
+  to run larger queries on it. If you want to increase the number of concurrent queries, please tune
+  max_num_clusters.
+  
+  Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
+  4X-Large
+:param creator_name: str (optional)
+  warehouse creator name
+:param enable_photon: bool (optional)
+  Configures whether the warehouse should use Photon optimized clusters.
+  
+  Defaults to false.
+:param enable_serverless_compute: bool (optional)
+  Configures whether the warehouse should use serverless compute
+:param instance_profile_arn: str (optional)
+  Deprecated. Instance profile used to pass IAM role to the cluster
+:param max_num_clusters: int (optional)
+  Maximum number of clusters that the autoscaler will create to handle concurrent queries.
+  
+  Supported values: - Must be >= min_num_clusters - Must be <= 30.
+  
+  Defaults to min_num_clusters if unset.
+:param min_num_clusters: int (optional)
+  Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
+  will ensure that a larger number of clusters are always running and therefore may reduce the cold
+  start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
+  
+  Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
+  
+  Defaults to 1
+:param name: str (optional)
+  Logical name for the cluster.
+  
+  Supported values: - Must be unique within an org. - Must be less than 100 characters.
+:param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+  Configures whether the warehouse should use spot instances.
+:param tags: :class:`EndpointTags` (optional)
+  A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
+  associated with this SQL warehouse.
+  
+  Supported values: - Number of tags < 45.
+:param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
+  Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
+  also set the field `enable_serverless_compute` to `true`.
+
+:returns:
+  Long-running operation waiter for :class:`GetWarehouseResponse`.
+  See :method:wait_get_warehouse_running for more details.
+
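+A sketch of creating a warehouse and blocking until it is running (assuming a configured
+`WorkspaceClient` bound to `w`; sizing values are illustrative):
+
+.. code-block:: python
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+
+    # .result() blocks on the long-running operation waiter returned by
+    # create(); create_and_wait below is the equivalent one-call form.
+    created = w.warehouses.create(
+        name="sdk-example-warehouse",
+        cluster_size="2X-Small",
+        auto_stop_mins=10,
+        max_num_clusters=1,
+    ).result()
+    print(created.id, created.state)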
 
     .. py:method:: create_and_wait( [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[CreateWarehouseRequestWarehouseType], timeout: datetime.timedelta = 0:20:00]) -> GetWarehouseResponse
 
@@ -104,14 +104,14 @@
     .. py:method:: delete(id: str)
 
         Delete a warehouse.
-        
-        Deletes a SQL warehouse.
-        
-        :param id: str
-          Required. Id of the SQL warehouse.
-        
-        
-        
+
+Deletes a SQL warehouse.
+
+:param id: str
+  Required. Id of the SQL warehouse.
+
+
+
 
     .. py:method:: edit(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse]
 
@@ -146,70 +146,70 @@
             w.warehouses.delete(id=created.id)
 
         Update a warehouse.
-        
-        Updates the configuration for a SQL warehouse.
-        
-        :param id: str
-          Required. Id of the warehouse to configure.
-        :param auto_stop_mins: int (optional)
-          The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
-          is automatically stopped.
-          
-          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
-          
-          Defaults to 120 mins
-        :param channel: :class:`Channel` (optional)
-          Channel Details
-        :param cluster_size: str (optional)
-          Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
-          to run larger queries on it. If you want to increase the number of concurrent queries, please tune
-          max_num_clusters.
-          
-          Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
-          4X-Large
-        :param creator_name: str (optional)
-          warehouse creator name
-        :param enable_photon: bool (optional)
-          Configures whether the warehouse should use Photon optimized clusters.
-          
-          Defaults to false.
-        :param enable_serverless_compute: bool (optional)
-          Configures whether the warehouse should use serverless compute.
-        :param instance_profile_arn: str (optional)
-          Deprecated. Instance profile used to pass IAM role to the cluster
-        :param max_num_clusters: int (optional)
-          Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-          
-          Supported values: - Must be >= min_num_clusters - Must be <= 30.
-          
-          Defaults to min_clusters if unset.
-        :param min_num_clusters: int (optional)
-          Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
-          will ensure that a larger number of clusters are always running and therefore may reduce the cold
-          start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-          
-          Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-          
-          Defaults to 1
-        :param name: str (optional)
-          Logical name for the cluster.
-          
-          Supported values: - Must be unique within an org. - Must be less than 100 characters.
-        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
-          Configurations whether the warehouse should use spot instances.
-        :param tags: :class:`EndpointTags` (optional)
-          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
-          associated with this SQL warehouse.
-          
-          Supported values: - Number of tags < 45.
-        :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
-          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
-          also set the field `enable_serverless_compute` to `true`.
-        
-        :returns:
-          Long-running operation waiter for :class:`GetWarehouseResponse`.
-          See :method:wait_get_warehouse_running for more details.
-        
+
+Updates the configuration for a SQL warehouse.
+
+:param id: str
+  Required. Id of the warehouse to configure.
+:param auto_stop_mins: int (optional)
+  The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
+  is automatically stopped.
+  
+  Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+  
+  Defaults to 120 mins
+:param channel: :class:`Channel` (optional)
+  Channel Details
+:param cluster_size: str (optional)
+  Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
+  to run larger queries on it. If you want to increase the number of concurrent queries, please tune
+  max_num_clusters.
+  
+  Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
+  4X-Large
+:param creator_name: str (optional)
+  warehouse creator name
+:param enable_photon: bool (optional)
+  Configures whether the warehouse should use Photon optimized clusters.
+  
+  Defaults to false.
+:param enable_serverless_compute: bool (optional)
+  Configures whether the warehouse should use serverless compute.
+:param instance_profile_arn: str (optional)
+  Deprecated. Instance profile used to pass IAM role to the cluster
+:param max_num_clusters: int (optional)
+  Maximum number of clusters that the autoscaler will create to handle concurrent queries.
+  
+  Supported values: - Must be >= min_num_clusters - Must be <= 30.
+  
+  Defaults to min_clusters if unset.
+:param min_num_clusters: int (optional)
+  Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
+  will ensure that a larger number of clusters are always running and therefore may reduce the cold
+  start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
+  
+  Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
+  
+  Defaults to 1
+:param name: str (optional)
+  Logical name for the cluster.
+  
+  Supported values: - Must be unique within an org. - Must be less than 100 characters.
+:param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+  Configures whether the warehouse should use spot instances.
+:param tags: :class:`EndpointTags` (optional)
+  A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
+  associated with this SQL warehouse.
+  
+  Supported values: - Number of tags < 45.
+:param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
+  Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set this to
+  `PRO` and also set the field `enable_serverless_compute` to `true`.
+
+:returns:
+  Long-running operation waiter for :class:`GetWarehouseResponse`.
+  See :method:`wait_get_warehouse_running` for more details.
+
 
     .. py:method:: edit_and_wait(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType], timeout: datetime.timedelta = 0:20:00]) -> GetWarehouseResponse
 
@@ -243,48 +243,48 @@
             w.warehouses.delete(id=created.id)
 
         Get warehouse info.
-        
-        Gets the information for a single SQL warehouse.
-        
-        :param id: str
-          Required. Id of the SQL warehouse.
-        
-        :returns: :class:`GetWarehouseResponse`
-        
+
+Gets the information for a single SQL warehouse.
+
+:param id: str
+  Required. Id of the SQL warehouse.
+
+:returns: :class:`GetWarehouseResponse`
+
 
     .. py:method:: get_permission_levels(warehouse_id: str) -> GetWarehousePermissionLevelsResponse
 
         Get SQL warehouse permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param warehouse_id: str
-          The SQL warehouse for which to get or manage permissions.
-        
-        :returns: :class:`GetWarehousePermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param warehouse_id: str
+  The SQL warehouse for which to get or manage permissions.
+
+:returns: :class:`GetWarehousePermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(warehouse_id: str) -> WarehousePermissions
 
         Get SQL warehouse permissions.
-        
-        Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root
-        object.
-        
-        :param warehouse_id: str
-          The SQL warehouse for which to get or manage permissions.
-        
-        :returns: :class:`WarehousePermissions`
-        
+
+Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root
+object.
+
+:param warehouse_id: str
+  The SQL warehouse for which to get or manage permissions.
+
+:returns: :class:`WarehousePermissions`
+
 
     .. py:method:: get_workspace_warehouse_config() -> GetWorkspaceWarehouseConfigResponse
 
         Get the workspace configuration.
-        
-        Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-        
-        :returns: :class:`GetWorkspaceWarehouseConfigResponse`
-        
+
+Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
+
+:returns: :class:`GetWorkspaceWarehouseConfigResponse`
+
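+A minimal sketch (assumes the response exposes a `security_policy` field, per
+:class:`GetWorkspaceWarehouseConfigResponse`):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Fetch the workspace-level configuration shared by all SQL warehouses.
+    cfg = w.warehouses.get_workspace_warehouse_config()
+    print(cfg.security_policy)
+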
 
     .. py:method:: list( [, run_as_user_id: Optional[int]]) -> Iterator[EndpointInfo]
 
@@ -301,75 +301,75 @@
             all = w.warehouses.list(sql.ListWarehousesRequest())
 
         List warehouses.
-        
-        Lists all SQL warehouses that a user has manager permissions on.
-        
-        :param run_as_user_id: int (optional)
-          Service Principal which will be used to fetch the list of warehouses. If not specified, the user
-          from the session header is used.
-        
-        :returns: Iterator over :class:`EndpointInfo`
-        
+
+Lists all SQL warehouses that a user has manager permissions on.
+
+:param run_as_user_id: int (optional)
+  Service Principal which will be used to fetch the list of warehouses. If not specified, the user
+  from the session header is used.
+
+:returns: Iterator over :class:`EndpointInfo`
+
 
     .. py:method:: set_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions
 
         Set SQL warehouse permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param warehouse_id: str
-          The SQL warehouse for which to get or manage permissions.
-        :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-        
-        :returns: :class:`WarehousePermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param warehouse_id: str
+  The SQL warehouse for which to get or manage permissions.
+:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
+
+:returns: :class:`WarehousePermissions`
+
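+A hedged sketch of granting a single user `CAN_USE` on a warehouse; the request and enum classes
+come from `databricks.sdk.service.sql`, and `wh_id` and the user name are placeholders:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service import sql
+
+    w = WorkspaceClient()
+    # Replaces all direct permissions on the warehouse with this single grant.
+    w.warehouses.set_permissions(
+        warehouse_id=wh_id,
+        access_control_list=[
+            sql.WarehouseAccessControlRequest(
+                user_name="someone@example.com",
+                permission_level=sql.WarehousePermissionLevel.CAN_USE,
+            )
+        ],
+    )
+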
 
     .. py:method:: set_workspace_warehouse_config( [, channel: Optional[Channel], config_param: Optional[RepeatedEndpointConfPairs], data_access_config: Optional[List[EndpointConfPair]], enabled_warehouse_types: Optional[List[WarehouseTypePair]], global_param: Optional[RepeatedEndpointConfPairs], google_service_account: Optional[str], instance_profile_arn: Optional[str], security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy], sql_configuration_parameters: Optional[RepeatedEndpointConfPairs]])
 
         Set the workspace configuration.
-        
-        Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-        
-        :param channel: :class:`Channel` (optional)
-          Optional: Channel selection details
-        :param config_param: :class:`RepeatedEndpointConfPairs` (optional)
-          Deprecated: Use sql_configuration_parameters
-        :param data_access_config: List[:class:`EndpointConfPair`] (optional)
-          Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K
-        :param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional)
-          List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
-          CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
-          specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be
-          converted to another type. Used by frontend to save specific type availability in the warehouse
-          create and edit form UI.
-        :param global_param: :class:`RepeatedEndpointConfPairs` (optional)
-          Deprecated: Use sql_configuration_parameters
-        :param google_service_account: str (optional)
-          GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage
-        :param instance_profile_arn: str (optional)
-          AWS Only: Instance profile used to pass IAM role to the cluster
-        :param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional)
-          Security policy for warehouses
-        :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
-          SQL configuration parameters
-        
-        
-        
+
+Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
+
+:param channel: :class:`Channel` (optional)
+  Optional: Channel selection details
+:param config_param: :class:`RepeatedEndpointConfPairs` (optional)
+  Deprecated: Use sql_configuration_parameters
+:param data_access_config: List[:class:`EndpointConfPair`] (optional)
+  Spark confs for external hive metastore configuration. The JSON-serialized size must be <= 512K.
+:param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional)
+  List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
+  CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
+  specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be
+  converted to another type. Used by frontend to save specific type availability in the warehouse
+  create and edit form UI.
+:param global_param: :class:`RepeatedEndpointConfPairs` (optional)
+  Deprecated: Use sql_configuration_parameters
+:param google_service_account: str (optional)
+  GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage
+:param instance_profile_arn: str (optional)
+  AWS Only: Instance profile used to pass IAM role to the cluster
+:param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional)
+  Security policy for warehouses
+:param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
+  SQL configuration parameters
+
+
+
 
     .. py:method:: start(id: str) -> Wait[GetWarehouseResponse]
 
         Start a warehouse.
-        
-        Starts a SQL warehouse.
-        
-        :param id: str
-          Required. Id of the SQL warehouse.
-        
-        :returns:
-          Long-running operation waiter for :class:`GetWarehouseResponse`.
-          See :method:wait_get_warehouse_running for more details.
-        
+
+Starts a SQL warehouse.
+
+:param id: str
+  Required. Id of the SQL warehouse.
+
+:returns:
+  Long-running operation waiter for :class:`GetWarehouseResponse`.
+  See :method:`wait_get_warehouse_running` for more details.
+
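+A sketch of starting a warehouse and blocking until it is running, using the waiter pattern
+described above (`wh_id` is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    # Start and block until the warehouse reaches the RUNNING state.
+    info = w.warehouses.start(id=wh_id).result()
+    print(info.state)
+    # Equivalent one-call form:
+    info = w.warehouses.start_and_wait(id=wh_id)
+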
 
     .. py:method:: start_and_wait(id: str, timeout: datetime.timedelta = 0:20:00) -> GetWarehouseResponse
 
@@ -377,16 +377,16 @@
     .. py:method:: stop(id: str) -> Wait[GetWarehouseResponse]
 
         Stop a warehouse.
-        
-        Stops a SQL warehouse.
-        
-        :param id: str
-          Required. Id of the SQL warehouse.
-        
-        :returns:
-          Long-running operation waiter for :class:`GetWarehouseResponse`.
-          See :method:wait_get_warehouse_stopped for more details.
-        
+
+Stops a SQL warehouse.
+
+:param id: str
+  Required. Id of the SQL warehouse.
+
+:returns:
+  Long-running operation waiter for :class:`GetWarehouseResponse`.
+  See :method:`wait_get_warehouse_stopped` for more details.
+
 
     .. py:method:: stop_and_wait(id: str, timeout: datetime.timedelta = 0:20:00) -> GetWarehouseResponse
 
@@ -394,16 +394,16 @@
     .. py:method:: update_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions
 
         Update SQL warehouse permissions.
-        
-        Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
-        object.
-        
-        :param warehouse_id: str
-          The SQL warehouse for which to get or manage permissions.
-        :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-        
-        :returns: :class:`WarehousePermissions`
-        
+
+Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
+object.
+
+:param warehouse_id: str
+  The SQL warehouse for which to get or manage permissions.
+:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
+
+:returns: :class:`WarehousePermissions`
+
 
     .. py:method:: wait_get_warehouse_running(id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GetWarehouseResponse], None]]) -> GetWarehouseResponse
 
diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst
index 1abd09b95..c53697944 100644
--- a/docs/workspace/vectorsearch/vector_search_endpoints.rst
+++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst
@@ -9,18 +9,18 @@
     .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType) -> Wait[EndpointInfo]
 
         Create an endpoint.
-        
-        Create a new endpoint.
-        
-        :param name: str
-          Name of endpoint
-        :param endpoint_type: :class:`EndpointType`
-          Type of endpoint.
-        
-        :returns:
-          Long-running operation waiter for :class:`EndpointInfo`.
-          See :method:wait_get_endpoint_vector_search_endpoint_online for more details.
-        
+
+Create a new endpoint.
+
+:param name: str
+  Name of endpoint
+:param endpoint_type: :class:`EndpointType`
+  Type of endpoint.
+
+:returns:
+  Long-running operation waiter for :class:`EndpointInfo`.
+  See :method:`wait_get_endpoint_vector_search_endpoint_online` for more details.
+
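+A sketch of creating an endpoint and waiting for it to come online; `EndpointType.STANDARD` comes
+from `databricks.sdk.service.vectorsearch`, and the endpoint name is a placeholder:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.vectorsearch import EndpointType
+
+    w = WorkspaceClient()
+    # Blocks until the endpoint transitions to the online state.
+    endpoint = w.vector_search_endpoints.create_endpoint_and_wait(
+        name="demo-endpoint",
+        endpoint_type=EndpointType.STANDARD,
+    )
+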
 
     .. py:method:: create_endpoint_and_wait(name: str, endpoint_type: EndpointType, timeout: datetime.timedelta = 0:20:00) -> EndpointInfo
 
@@ -28,31 +28,31 @@
     .. py:method:: delete_endpoint(endpoint_name: str)
 
         Delete an endpoint.
-        
-        :param endpoint_name: str
-          Name of the endpoint
-        
-        
-        
+
+:param endpoint_name: str
+  Name of the endpoint
+
+
+
 
     .. py:method:: get_endpoint(endpoint_name: str) -> EndpointInfo
 
         Get an endpoint.
-        
-        :param endpoint_name: str
-          Name of the endpoint
-        
-        :returns: :class:`EndpointInfo`
-        
+
+:param endpoint_name: str
+  Name of the endpoint
+
+:returns: :class:`EndpointInfo`
+
 
     .. py:method:: list_endpoints( [, page_token: Optional[str]]) -> Iterator[EndpointInfo]
 
         List all endpoints.
-        
-        :param page_token: str (optional)
-          Token for pagination
-        
-        :returns: Iterator over :class:`EndpointInfo`
-        
+
+:param page_token: str (optional)
+  Token for pagination
+
+:returns: Iterator over :class:`EndpointInfo`
+
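+A minimal sketch; the returned iterator follows `page_token` pagination transparently:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    for ep in w.vector_search_endpoints.list_endpoints():
+        print(ep.name)
+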
 
     .. py:method:: wait_get_endpoint_vector_search_endpoint_online(endpoint_name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[EndpointInfo], None]]) -> EndpointInfo
diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst
index 415e19d90..1222ab348 100644
--- a/docs/workspace/vectorsearch/vector_search_indexes.rst
+++ b/docs/workspace/vectorsearch/vector_search_indexes.rst
@@ -5,179 +5,178 @@
 .. py:class:: VectorSearchIndexesAPI
 
     **Index**: An efficient representation of your embedding vectors that supports real-time and efficient
-    approximate nearest neighbor (ANN) search queries.
-    
-    There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that automatically syncs with
-    a source Delta Table, automatically and incrementally updating the index as the underlying data in the
-    Delta Table changes. * **Direct Vector Access Index**: An index that supports direct read and write of
-    vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.
+approximate nearest neighbor (ANN) search queries.
+
+There are 2 types of Vector Search indexes:
+
+* **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically
+  and incrementally updating the index as the underlying data in the Delta Table changes.
+* **Direct Vector Access Index**: An index that supports direct read and write of vectors and
+  metadata through our REST and SDK APIs. With this model, the user manages index updates.
 
     .. py:method:: create_index(name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType [, delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest], direct_access_index_spec: Optional[DirectAccessVectorIndexSpec]]) -> CreateVectorIndexResponse
 
         Create an index.
-        
-        Create a new index.
-        
-        :param name: str
-          Name of the index
-        :param endpoint_name: str
-          Name of the endpoint to be used for serving the index
-        :param primary_key: str
-          Primary key of the index
-        :param index_type: :class:`VectorIndexType`
-          There are 2 types of Vector Search indexes:
-          
-          - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
-          incrementally updating the index as the underlying data in the Delta Table changes. -
-          `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our
-          REST and SDK APIs. With this model, the user manages index updates.
-        :param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional)
-          Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.
-        :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)
-          Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.
-        
-        :returns: :class:`CreateVectorIndexResponse`
-        
+
+Create a new index.
+
+:param name: str
+  Name of the index
+:param endpoint_name: str
+  Name of the endpoint to be used for serving the index
+:param primary_key: str
+  Primary key of the index
+:param index_type: :class:`VectorIndexType`
+  There are 2 types of Vector Search indexes:
+  
+  - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
+    incrementally updating the index as the underlying data in the Delta Table changes.
+  - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
+    our REST and SDK APIs. With this model, the user manages index updates.
+:param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional)
+  Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.
+:param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)
+  Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.
+
+:returns: :class:`CreateVectorIndexResponse`
+
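+A hedged sketch of creating a Delta Sync index. The spec classes come from
+`databricks.sdk.service.vectorsearch`; the table, column, and model-endpoint names are placeholders:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+    from databricks.sdk.service.vectorsearch import (DeltaSyncVectorIndexSpecRequest,
+                                                     EmbeddingSourceColumn, PipelineType,
+                                                     VectorIndexType)
+
+    w = WorkspaceClient()
+    resp = w.vector_search_indexes.create_index(
+        name="main.default.demo_index",
+        endpoint_name="demo-endpoint",
+        primary_key="id",
+        index_type=VectorIndexType.DELTA_SYNC,
+        # Required because index_type is DELTA_SYNC.
+        delta_sync_index_spec=DeltaSyncVectorIndexSpecRequest(
+            source_table="main.default.documents",
+            pipeline_type=PipelineType.TRIGGERED,
+            embedding_source_columns=[
+                EmbeddingSourceColumn(name="text",
+                                      embedding_model_endpoint_name="demo-embedding-endpoint")
+            ],
+        ),
+    )
+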
 
     .. py:method:: delete_data_vector_index(index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse
 
         Delete data from index.
-        
-        Handles the deletion of data from a specified vector index.
-        
-        :param index_name: str
-          Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index.
-        :param primary_keys: List[str]
-          List of primary keys for the data to be deleted.
-        
-        :returns: :class:`DeleteDataVectorIndexResponse`
-        
+
+Handles the deletion of data from a specified vector index.
+
+:param index_name: str
+  Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index.
+:param primary_keys: List[str]
+  List of primary keys for the data to be deleted.
+
+:returns: :class:`DeleteDataVectorIndexResponse`
+
 
     .. py:method:: delete_index(index_name: str)
 
         Delete an index.
-        
-        Delete an index.
-        
-        :param index_name: str
-          Name of the index
-        
-        
-        
+
+Delete an index.
+
+:param index_name: str
+  Name of the index
+
+
+
 
     .. py:method:: get_index(index_name: str) -> VectorIndex
 
         Get an index.
-        
-        Get an index.
-        
-        :param index_name: str
-          Name of the index
-        
-        :returns: :class:`VectorIndex`
-        
+
+Get an index.
+
+:param index_name: str
+  Name of the index
+
+:returns: :class:`VectorIndex`
+
 
     .. py:method:: list_indexes(endpoint_name: str [, page_token: Optional[str]]) -> Iterator[MiniVectorIndex]
 
         List indexes.
-        
-        List all indexes in the given endpoint.
-        
-        :param endpoint_name: str
-          Name of the endpoint
-        :param page_token: str (optional)
-          Token for pagination
-        
-        :returns: Iterator over :class:`MiniVectorIndex`
-        
+
+List all indexes in the given endpoint.
+
+:param endpoint_name: str
+  Name of the endpoint
+:param page_token: str (optional)
+  Token for pagination
+
+:returns: Iterator over :class:`MiniVectorIndex`
+
 
     .. py:method:: query_index(index_name: str, columns: List[str] [, filters_json: Optional[str], num_results: Optional[int], query_text: Optional[str], query_type: Optional[str], query_vector: Optional[List[float]], score_threshold: Optional[float]]) -> QueryVectorIndexResponse
 
         Query an index.
-        
-        Query the specified vector index.
-        
-        :param index_name: str
-          Name of the vector index to query.
-        :param columns: List[str]
-          List of column names to include in the response.
-        :param filters_json: str (optional)
-          JSON string representing query filters.
-          
-          Example filters: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater
-          than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id
-          greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.
-        :param num_results: int (optional)
-          Number of results to return. Defaults to 10.
-        :param query_text: str (optional)
-          Query text. Required for Delta Sync Index using model endpoint.
-        :param query_type: str (optional)
-          The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
-        :param query_vector: List[float] (optional)
-          Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
-          vectors.
-        :param score_threshold: float (optional)
-          Threshold for the approximate nearest neighbor search. Defaults to 0.0.
-        
-        :returns: :class:`QueryVectorIndexResponse`
-        
+
+Query the specified vector index.
+
+:param index_name: str
+  Name of the vector index to query.
+:param columns: List[str]
+  List of column names to include in the response.
+:param filters_json: str (optional)
+  JSON string representing query filters.
+  
+  Example filters: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater
+  than 5. - `{"id <=": 5}`: Filter for id less than or equal to 5. - `{"id >=": 5}`: Filter for id
+  greater than or equal to 5. - `{"id": 5}`: Filter for id equal to 5.
+:param num_results: int (optional)
+  Number of results to return. Defaults to 10.
+:param query_text: str (optional)
+  Query text. Required for Delta Sync Index using model endpoint.
+:param query_type: str (optional)
+  The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
+:param query_vector: List[float] (optional)
+  Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
+  vectors.
+:param score_threshold: float (optional)
+  Threshold for the approximate nearest neighbor search. Defaults to 0.0.
+
+:returns: :class:`QueryVectorIndexResponse`
+
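+A sketch of a text query against a Delta Sync index backed by a model endpoint; the index, column,
+and filter values are placeholders:
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    resp = w.vector_search_indexes.query_index(
+        index_name="main.default.demo_index",
+        columns=["id", "text"],
+        query_text="how do I rotate credentials?",
+        num_results=5,
+        # Optional JSON filter, e.g. only rows with id greater than 100.
+        filters_json='{"id >": 100}',
+    )
+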
 
     .. py:method:: query_next_page(index_name: str [, endpoint_name: Optional[str], page_token: Optional[str]]) -> QueryVectorIndexResponse
 
         Query next page.
-        
-        Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
-        to fetch next page of results.
-        
-        :param index_name: str
-          Name of the vector index to query.
-        :param endpoint_name: str (optional)
-          Name of the endpoint.
-        :param page_token: str (optional)
-          Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
-        
-        :returns: :class:`QueryVectorIndexResponse`
-        
+
+Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
+to fetch next page of results.
+
+:param index_name: str
+  Name of the vector index to query.
+:param endpoint_name: str (optional)
+  Name of the endpoint.
+:param page_token: str (optional)
+  Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
+
+:returns: :class:`QueryVectorIndexResponse`
+
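+A hedged pagination sketch (assumes the query response carries the `next_page_token` described
+above; the index name is a placeholder):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    index_name = "main.default.demo_index"
+    resp = w.vector_search_indexes.query_index(index_name=index_name, columns=["id"],
+                                               query_text="example", num_results=10)
+    # Keep fetching pages until the token is exhausted.
+    while resp.next_page_token:
+        resp = w.vector_search_indexes.query_next_page(index_name=index_name,
+                                                       page_token=resp.next_page_token)
+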
 
     .. py:method:: scan_index(index_name: str [, last_primary_key: Optional[str], num_results: Optional[int]]) -> ScanVectorIndexResponse
 
         Scan an index.
-        
-        Scan the specified vector index and return the first `num_results` entries after the exclusive
-        `primary_key`.
-        
-        :param index_name: str
-          Name of the vector index to scan.
-        :param last_primary_key: str (optional)
-          Primary key of the last entry returned in the previous scan.
-        :param num_results: int (optional)
-          Number of results to return. Defaults to 10.
-        
-        :returns: :class:`ScanVectorIndexResponse`
-        
+
+Scan the specified vector index and return the first `num_results` entries after the exclusive
+`primary_key`.
+
+:param index_name: str
+  Name of the vector index to scan.
+:param last_primary_key: str (optional)
+  Primary key of the last entry returned in the previous scan.
+:param num_results: int (optional)
+  Number of results to return. Defaults to 10.
+
+:returns: :class:`ScanVectorIndexResponse`
+
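+A hedged sketch of paging through an index with `scan_index` (assumes the response exposes `data`
+entries and a `last_primary_key` to resume from, per :class:`ScanVectorIndexResponse`):
+
+.. code-block::
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    last_key = None
+    while True:
+        page = w.vector_search_indexes.scan_index(index_name="main.default.demo_index",
+                                                  last_primary_key=last_key, num_results=100)
+        if not page.data:
+            break
+        # Resume the next scan after the last key seen.
+        last_key = page.last_primary_key
+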
 
     .. py:method:: sync_index(index_name: str)
 
         Synchronize an index.
-        
-        Triggers a synchronization process for a specified vector index.
-        
-        :param index_name: str
-          Name of the vector index to synchronize. Must be a Delta Sync Index.
-        
-        
-        
+
+Triggers a synchronization process for a specified vector index.
+
+:param index_name: str
+  Name of the vector index to synchronize. Must be a Delta Sync Index.
+
+
+
 
     .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse
 
         Upsert data into an index.
-        
-        Handles the upserting of data into a specified vector index.
-        
-        :param index_name: str
-          Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
-        :param inputs_json: str
-          JSON string representing the data to be upserted.
-        
-        :returns: :class:`UpsertDataVectorIndexResponse`
-        
\ No newline at end of file
+
+Handles the upserting of data into a specified vector index.
+
+:param index_name: str
+  Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
+:param inputs_json: str
+  JSON string representing the data to be upserted.
+
+:returns: :class:`UpsertDataVectorIndexResponse`
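+
+A minimal sketch; `inputs_json` is a JSON string of rows keyed by column name, and the schema
+below is a placeholder:
+
+.. code-block::
+
+    import json
+
+    from databricks.sdk import WorkspaceClient
+
+    w = WorkspaceClient()
+    rows = [{"id": "1", "text": "hello", "text_vector": [0.1, 0.2, 0.3]}]
+    w.vector_search_indexes.upsert_data_vector_index(
+        index_name="main.default.demo_index",
+        inputs_json=json.dumps(rows),
+    )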
diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst
index 34851e84a..ea93845b6 100644
--- a/docs/workspace/workspace/git_credentials.rst
+++ b/docs/workspace/workspace/git_credentials.rst
@@ -5,10 +5,10 @@
 .. py:class:: GitCredentialsAPI
 
     Registers personal access token for Databricks to do operations on behalf of the user.
-    
-    See [more info].
-    
-    [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+
+See [more info].
+
+[more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
 
     .. py:method:: create(git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse
 
@@ -27,41 +27,41 @@
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Create a credential entry.
-        
-        Creates a Git credential entry for the user. Only one Git credential per user is supported, so any
-        attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update
-        existing credentials, or the DELETE endpoint to delete existing credentials.
-        
-        :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-          `gitLabEnterpriseEdition` and `awsCodeCommit`.
-        :param git_username: str (optional)
-          The username or email provided with your Git provider account, depending on which provider you are
-          using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
-          be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
-          BitBucket Server, username must be used. For all other providers please see your provider's Personal
-          Access Token authentication documentation to see what is supported.
-        :param personal_access_token: str (optional)
-          The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more].
-          
-          [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-        
-        :returns: :class:`CreateCredentialsResponse`
-        
+
+Creates a Git credential entry for the user. Only one Git credential per user is supported, so any
+attempt to create credentials when an entry already exists will fail. Use the PATCH endpoint to
+update existing credentials, or the DELETE endpoint to delete existing credentials.
+
+:param git_provider: str
+  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+  `gitLabEnterpriseEdition` and `awsCodeCommit`.
+:param git_username: str (optional)
+  The username or email provided with your Git provider account, depending on which provider you are
+  using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
+  be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
+  BitBucket Server, username must be used. For all other providers please see your provider's Personal
+  Access Token authentication documentation to see what is supported.
+:param personal_access_token: str (optional)
+  The personal access token used to authenticate to the corresponding Git provider. For certain
+  providers, support may exist for other types of scoped access tokens. [Learn more].
+  
+  [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+
+:returns: :class:`CreateCredentialsResponse`
+
 
     .. py:method:: delete(credential_id: int)
 
         Delete a credential.
-        
-        Deletes the specified Git credential.
-        
-        :param credential_id: int
-          The ID for the corresponding credential to access.
-        
-        
-        
+
+Deletes the specified Git credential.
+
+:param credential_id: int
+  The ID for the corresponding credential to access.
+
+
+
 
     .. py:method:: get(credential_id: int) -> GetCredentialsResponse
 
@@ -82,14 +82,14 @@
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Get a credential entry.
-        
-        Gets the Git credential with the specified credential ID.
-        
-        :param credential_id: int
-          The ID for the corresponding credential to access.
-        
-        :returns: :class:`GetCredentialsResponse`
-        
+
+Gets the Git credential with the specified credential ID.
+
+:param credential_id: int
+  The ID for the corresponding credential to access.
+
+:returns: :class:`GetCredentialsResponse`
+
 
     .. py:method:: list() -> Iterator[CredentialInfo]
 
@@ -105,11 +105,11 @@
             list = w.git_credentials.list()
 
         Get Git credentials.
-        
-        Lists the calling user's Git credentials. One credential per user is supported.
-        
-        :returns: Iterator over :class:`CredentialInfo`
-        
+
+Lists the calling user's Git credentials. One credential per user is supported.
+
+:returns: Iterator over :class:`CredentialInfo`
+
 
     .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]])
 
@@ -135,26 +135,25 @@
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Update a credential.
-        
-        Updates the specified Git credential.
-        
-        :param credential_id: int
-          The ID for the corresponding credential to access.
-        :param git_provider: str
-          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-          `gitLabEnterpriseEdition` and `awsCodeCommit`.
-        :param git_username: str (optional)
-          The username or email provided with your Git provider account, depending on which provider you are
-          using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
-          be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
-          BitBucket Server, username must be used. For all other providers please see your provider's Personal
-          Access Token authentication documentation to see what is supported.
-        :param personal_access_token: str (optional)
-          The personal access token used to authenticate to the corresponding Git provider. For certain
-          providers, support may exist for other types of scoped access tokens. [Learn more].
-          
-          [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-        
-        
-        
\ No newline at end of file
+
+Updates the specified Git credential.
+
+:param credential_id: int
+  The ID for the corresponding credential to access.
+:param git_provider: str
+  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+  `gitLabEnterpriseEdition` and `awsCodeCommit`.
+:param git_username: str (optional)
+  The username or email provided with your Git provider account, depending on which provider you are
+  using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
+  be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
+  BitBucket Server, username must be used. For all other providers please see your provider's Personal
+  Access Token authentication documentation to see what is supported.
+:param personal_access_token: str (optional)
+  The personal access token used to authenticate to the corresponding Git provider. For certain
+  providers, support may exist for other types of scoped access tokens. [Learn more].
+  
+  [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+
+
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index 5f3e3e290..ce8908906 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -5,14 +5,14 @@
 .. py:class:: ReposAPI
 
     The Repos API allows users to manage their git repos. Users can use the API to access all repos that they
-    have manage permissions on.
-    
-    Databricks Repos is a visual Git client in Databricks. It supports common Git operations such a cloning a
-    repository, committing and pushing, pulling, branch management, and visual comparison of diffs when
-    committing.
-    
-    Within Repos you can develop code in notebooks or other files and follow data science and engineering code
-    development best practices using Git for version control, collaboration, and CI/CD.
+have manage permissions on.
+
+Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a
+repository, committing and pushing, pulling, branch management, and visual comparison of diffs when
+committing.
+
+Within Repos you can develop code in notebooks or other files and follow data science and engineering code
+development best practices using Git for version control, collaboration, and CI/CD.
 
     .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> CreateRepoResponse
 
@@ -35,37 +35,37 @@
             w.repos.delete(repo_id=ri.id)
 
         Create a repo.
-        
-        Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
-        programmatically must be linked to a remote Git repo, unlike repos created in the browser.
-        
-        :param url: str
-          URL of the Git repository to be linked.
-        :param provider: str
-          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-          `gitLabEnterpriseEdition` and `awsCodeCommit`.
-        :param path: str (optional)
-          Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-          is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
-        :param sparse_checkout: :class:`SparseCheckout` (optional)
-          If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
-          sparse checkout after the repo is created.
-        
-        :returns: :class:`CreateRepoResponse`
-        
+
+Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
+programmatically must be linked to a remote Git repo, unlike repos created in the browser.
+
+:param url: str
+  URL of the Git repository to be linked.
+:param provider: str
+  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+  `gitLabEnterpriseEdition` and `awsCodeCommit`.
+:param path: str (optional)
+  Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
+  is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
+:param sparse_checkout: :class:`SparseCheckout` (optional)
+  If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
+  sparse checkout after the repo is created.
+
+:returns: :class:`CreateRepoResponse`
+
 
     .. py:method:: delete(repo_id: int)
 
         Delete a repo.
-        
-        Deletes the specified repo.
-        
-        :param repo_id: int
-          The ID for the corresponding repo to delete.
-        
-        
-        
+
+Deletes the specified repo.
+
+:param repo_id: int
+  The ID for the corresponding repo to delete.
+
+
+
 
     .. py:method:: get(repo_id: int) -> GetRepoResponse
 
@@ -90,38 +90,38 @@
             w.repos.delete(repo_id=ri.id)
 
         Get a repo.
-        
-        Returns the repo with the given repo ID.
-        
-        :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
-        
-        :returns: :class:`GetRepoResponse`
-        
+
+Returns the repo with the given repo ID.
+
+:param repo_id: int
+  ID of the Git folder (repo) object in the workspace.
+
+:returns: :class:`GetRepoResponse`
+
 
     .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse
 
         Get repo permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param repo_id: str
-          The repo for which to get or manage permissions.
-        
-        :returns: :class:`GetRepoPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param repo_id: str
+  The repo for which to get or manage permissions.
+
+:returns: :class:`GetRepoPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(repo_id: str) -> RepoPermissions
 
         Get repo permissions.
-        
-        Gets the permissions of a repo. Repos can inherit permissions from their root object.
-        
-        :param repo_id: str
-          The repo for which to get or manage permissions.
-        
-        :returns: :class:`RepoPermissions`
-        
+
+Gets the permissions of a repo. Repos can inherit permissions from their root object.
+
+:param repo_id: str
+  The repo for which to get or manage permissions.
+
+:returns: :class:`RepoPermissions`
+
 
     .. py:method:: list( [, next_page_token: Optional[str], path_prefix: Optional[str]]) -> Iterator[RepoInfo]
 
@@ -138,34 +138,34 @@
             all = w.repos.list(workspace.ListReposRequest())
 
         Get repos.
-        
-        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
-        through additional pages.
-        
-        :param next_page_token: str (optional)
-          Token used to get the next page of results. If not specified, returns the first page of results as
-          well as a next page token if there are more results.
-        :param path_prefix: str (optional)
-          Filters repos that have paths starting with the given path prefix. If not provided or when provided
-          an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will
-          be served.
-        
-        :returns: Iterator over :class:`RepoInfo`
-        
+
+Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+through additional pages.
+
+:param next_page_token: str (optional)
+  Token used to get the next page of results. If not specified, returns the first page of results as
+  well as a next page token if there are more results.
+:param path_prefix: str (optional)
+  Filters repos that have paths starting with the given path prefix. If not provided, or if the
+  prefix is effectively empty (`/` or `/Workspace`), Git folders (repos) from `/Workspace/Repos`
+  will be served.
+
+:returns: Iterator over :class:`RepoInfo`
+
 
     .. py:method:: set_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions
 
         Set repo permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their root object.
-        
-        :param repo_id: str
-          The repo for which to get or manage permissions.
-        :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-        
-        :returns: :class:`RepoPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their root object.
+
+:param repo_id: str
+  The repo for which to get or manage permissions.
+:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
+
+:returns: :class:`RepoPermissions`
+
 
     .. py:method:: update(repo_id: int [, branch: Optional[str], sparse_checkout: Optional[SparseCheckoutUpdate], tag: Optional[str]])
 
@@ -190,34 +190,33 @@
             w.repos.delete(repo_id=ri.id)
 
         Update a repo.
-        
-        Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same
-        branch.
-        
-        :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
-        :param branch: str (optional)
-          Branch that the local version of the repo is checked out to.
-        :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
-          If specified, update the sparse checkout settings. The update will fail if sparse checkout is not
-          enabled for the repo.
-        :param tag: str (optional)
-          Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo
-          in a detached HEAD state. Before committing new changes, you must update the repo to a branch
-          instead of the detached HEAD.
-        
-        
-        
+
+Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same
+branch.
+
+:param repo_id: int
+  ID of the Git folder (repo) object in the workspace.
+:param branch: str (optional)
+  Branch that the local version of the repo is checked out to.
+:param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
+  If specified, update the sparse checkout settings. The update will fail if sparse checkout is not
+  enabled for the repo.
+:param tag: str (optional)
+  Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo
+  in a detached HEAD state. Before committing new changes, you must update the repo to a branch
+  instead of the detached HEAD.
+
+
+
 
     .. py:method:: update_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions
 
         Update repo permissions.
-        
-        Updates the permissions on a repo. Repos can inherit permissions from their root object.
-        
-        :param repo_id: str
-          The repo for which to get or manage permissions.
-        :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-        
-        :returns: :class:`RepoPermissions`
-        
\ No newline at end of file
+
+Updates the permissions on a repo. Repos can inherit permissions from their root object.
+
+:param repo_id: str
+  The repo for which to get or manage permissions.
+:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
+
+:returns: :class:`RepoPermissions`
diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst
index 96d94e1de..cb37e6155 100644
--- a/docs/workspace/workspace/secrets.rst
+++ b/docs/workspace/workspace/secrets.rst
@@ -5,14 +5,14 @@
 .. py:class:: SecretsAPI
 
     The Secrets API allows you to manage secrets, secret scopes, and access permissions.
-    
-    Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
-    directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
-    reference them in notebooks and jobs.
-    
-    Administrators, secret creators, and users granted permission can read Databricks secrets. While
-    Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
-    possible to prevent such users from reading secrets.
+
+Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
+directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
+reference them in notebooks and jobs.
+
+Administrators, secret creators, and users granted permission can read Databricks secrets. While
+Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
+possible to prevent such users from reading secrets.
 
     .. py:method:: create_scope(scope: str [, backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata], initial_manage_principal: Optional[str], scope_backend_type: Optional[ScopeBackendType]])
 
@@ -38,112 +38,112 @@
             w.secrets.delete_scope(scope=scope_name)
 
         Create a new secret scope.
-        
-        The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
-        exceed 128 characters.
-        
-        :param scope: str
-          Scope name requested by the user. Scope names are unique.
-        :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional)
-          The metadata for the secret scope if the type is `AZURE_KEYVAULT`
-        :param initial_manage_principal: str (optional)
-          The principal that is initially granted `MANAGE` permission to the created scope.
-        :param scope_backend_type: :class:`ScopeBackendType` (optional)
-          The backend type the scope will be created with. If not specified, will default to `DATABRICKS`
-        
-        
-        
+
+The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
+exceed 128 characters.
+
+:param scope: str
+  Scope name requested by the user. Scope names are unique.
+:param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional)
+  The metadata for the secret scope if the type is `AZURE_KEYVAULT`
+:param initial_manage_principal: str (optional)
+  The principal that is initially granted `MANAGE` permission to the created scope.
+:param scope_backend_type: :class:`ScopeBackendType` (optional)
+  The backend type the scope will be created with. If not specified, it will default to `DATABRICKS`.
+
+
+
 
     .. py:method:: delete_acl(scope: str, principal: str)
 
         Delete an ACL.
-        
-        Deletes the given ACL on the given scope.
-        
-        Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
-        such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
-        permission to make this API call.
-        
-        :param scope: str
-          The name of the scope to remove permissions from.
-        :param principal: str
-          The principal to remove an existing ACL from.
-        
-        
-        
+
+Deletes the given ACL on the given scope.
+
+Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
+such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
+permission to make this API call.
+
+:param scope: str
+  The name of the scope to remove permissions from.
+:param principal: str
+  The principal to remove an existing ACL from.
+
+
+
 
     .. py:method:: delete_scope(scope: str)
 
         Delete a secret scope.
-        
-        Deletes a secret scope.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
-        does not have permission to make this API call.
-        
-        :param scope: str
-          Name of the scope to delete.
-        
-        
-        
+
+Deletes a secret scope.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
+does not have permission to make this API call.
+
+:param scope: str
+  Name of the scope to delete.
+
+
+
 
     .. py:method:: delete_secret(scope: str, key: str)
 
         Delete a secret.
-        
-        Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
-        secret scope.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
-        if the user does not have permission to make this API call.
-        
-        :param scope: str
-          The name of the scope that contains the secret to delete.
-        :param key: str
-          Name of the secret to delete.
-        
-        
-        
+
+Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
+secret scope.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
+if the user does not have permission to make this API call.
+
+:param scope: str
+  The name of the scope that contains the secret to delete.
+:param key: str
+  Name of the secret to delete.
+
+
+
 
     .. py:method:: get_acl(scope: str, principal: str) -> AclItem
 
         Get secret ACL details.
-        
-        Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
-        permission to invoke this API.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
-        user does not have permission to make this API call.
-        
-        :param scope: str
-          The name of the scope to fetch ACL information from.
-        :param principal: str
-          The principal to fetch ACL information for.
-        
-        :returns: :class:`AclItem`
-        
+
+Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
+permission to invoke this API.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
+user does not have permission to make this API call.
+
+:param scope: str
+  The name of the scope to fetch ACL information from.
+:param principal: str
+  The principal to fetch ACL information for.
+
+:returns: :class:`AclItem`
+
 
     .. py:method:: get_secret(scope: str, key: str) -> GetSecretResponse
 
         Get a secret.
-        
-        Gets the bytes representation of a secret value for the specified scope and key.
-        
-        Users need the READ permission to make this call.
-        
-        Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the
-        caller in DBUtils and the type the data is decoded into.
-        
-        Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
-        ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
-        
-        :param scope: str
-          The name of the scope to fetch secret information from.
-        :param key: str
-          The key to fetch secret for.
-        
-        :returns: :class:`GetSecretResponse`
-        
+
+Gets the bytes representation of a secret value for the specified scope and key.
+
+Users need the READ permission to make this call.
+
+Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the
+caller in DBUtils and the type the data is decoded into.
+
+Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
+
+:param scope: str
+  The name of the scope to fetch secret information from.
+:param key: str
+  The key to fetch secret for.
+
+:returns: :class:`GetSecretResponse`
+
 
     .. py:method:: list_acls(scope: str) -> Iterator[AclItem]
 
@@ -171,17 +171,17 @@
             w.secrets.delete_scope(scope=scope_name)
 
         Lists ACLs.
-        
-        List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
-        user does not have permission to make this API call.
-        
-        :param scope: str
-          The name of the scope to fetch ACL information from.
-        
-        :returns: Iterator over :class:`AclItem`
-        
+
+List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
+user does not have permission to make this API call.
+
+:param scope: str
+  The name of the scope to fetch ACL information from.
+
+:returns: Iterator over :class:`AclItem`
+
 
     .. py:method:: list_scopes() -> Iterator[SecretScope]
 
@@ -197,13 +197,13 @@
             scopes = w.secrets.list_scopes()
 
         List all scopes.
-        
-        Lists all secret scopes available in the workspace.
-        
-        Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
-        
-        :returns: Iterator over :class:`SecretScope`
-        
+
+Lists all secret scopes available in the workspace.
+
+Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
+
+:returns: Iterator over :class:`SecretScope`
+
 
     .. py:method:: list_secrets(scope: str) -> Iterator[SecretMetadata]
 
@@ -231,19 +231,19 @@
             w.secrets.delete_scope(scope=scope_name)
 
         List secret keys.
-        
-        Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
-        cannot be retrieved using this API. Users need the READ permission to make this call.
-        
-        The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
-        no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
-        this API call.
-        
-        :param scope: str
-          The name of the scope to list secrets within.
-        
-        :returns: Iterator over :class:`SecretMetadata`
-        
+
+Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
+cannot be retrieved using this API. Users need the READ permission to make this call.
+
+The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
+no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
+this API call.
+
+:param scope: str
+  The name of the scope to list secrets within.
+
+:returns: Iterator over :class:`SecretMetadata`
+
 
     .. py:method:: put_acl(scope: str, principal: str, permission: AclPermission)
 
@@ -275,41 +275,41 @@
             w.secrets.delete_scope(scope=scope_name)
 
         Create/update an ACL.
-        
-        Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
-        group) on the specified scope point.
-        
-        In general, a user or group will use the most powerful permission available to them, and permissions
-        are ordered as follows:
-        
-        * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
-        read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
-        secrets are available.
-        
-        Note that in general, secret values can only be read from within a command on a cluster (for example,
-        through a notebook). There is no API to read the actual secret value material outside of a cluster.
-        However, the user's permission will be applied based on who is executing the command, and they must
-        have at least READ permission.
-        
-        Users must have the `MANAGE` permission to invoke this API.
-        
-        The principal is a user or group name corresponding to an existing Databricks principal to be granted
-        or revoked access.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
-        permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or
-        principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API
-        call.
-        
-        :param scope: str
-          The name of the scope to apply permissions to.
-        :param principal: str
-          The principal in which the permission is applied.
-        :param permission: :class:`AclPermission`
-          The permission level applied to the principal.
-        
-        
-        
+
+Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
+group) on the specified scope point.
+
+In general, a user or group will use the most powerful permission available to them, and permissions
+are ordered as follows:
+
+* `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
+read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
+secrets are available.
+
+Note that in general, secret values can only be read from within a command on a cluster (for example,
+through a notebook). There is no API to read the actual secret value material outside of a cluster.
+However, the user's permission will be applied based on who is executing the command, and they must
+have at least READ permission.
+
+Users must have the `MANAGE` permission to invoke this API.
+
+The principal is a user or group name corresponding to an existing Databricks principal to be granted
+or revoked access.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
+permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or
+principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API
+call.
+
+:param scope: str
+  The name of the scope to apply permissions to.
+:param principal: str
+  The principal in which the permission is applied.
+:param permission: :class:`AclPermission`
+  The permission level applied to the principal.
+
+
+
 
     .. py:method:: put_secret(scope: str, key: str [, bytes_value: Optional[str], string_value: Optional[str]])
 
@@ -337,31 +337,30 @@
             w.secrets.delete_scope(scope=scope_name)
 
         Add a secret.
-        
-        Inserts a secret under the provided scope with the given name. If a secret already exists with the
-        same name, this command overwrites the existing secret's value. The server encrypts the secret using
-        the secret scope's encryption settings before storing it.
-        
-        You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
-        alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
-        maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.
-        
-        The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
-        the value returned when the secret value is requested. Exactly one must be specified.
-        
-        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
-        maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
-        value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
-        API call.
-        
-        :param scope: str
-          The name of the scope to which the secret will be associated with.
-        :param key: str
-          A unique name to identify the secret.
-        :param bytes_value: str (optional)
-          If specified, value will be stored as bytes.
-        :param string_value: str (optional)
-          If specified, note that the value will be stored in UTF-8 (MB4) form.
-        
-        
-        
\ No newline at end of file
+
+Inserts a secret under the provided scope with the given name. If a secret already exists with the
+same name, this command overwrites the existing secret's value. The server encrypts the secret using
+the secret scope's encryption settings before storing it.
+
+You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
+alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
+maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.
+
+The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
+the value returned when the secret value is requested. Exactly one must be specified.
+
+Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
+maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
+value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
+API call.
+
+:param scope: str
+  The name of the scope with which the secret will be associated.
+:param key: str
+  A unique name to identify the secret.
+:param bytes_value: str (optional)
+  If specified, value will be stored as bytes.
+:param string_value: str (optional)
+  If specified, note that the value will be stored in UTF-8 (MB4) form.
+
+
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 595872deb..5c7516cb8 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -5,30 +5,30 @@
 .. py:class:: WorkspaceExt
 
     The Workspace API allows you to list, import, export, and delete notebooks and folders.
-    
-    A notebook is a web-based interface to a document that contains runnable code, visualizations, and
-    explanatory text.
+
+A notebook is a web-based interface to a document that contains runnable code, visualizations, and
+explanatory text.
 
     .. py:method:: delete(path: str [, recursive: Optional[bool]])
 
         Delete a workspace object.
-        
-        Deletes an object or a directory (and optionally recursively deletes all objects in the directory). *
-        If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a
-        non-empty directory and `recursive` is set to `false`, this call returns an error
-        `DIRECTORY_NOT_EMPTY`.
-        
-        Object deletion cannot be undone and deleting a directory recursively is not atomic.
-        
-        :param path: str
-          The absolute path of the notebook or directory.
-        :param recursive: bool (optional)
-          The flag that specifies whether to delete the object recursively. It is `false` by default. Please
-          note this deleting directory is not atomic. If it fails in the middle, some of objects under this
-          directory may be deleted and cannot be undone.
-        
-        
-        
+
+Deletes an object or a directory (and optionally recursively deletes all objects in the directory). *
+If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a
+non-empty directory and `recursive` is set to `false`, this call returns an error
+`DIRECTORY_NOT_EMPTY`.
+
+Object deletion cannot be undone and deleting a directory recursively is not atomic.
+
+:param path: str
+  The absolute path of the notebook or directory.
+:param recursive: bool (optional)
+  The flag that specifies whether to delete the object recursively. It is `false` by default. Please
+  note that deleting a directory is not atomic: if the operation fails partway, some objects under the
+  directory may already be deleted, and that cannot be undone.
+
+
+
 
     .. py:method:: download(path: str [, format: ExportFormat]) -> BinaryIO
 
@@ -55,15 +55,15 @@
             w.workspace.delete(py_file)
 
         
-        Downloads notebook or file from the workspace
-
-        :param path:     location of the file or notebook on workspace.
-        :param format:   By default, `ExportFormat.SOURCE`. If using `ExportFormat.AUTO` the `path`
-                         is imported or exported as either a workspace file or a notebook, depending
-                         on an analysis of the `item`’s extension and the header content provided in
-                         the request.
-        :return:         file-like `io.BinaryIO` of the `path` contents.
-        
+Downloads a notebook or file from the workspace.
+
+:param path:     location of the file or notebook on workspace.
+:param format:   By default, `ExportFormat.SOURCE`. If using `ExportFormat.AUTO` the `path`
+                 is imported or exported as either a workspace file or a notebook, depending
+                 on an analysis of the `item`’s extension and the header content provided in
+                 the request.
+:return:         file-like `io.BinaryIO` of the `path` contents.
+
 
     .. py:method:: export(path: str [, format: Optional[ExportFormat]]) -> ExportResponse
 
@@ -84,60 +84,60 @@
             export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)
 
         Export a workspace object.
-        
-        Exports an object or the contents of an entire directory.
-        
-        If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
-        
-        If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
-        Currently, this API does not support exporting a library.
-        
-        :param path: str
-          The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`,
-          `SOURCE`, and `AUTO` format.
-        :param format: :class:`ExportFormat` (optional)
-          This specifies the format of the exported file. By default, this is `SOURCE`.
-          
-          The value is case sensitive.
-          
-          - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook
-          entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported
-          as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format.
-          Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to
-          R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type.
-          Directory exports will include notebooks and workspace files.
-        
-        :returns: :class:`ExportResponse`
-        
+
+Exports an object or the contents of an entire directory.
+
+If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
+
+If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
+Currently, this API does not support exporting a library.
+
+:param path: str
+  The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`,
+  `SOURCE`, and `AUTO` formats.
+:param format: :class:`ExportFormat` (optional)
+  This specifies the format of the exported file. By default, this is `SOURCE`.
+  
+  The value is case sensitive.
+  
+  - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook
+  entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported
+  as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format.
+  Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to
+  R Markdown format. - `AUTO`: The object or directory is exported depending on the object's type.
+  Directory exports will include notebooks and workspace files.
+
+:returns: :class:`ExportResponse`
+
 
     .. py:method:: get_permission_levels(workspace_object_type: str, workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse
 
         Get workspace object permission levels.
-        
-        Gets the permission levels that a user can have on an object.
-        
-        :param workspace_object_type: str
-          The workspace object type for which to get or manage permissions.
-        :param workspace_object_id: str
-          The workspace object for which to get or manage permissions.
-        
-        :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse`
-        
+
+Gets the permission levels that a user can have on an object.
+
+:param workspace_object_type: str
+  The workspace object type for which to get or manage permissions.
+:param workspace_object_id: str
+  The workspace object for which to get or manage permissions.
+
+:returns: :class:`GetWorkspaceObjectPermissionLevelsResponse`
+
 
     .. py:method:: get_permissions(workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions
 
         Get workspace object permissions.
-        
-        Gets the permissions of a workspace object. Workspace objects can inherit permissions from their
-        parent objects or root object.
-        
-        :param workspace_object_type: str
-          The workspace object type for which to get or manage permissions.
-        :param workspace_object_id: str
-          The workspace object for which to get or manage permissions.
-        
-        :returns: :class:`WorkspaceObjectPermissions`
-        
+
+Gets the permissions of a workspace object. Workspace objects can inherit permissions from their
+parent objects or root object.
+
+:param workspace_object_type: str
+  The workspace object type for which to get or manage permissions.
+:param workspace_object_id: str
+  The workspace object for which to get or manage permissions.
+
+:returns: :class:`WorkspaceObjectPermissions`
+
 
     .. py:method:: get_status(path: str) -> ObjectInfo
 
@@ -157,15 +157,15 @@
             obj = w.workspace.get_status(path=notebook_path)
 
         Get status.
-        
-        Gets the status of an object or a directory. If `path` does not exist, this call returns an error
-        `RESOURCE_DOES_NOT_EXIST`.
-        
-        :param path: str
-          The absolute path of the notebook or directory.
-        
-        :returns: :class:`ObjectInfo`
-        
+
+Gets the status of an object or a directory. If `path` does not exist, this call returns an error
+`RESOURCE_DOES_NOT_EXIST`.
+
+:param path: str
+  The absolute path of the notebook or directory.
+
+:returns: :class:`ObjectInfo`
+
 
     .. py:method:: import_(path: str [, content: Optional[str], format: Optional[ImportFormat], language: Optional[Language], overwrite: Optional[bool]])
 
@@ -191,40 +191,40 @@
                                 path=notebook_path)
 
         Import a workspace object.
-        
-        Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
-        If `path` already exists and `overwrite` is set to `false`, this call returns an error
-        `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE`
-        format with the `language` field unset. To import a single file as `SOURCE`, you must set the
-        `language` field.
-        
-        :param path: str
-          The absolute path of the object or directory. Importing a directory is only supported for the `DBC`
-          and `SOURCE` formats.
-        :param content: str (optional)
-          The base64-encoded content. This has a limit of 10 MB.
-          
-          If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
-          This parameter might be absent, and instead a posted file is used.
-        :param format: :class:`ImportFormat` (optional)
-          This specifies the format of the file to be imported.
-          
-          The value is case sensitive.
-          
-          - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
-          content provided in the request. If the item is imported as a notebook, then the item's extension is
-          automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`:
-          The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython
-          Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for
-          directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
-        :param language: :class:`Language` (optional)
-          The language of the object. This value is set only if the object type is `NOTEBOOK`.
-        :param overwrite: bool (optional)
-          The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC`
-          format, `overwrite` is not supported since it may contain a directory.
-        
-        
-        
+
+Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
+If `path` already exists and `overwrite` is set to `false`, this call returns an error
+`RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE`
+format with the `language` field unset. To import a single file as `SOURCE`, you must set the
+`language` field.
+
+:param path: str
+  The absolute path of the object or directory. Importing a directory is only supported for the `DBC`
+  and `SOURCE` formats.
+:param content: str (optional)
+  The base64-encoded content. This has a limit of 10 MB.
+  
+  If the limit (10 MB) is exceeded, an exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
+  This parameter might be absent, and instead a posted file is used.
+:param format: :class:`ImportFormat` (optional)
+  This specifies the format of the file to be imported.
+  
+  The value is case sensitive.
+  
+  - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
+  content provided in the request. If the item is imported as a notebook, then the item's extension is
+  automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`:
+  The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython
+  Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for
+  directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
+:param language: :class:`Language` (optional)
+  The language of the object. This value is set only if the object type is `NOTEBOOK`.
+:param overwrite: bool (optional)
+  The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC`
+  format, `overwrite` is not supported since it may contain a directory.
+
+
+
 
     .. py:method:: list(path: str [, notebooks_modified_after: int, recursive: bool = False]) -> ObjectInfo
 
@@ -244,62 +244,62 @@
 
         List workspace objects
 
-        :param recursive: bool
-            Optionally invoke recursive traversal
+:param recursive: bool
+    Optionally invoke recursive traversal
+
+:returns: Iterator of workspaceObjectInfo
 
-        :returns: Iterator of workspaceObjectInfo
-        
 
     .. py:method:: mkdirs(path: str)
 
         Create a directory.
-        
-        Creates the specified directory (and necessary parent directories if they do not exist). If there is
-        an object (not a directory) at any prefix of the input path, this call returns an error
-        `RESOURCE_ALREADY_EXISTS`.
-        
-        Note that if this operation fails it may have succeeded in creating some of the necessary parent
-        directories.
-        
-        :param path: str
-          The absolute path of the directory. If the parent directories do not exist, it will also create
-          them. If the directory already exists, this command will do nothing and succeed.
-        
-        
-        
+
+Creates the specified directory (and necessary parent directories if they do not exist). If there is
+an object (not a directory) at any prefix of the input path, this call returns an error
+`RESOURCE_ALREADY_EXISTS`.
+
+Note that if this operation fails it may have succeeded in creating some of the necessary parent
+directories.
+
+:param path: str
+  The absolute path of the directory. If the parent directories do not exist, it will also create
+  them. If the directory already exists, this command will do nothing and succeed.
+
+
+
 
     .. py:method:: set_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions
 
         Set workspace object permissions.
-        
-        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-        permissions if none are specified. Objects can inherit permissions from their parent objects or root
-        object.
-        
-        :param workspace_object_type: str
-          The workspace object type for which to get or manage permissions.
-        :param workspace_object_id: str
-          The workspace object for which to get or manage permissions.
-        :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-        
-        :returns: :class:`WorkspaceObjectPermissions`
-        
+
+Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+permissions if none are specified. Objects can inherit permissions from their parent objects or root
+object.
+
+:param workspace_object_type: str
+  The workspace object type for which to get or manage permissions.
+:param workspace_object_id: str
+  The workspace object for which to get or manage permissions.
+:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
+
+:returns: :class:`WorkspaceObjectPermissions`
+
 
     .. py:method:: update_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions
 
         Update workspace object permissions.
-        
-        Updates the permissions on a workspace object. Workspace objects can inherit permissions from their
-        parent objects or root object.
-        
-        :param workspace_object_type: str
-          The workspace object type for which to get or manage permissions.
-        :param workspace_object_id: str
-          The workspace object for which to get or manage permissions.
-        :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-        
-        :returns: :class:`WorkspaceObjectPermissions`
-        
+
+Updates the permissions on a workspace object. Workspace objects can inherit permissions from their
+parent objects or root object.
+
+:param workspace_object_type: str
+  The workspace object type for which to get or manage permissions.
+:param workspace_object_id: str
+  The workspace object for which to get or manage permissions.
+:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
+
+:returns: :class:`WorkspaceObjectPermissions`
+
 
     .. py:method:: upload(path: str, content: bytes [, format: ImportFormat, language: Language, overwrite: bool = False])
 
@@ -325,19 +325,18 @@
             w.workspace.delete(notebook)
 
         
-        Uploads a workspace object (for example, a notebook or file) or the contents of an entire
-        directory (`DBC` format).
-
-        Errors:
-         * `RESOURCE_ALREADY_EXISTS`: if `path` already exists no `overwrite=True`.
-         * `INVALID_PARAMETER_VALUE`: if `format` and `content` values are not compatible.
-
-        :param path:     target location of the file on workspace.
-        :param content:  the contents as either raw binary data `bytes` or a file-like the file-like `io.BinaryIO` of the `path` contents.
-        :param format:   By default, `ImportFormat.SOURCE`. If using `ImportFormat.AUTO` the `path`
-                         is imported or exported as either a workspace file or a notebook, depending
-                         on an analysis of the `item`’s extension and the header content provided in
-                         the request. In addition, if the `path` is imported as a notebook, then
-                         the `item`’s extension is automatically removed.
-        :param language: Only required if using `ExportFormat.SOURCE`.
-        
\ No newline at end of file
+Uploads a workspace object (for example, a notebook or file) or the contents of an entire
+directory (`DBC` format).
+
+Errors:
+ * `RESOURCE_ALREADY_EXISTS`: if `path` already exists and `overwrite=True` is not set.
+ * `INVALID_PARAMETER_VALUE`: if `format` and `content` values are not compatible.
+
+:param path:     target location of the file on workspace.
+:param content:  the contents as either raw binary data `bytes` or a file-like object `io.BinaryIO` with the `path` contents.
+:param format:   By default, `ImportFormat.SOURCE`. If using `ImportFormat.AUTO` the `path`
+                 is imported or exported as either a workspace file or a notebook, depending
+                 on an analysis of the `item`’s extension and the header content provided in
+                 the request. In addition, if the `path` is imported as a notebook, then
+                 the `item`’s extension is automatically removed.
+:param language: Only required if using `ImportFormat.SOURCE`.
diff --git a/tests/integration/test_clusters.py b/tests/integration/test_clusters.py
index 930989943..f3a9c6c89 100644
--- a/tests/integration/test_clusters.py
+++ b/tests/integration/test_clusters.py
@@ -41,7 +41,7 @@ def test_create_cluster(w, env_or_skip, random):
 
 def test_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
-        w.clusters.get('__non_existing__')
+        w.clusters.get('123__non_existing__')
     err = exc_info.value
-    assert 'Cluster __non_existing__ does not exist' in str(err)
+    assert 'Cluster 123__non_existing__ does not exist' in str(err)
     assert 'INVALID_PARAMETER_VALUE' == err.error_code

From 41f5f4bc98afff43d6176302788e31a5b619343c Mon Sep 17 00:00:00 2001
From: Aravind Segu 
Date: Thu, 13 Feb 2025 00:26:17 -0800
Subject: [PATCH 097/136] [Feature] Introduce new Credential Strategies for
 Agents (#882)

## What changes are proposed in this pull request?

This PR introduces two new credential strategies for Agents
(AgentEmbeddedCredentials, AgentUserCredentials).

Agents currently use the databricks.sdk to interact with Databricks
resources. However, the authentication method for these resources is
somewhat unusual: the authentication token is stored in a credential
file on the Kubernetes container. Therefore, in the past we added the
Model Serving Credential Strategy to the defaultCredentials list to
read this file.

Now we want to introduce a new authentication method in which the
user's token is instead stored in a thread-local variable. Agent users
will initialize clients as follows:

```
from databricks.sdk.credentials_provider import ModelServingUserCredentials

invokers_client = WorkspaceClient(credential_strategy = ModelServingUserCredentials())
definers_client = WorkspaceClient()

```

Then users can use the invokers_client to interact with resources using
the invoker's token, or the definers_client to interact with resources
using the old method of authentication.
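
A minimal sketch (adapted from the unit tests added in this PR) of how the
invoker's token flows: the serving harness stores a downscoped token on the
handling thread, and the strategy picks it up when authenticating. The host
URL and token values below are placeholders, and a real Model Serving
container also mounts the OAuth token file the provider checks for:

```
import os
import threading

from databricks.sdk.core import Config
from databricks.sdk.credentials_provider import ModelServingUserCredentials

# Signal the Model Serving environment, as the unit test does.
os.environ["IS_IN_DB_MODEL_SERVING_ENV"] = "true"
os.environ["DB_MODEL_SERVING_HOST_URL"] = "https://example.databricks.com"  # placeholder

# The strategy reads "invokers_token" from the current thread's storage,
# where the serving harness places the caller's downscoped token.
threading.current_thread().__dict__["invokers_token"] = "<downscoped-user-token>"

cfg = Config(credentials_strategy=ModelServingUserCredentials())
headers = cfg.authenticate()
# In Model Serving this yields {"Authorization": "Bearer <downscoped-user-token>"}.
```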

Additionally, since users will use these clients to test their code
locally in Databricks Notebooks, the strategy needs to fall back to the
default credential strategies whenever the code is not running in a
model serving environment.
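
As a sketch of that fallback (mirroring the added unit test): with no model
serving markers in the environment, the same strategy resolves through the
default chain, here a PAT taken from placeholder environment variables:

```
import os

from databricks.sdk.core import Config
from databricks.sdk.credentials_provider import ModelServingUserCredentials

# No model serving markers are set, so the strategy delegates to
# DefaultCredentials, which picks up the PAT from the environment.
os.environ["DATABRICKS_HOST"] = "example.cloud.databricks.com"  # placeholder
os.environ["DATABRICKS_TOKEN"] = "token"                        # placeholder PAT

cfg = Config(credentials_strategy=ModelServingUserCredentials())
assert cfg.auth_type == "pat"
assert cfg.authenticate()["Authorization"] == "Bearer token"
```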

More details:
https://docs.google.com/document/d/14qLVjyxIAk581w287TWElstIeh8-DR30ab9Z6B_Vydg/edit?usp=sharing

## How is this tested?

Added unit tests

---------

Signed-off-by: aravind-segu 
---
 databricks/sdk/credentials_provider.py | 87 +++++++++++++++++++++-----
 tests/test_model_serving_auth.py       | 50 ++++++++++++++-
 2 files changed, 119 insertions(+), 18 deletions(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 9a64a4fc3..24d01f678 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -9,6 +9,7 @@
 import platform
 import subprocess
 import sys
+import threading
 import time
 from datetime import datetime
 from typing import Callable, Dict, List, Optional, Tuple, Union
@@ -723,14 +724,17 @@ def inner() -> Dict[str, str]:
 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
 class ModelServingAuthProvider():
+    USER_CREDENTIALS = "user_credentials"
+
     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
 
-    def __init__(self):
+    def __init__(self, credential_type: Optional[str]):
         self.expiry_time = -1
         self.current_token = None
         self.refresh_duration = 300 # 300 Seconds
+        self.credential_type = credential_type
 
-    def should_fetch_model_serving_environment_oauth(self) -> bool:
+    def should_fetch_model_serving_environment_oauth() -> bool:
         """
         Check whether this is the model serving environment
         Additionally check if the oauth token file path exists
@@ -739,15 +743,15 @@ def should_fetch_model_serving_environment_oauth(self) -> bool:
         is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
                                    or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
         return (is_in_model_serving_env == "true"
-                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+                and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
 
-    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
+    def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
         # Use Cached value if it is valid
         if self.current_token is not None and self.expiry_time > time.time():
             return self.current_token
 
         try:
-            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
+            with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
                 oauth_dict = json.load(f)
                 self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
                 self.expiry_time = time.time() + self.refresh_duration
@@ -757,32 +761,43 @@ def get_model_dependency_oauth_token(self, should_retry=True) -> str:
                 logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
                                exc_info=e)
                 time.sleep(0.5)
-                return self.get_model_dependency_oauth_token(should_retry=False)
+                return self._get_model_dependency_oauth_token(should_retry=False)
             else:
                 raise RuntimeError(
                     "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
                 ) from e
         return self.current_token
 
+    def _get_invokers_token(self):
+        current_thread = threading.current_thread()
+        thread_data = current_thread.__dict__
+        invokers_token = None
+        if "invokers_token" in thread_data:
+            invokers_token = thread_data["invokers_token"]
+
+        if invokers_token is None:
+            raise RuntimeError("Unable to read Invokers Token in Databricks Model Serving")
+
+        return invokers_token
+
     def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
-        if not self.should_fetch_model_serving_environment_oauth():
+        if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
             return None
 
        # Read DATABRICKS_MODEL_SERVING_HOST_URL if available, otherwise DB_MODEL_SERVING_HOST_URL.
         host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
             "DB_MODEL_SERVING_HOST_URL")
-        token = self.get_model_dependency_oauth_token()
 
-        return (host, token)
+        if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
+            return (host, self._get_invokers_token())
+        else:
+            return (host, self._get_model_dependency_oauth_token())
 
 
-@credentials_strategy('model-serving', [])
-def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
+def model_serving_auth_visitor(cfg: 'Config',
+                               credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
     try:
-        model_serving_auth_provider = ModelServingAuthProvider()
-        if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth():
-            logger.debug("model-serving: Not in Databricks Model Serving, skipping")
-            return None
+        model_serving_auth_provider = ModelServingAuthProvider(credential_type)
         host, token = model_serving_auth_provider.get_databricks_host_token()
         if token is None:
             raise ValueError(
@@ -793,7 +808,6 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
     except Exception as e:
         logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
         return None
-
     logger.info("Using Databricks Model Serving Authentication")
 
     def inner() -> Dict[str, str]:
@@ -804,6 +818,15 @@ def inner() -> Dict[str, str]:
     return inner
 
 
+@credentials_strategy('model-serving', [])
+def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
+    if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+        logger.debug("model-serving: Not in Databricks Model Serving, skipping")
+        return None
+
+    return model_serving_auth_visitor(cfg)
+
+
 class DefaultCredentials:
     """ Select the first applicable credential provider from the chain """
 
@@ -846,3 +869,35 @@ def __call__(self, cfg: 'Config') -> CredentialsProvider:
         raise ValueError(
             f'cannot configure default credentials, please check {auth_flow_url} to configure credentials for your preferred authentication method.'
         )
+
+
+class ModelServingUserCredentials(CredentialsStrategy):
+    """
+    This credential strategy is designed for authenticating the Databricks SDK in the model serving environment using user-specific rights.
+    In the model serving environment, the strategy retrieves a downscoped user token from a thread-local variable.
+    In any other environment, the strategy falls back to DefaultCredentials.
+    To use this credential strategy, instantiate the WorkspaceClient with the ModelServingUserCredentials strategy as follows:
+
+    invokers_client = WorkspaceClient(credential_strategy = ModelServingUserCredentials())
+    """
+
+    def __init__(self):
+        self.credential_type = ModelServingAuthProvider.USER_CREDENTIALS
+        self.default_credentials = DefaultCredentials()
+
+    def auth_type(self):
+        if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+            return "model_serving_" + self.credential_type
+        else:
+            return self.default_credentials.auth_type()
+
+    def __call__(self, cfg: 'Config') -> CredentialsProvider:
+        if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+            header_factory = model_serving_auth_visitor(cfg, self.credential_type)
+            if not header_factory:
+                raise ValueError(
+                    f"Unable to authenticate using {self.credential_type} in Databricks Model Serving Environment"
+                )
+            return header_factory
+        else:
+            return self.default_credentials(cfg)
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index 13f55668c..49aed33a5 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -1,8 +1,10 @@
+import threading
 import time
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.credentials_provider import ModelServingUserCredentials
 
 from .conftest import raises
 
@@ -39,7 +41,6 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     mocker.patch('databricks.sdk.config.Config._known_file_config_loader')
 
     cfg = Config()
-
     assert cfg.auth_type == 'model-serving'
     headers = cfg.authenticate()
     assert (cfg.host == 'x')
@@ -93,7 +94,6 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     assert headers.get(
         "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file
-
    # Simulate refreshing the token by patching to a new file
     monkeypatch.setattr(
         "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
@@ -113,3 +113,49 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     # Read V2 now
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2'
+
+
+def test_agent_user_credentials(monkeypatch, mocker):
+    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        "tests/testdata/model-serving-test-token")
+
+    invokers_token_val = "databricks_invokers_token"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'model_serving_user_credentials'
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+    # Test updates of invokers token
+    invokers_token_val = "databricks_invokers_token_v2"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+
+# If this credential strategy is used in a non-model-serving environment, the default credential strategy is used instead
+def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch):
+
+    monkeypatch.setenv('DATABRICKS_HOST', 'x')
+    monkeypatch.setenv('DATABRICKS_TOKEN', 'token')
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'https://x')
+    assert headers.get("Authorization") == 'Bearer token'

From 75bcfd3e978f3be338aabbf141dac1478a07014f Mon Sep 17 00:00:00 2001
From: Giorgi Kikolashvili <47174341+gkiko10@users.noreply.github.com>
Date: Thu, 13 Feb 2025 15:20:12 +0100
Subject: [PATCH 098/136] [Internal] GetRun logic paginates more arrays (#867)

## What changes are proposed in this pull request?

The existing code only paginates tasks and iterations. With this PR we
update the logic to also paginate job_clusters, job_parameters and
repair_history. These changes are needed for [Jobs API
2.2](https://docs.databricks.com/api/workspace/jobs/getrun)
compatibility.
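
A minimal sketch of the aggregation pattern this implements, where
`fetch_page` is a hypothetical stand-in for the underlying `runs/get` call:

```
def get_full_run(fetch_page, run_id: int):
    """Aggregate all paginated array fields of a runs/get response."""
    run = fetch_page(run_id, page_token=None)
    while run.next_page_token is not None:
        next_run = fetch_page(run_id, page_token=run.next_page_token)
        # Each new page carries the next slice of every paginated array.
        run.tasks.extend(next_run.tasks)
        run.job_clusters.extend(next_run.job_clusters)
        run.job_parameters.extend(next_run.job_parameters)
        run.repair_history.extend(next_run.repair_history)
        run.next_page_token = next_run.next_page_token
    return run
```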

## How is this tested?

I enabled API 2.2 calls by modifying the URL string `/api/2.2/jobs/runs/get`
in databricks/sdk/service/jobs.py, then ran the unit tests from
tests/test_jobs_mixin.py.
---
 databricks/sdk/mixins/jobs.py | 18 ++++++++-----
 tests/test_jobs_mixin.py      | 50 ++++++++++++++++++++++++-----------
 2 files changed, 46 insertions(+), 22 deletions(-)

diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py
index 01fb013be..c38304966 100644
--- a/databricks/sdk/mixins/jobs.py
+++ b/databricks/sdk/mixins/jobs.py
@@ -11,9 +11,10 @@ def get_run(self,
                 include_history: Optional[bool] = None,
                 include_resolved_values: Optional[bool] = None,
                 page_token: Optional[str] = None) -> jobs.Run:
-        """
-        This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
-        it will paginate through all pages and aggregate the results.
+        """Get a single job run.
+
+        Retrieve the metadata of a run. If the run spans multiple pages, this method paginates through all pages and aggregates the tasks, iterations, job_clusters, job_parameters, and repair history.
+
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
         :param include_history: bool (optional)
@@ -21,8 +22,9 @@ def get_run(self,
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.
+
         :returns: :class:`Run`
         """
         run = super().get_run(run_id,
@@ -34,6 +36,7 @@ def get_run(self,
         # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. Therefore, the client only reads the iterations from the next page and not the tasks.
         is_paginating_iterations = run.iterations is not None and len(run.iterations) > 0
 
+        # runs/get response includes next_page_token as long as there are more pages to fetch.
         while run.next_page_token is not None:
             next_run = super().get_run(run_id,
                                        include_history=include_history,
@@ -43,7 +46,10 @@ def get_run(self,
                 run.iterations.extend(next_run.iterations)
             else:
                 run.tasks.extend(next_run.tasks)
+            # Each new page of runs/get response includes the next page of the job_clusters, job_parameters, and repair history.
+            run.job_clusters.extend(next_run.job_clusters)
+            run.job_parameters.extend(next_run.job_parameters)
+            run.repair_history.extend(next_run.repair_history)
             run.next_page_token = next_run.next_page_token
 
-        run.prev_page_token = None
         return run
\ No newline at end of file
diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py
index 9b5f27138..90f1c0b89 100644
--- a/tests/test_jobs_mixin.py
+++ b/tests/test_jobs_mixin.py
@@ -22,14 +22,28 @@ def test_get_run_with_no_pagination(config, requests_mock):
 
 
 def test_get_run_pagination_with_tasks(config, requests_mock):
+    from databricks.sdk.service import compute, jobs
+    cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12",
+                                       custom_tags={"ResourceClass": "SingleNode"},
+                                       num_workers=0,
+                                       node_type_id="Standard_DS3_v2",
+                                       )
+    cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec)
+    cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec)
+    cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec)
+    cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec)
     run1 = {
         "tasks": [{
             "run_id": 0
         }, {
             "run_id": 1
         }],
+        "job_clusters": [cluster1.as_dict(), cluster2.as_dict(), ],
+        "job_parameters": [{
+            "name": "param1",
+            "value": "value1"
+        }],
         "next_page_token": "tokenToSecondPage",
-        "prev_page_token": "tokenToPreviousPage"
     }
     run2 = {
         "tasks": [{
@@ -37,10 +51,14 @@ def test_get_run_pagination_with_tasks(config, requests_mock):
         }, {
             "run_id": 3
         }],
+        "job_clusters": [cluster3.as_dict(), cluster4.as_dict(), ],
+        "job_parameters": [{
+            "name": "param2",
+            "value": "value2"
+        }],
         "next_page_token": "tokenToThirdPage",
-        "prev_page_token": "initialToken"
     }
-    run3 = {"tasks": [{"run_id": 4}], "next_page_token": None, "prev_page_token": "tokenToSecondPage"}
+    run3 = {"tasks": [{"run_id": 4}]}
     requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
     requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
     requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
@@ -60,6 +78,17 @@ def test_get_run_pagination_with_tasks(config, requests_mock):
         }, {
             'run_id': 4
         }],
+        "job_clusters": [cluster1.as_dict(),
+                         cluster2.as_dict(),
+                         cluster3.as_dict(),
+                         cluster4.as_dict()],
+        "job_parameters": [{
+            "name": "param1",
+            "value": "value1"
+        }, {
+            "name": "param2",
+            "value": "value2"
+        }],
     }
 
 
@@ -74,7 +103,6 @@ def test_get_run_pagination_with_iterations(config, requests_mock):
             "run_id": 1
         }],
         "next_page_token": "tokenToSecondPage",
-        "prev_page_token": "tokenToPreviousPage"
     }
     run2 = {
         "tasks": [{
@@ -86,18 +114,8 @@ def test_get_run_pagination_with_iterations(config, requests_mock):
             "run_id": 3
         }],
         "next_page_token": "tokenToThirdPage",
-        "prev_page_token": "initialToken"
-    }
-    run3 = {
-        "tasks": [{
-            "run_id": 1337
-        }],
-        "iterations": [{
-            "run_id": 4
-        }],
-        "next_page_token": None,
-        "prev_page_token": "tokenToSecondPage"
     }
+    run3 = {"tasks": [{"run_id": 1337}], "iterations": [{"run_id": 4}], }
     requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
     requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
     requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
@@ -120,4 +138,4 @@ def test_get_run_pagination_with_iterations(config, requests_mock):
         }, {
             'run_id': 4
         }],
-    }
\ No newline at end of file
+    }

From 1a1719a87219604f999a1d46ddca8225ddce65ea Mon Sep 17 00:00:00 2001
From: hectorcast-db 
Date: Thu, 13 Feb 2025 17:19:56 +0100
Subject: [PATCH 099/136] [Release] Release v0.44.1 (#887)

### New Features and Improvements

* Introduce new Credential Strategies for Agents
([#882](https://github.com/databricks/databricks-sdk-py/pull/882)).

### Bug Fixes

* Fix public documentation

### Internal Changes

* GetRun logic paginates more arrays
([#867](https://github.com/databricks/databricks-sdk-py/pull/867)).
---
 CHANGELOG.md                                  |   13 +
 databricks/sdk/version.py                     |    2 +-
 docs/account/billing/billable_usage.rst       |   43 +-
 docs/account/billing/budget_policy.rst        |  121 +-
 docs/account/billing/budgets.rst              |   95 +-
 docs/account/billing/log_delivery.rst         |  213 +--
 docs/account/billing/usage_dashboards.rst     |   47 +-
 .../account/catalog/metastore_assignments.rst |  101 +-
 docs/account/catalog/metastores.rst           |   79 +-
 docs/account/catalog/storage_credentials.rst  |  117 +-
 docs/account/iam/access_control.rst           |   73 +-
 docs/account/iam/groups.rst                   |  209 +--
 docs/account/iam/service_principals.rst       |  215 +--
 docs/account/iam/users.rst                    |  297 ++--
 docs/account/iam/workspace_assignment.rst     |   89 +-
 .../account/oauth2/custom_app_integration.rst |  131 +-
 docs/account/oauth2/federation_policy.rst     |  155 +--
 docs/account/oauth2/o_auth_published_apps.rst |   23 +-
 .../oauth2/published_app_integration.rst      |   91 +-
 .../service_principal_federation_policy.rst   |  175 +--
 .../oauth2/service_principal_secrets.rst      |   87 +-
 docs/account/provisioning/credentials.rst     |   89 +-
 docs/account/provisioning/encryption_keys.rst |  151 +-
 docs/account/provisioning/networks.rst        |  105 +-
 docs/account/provisioning/private_access.rst  |  255 ++--
 docs/account/provisioning/storage.rst         |   85 +-
 docs/account/provisioning/vpc_endpoints.rst   |  121 +-
 docs/account/provisioning/workspaces.rst      |  554 ++++----
 .../settings/csp_enablement_account.rst       |   71 +-
 .../settings/disable_legacy_features.rst      |   93 +-
 .../settings/enable_ip_access_lists.rst       |   87 +-
 .../settings/esm_enablement_account.rst       |   65 +-
 docs/account/settings/ip_access_lists.rst     |  239 ++--
 .../account/settings/network_connectivity.rst |  179 +--
 docs/account/settings/personal_compute.rst    |   97 +-
 docs/account/settings/settings.rst            |   36 +-
 docs/workspace/apps/apps.rst                  |  272 ++--
 .../workspace/catalog/artifact_allowlists.rst |   43 +-
 docs/workspace/catalog/catalogs.rst           |  195 +--
 docs/workspace/catalog/connections.rst        |  141 +-
 docs/workspace/catalog/credentials.rst        |  315 ++---
 docs/workspace/catalog/external_locations.rst |  225 +--
 docs/workspace/catalog/functions.rst          |  179 +--
 docs/workspace/catalog/grants.rst             |   89 +-
 docs/workspace/catalog/metastores.rst         |  255 ++--
 docs/workspace/catalog/model_versions.rst     |  203 +--
 docs/workspace/catalog/online_tables.rst      |   56 +-
 docs/workspace/catalog/quality_monitors.rst   |  431 +++---
 docs/workspace/catalog/registered_models.rst  |  331 ++---
 docs/workspace/catalog/resource_quotas.rst    |   59 +-
 docs/workspace/catalog/schemas.rst            |  169 +--
 .../workspace/catalog/storage_credentials.rst |  265 ++--
 docs/workspace/catalog/system_schemas.rst     |   77 +-
 docs/workspace/catalog/table_constraints.rst  |   93 +-
 docs/workspace/catalog/tables.rst             |  255 ++--
 .../catalog/temporary_table_credentials.rst   |   49 +-
 docs/workspace/catalog/volumes.rst            |  237 ++--
 docs/workspace/catalog/workspace_bindings.rst |  135 +-
 .../cleanrooms/clean_room_assets.rst          |  133 +-
 .../cleanrooms/clean_room_task_runs.rst       |   27 +-
 docs/workspace/cleanrooms/clean_rooms.rst     |  127 +-
 docs/workspace/compute/cluster_policies.rst   |  307 ++---
 docs/workspace/compute/clusters.rst           | 1216 ++++++++---------
 docs/workspace/compute/command_execution.rst  |  142 +-
 .../workspace/compute/global_init_scripts.rst |  151 +-
 docs/workspace/compute/instance_pools.rst     |  311 ++---
 docs/workspace/compute/instance_profiles.rst  |  163 +--
 docs/workspace/compute/libraries.rst          |  103 +-
 .../policy_compliance_for_clusters.rst        |  103 +-
 docs/workspace/compute/policy_families.rst    |   57 +-
 docs/workspace/dashboards/genie.rst           |  164 +--
 docs/workspace/dashboards/lakeview.rst        |  363 ++---
 .../dashboards/lakeview_embedded.rst          |   15 +-
 docs/workspace/dashboards/query_execution.rst |   53 +-
 docs/workspace/files/dbfs.rst                 |  220 +--
 docs/workspace/files/files.rst                |  231 ++--
 docs/workspace/iam/access_control.rst         |   23 +-
 .../iam/account_access_control_proxy.rst      |   73 +-
 docs/workspace/iam/current_user.rst           |    9 +-
 docs/workspace/iam/groups.rst                 |  209 +--
 docs/workspace/iam/permission_migration.rst   |   23 +-
 docs/workspace/iam/permissions.rst            |  199 +--
 docs/workspace/iam/service_principals.rst     |  215 +--
 docs/workspace/iam/users.rst                  |  347 ++---
 docs/workspace/jobs/jobs.rst                  | 1154 ++++++++--------
 .../jobs/policy_compliance_for_jobs.rst       |   93 +-
 .../marketplace/consumer_fulfillments.rst     |   41 +-
 .../marketplace/consumer_installations.rst    |  101 +-
 .../marketplace/consumer_listings.rst         |  111 +-
 .../consumer_personalization_requests.rst     |   61 +-
 .../marketplace/consumer_providers.rst        |   45 +-
 .../marketplace/provider_exchange_filters.rst |   61 +-
 .../marketplace/provider_exchanges.rst        |  139 +-
 docs/workspace/marketplace/provider_files.rst |   65 +-
 .../marketplace/provider_listings.rst         |   75 +-
 .../provider_personalization_requests.rst     |   41 +-
 ...provider_provider_analytics_dashboards.rst |   53 +-
 .../marketplace/provider_providers.rst        |   73 +-
 docs/workspace/ml/experiments.rst             |  923 ++++++-------
 docs/workspace/ml/model_registry.rst          | 1141 ++++++++--------
 docs/workspace/pipelines/pipelines.rst        |  596 ++++----
 docs/workspace/serving/serving_endpoints.rst  |  500 +++----
 .../serving/serving_endpoints_data_plane.rst  |   95 +-
 ...aibi_dashboard_embedding_access_policy.rst |   89 +-
 ...i_dashboard_embedding_approved_domains.rst |   91 +-
 .../settings/automatic_cluster_update.rst     |   69 +-
 .../settings/compliance_security_profile.rst  |   73 +-
 .../settings/credentials_manager.rst          |   27 +-
 docs/workspace/settings/default_namespace.rst |  133 +-
 .../settings/disable_legacy_access.rst        |   95 +-
 .../settings/disable_legacy_dbfs.rst          |   87 +-
 .../settings/enhanced_security_monitoring.rst |   77 +-
 docs/workspace/settings/ip_access_lists.rst   |  243 ++--
 .../settings/notification_destinations.rst    |   95 +-
 .../settings/restrict_workspace_admins.rst    |  113 +-
 docs/workspace/settings/settings.rst          |   68 +-
 docs/workspace/settings/token_management.rst  |  127 +-
 docs/workspace/settings/tokens.rst            |   59 +-
 docs/workspace/settings/workspace_conf.rst    |   25 +-
 docs/workspace/sharing/providers.rst          |  189 +--
 .../sharing/recipient_activation.rst          |   43 +-
 docs/workspace/sharing/recipients.rst         |  275 ++--
 docs/workspace/sharing/shares.rst             |  253 ++--
 docs/workspace/sql/alerts.rst                 |  105 +-
 docs/workspace/sql/alerts_legacy.rst          |  173 +--
 docs/workspace/sql/dashboard_widgets.rst      |   73 +-
 docs/workspace/sql/dashboards.rst             |  149 +-
 docs/workspace/sql/data_sources.rst           |   43 +-
 docs/workspace/sql/dbsql_permissions.rst      |  125 +-
 docs/workspace/sql/queries.rst                |  121 +-
 docs/workspace/sql/queries_legacy.rst         |  303 ++--
 docs/workspace/sql/query_history.rst          |   43 +-
 docs/workspace/sql/query_visualizations.rst   |   65 +-
 .../sql/query_visualizations_legacy.rst       |  131 +-
 docs/workspace/sql/redash_config.rst          |    5 +-
 docs/workspace/sql/statement_execution.rst    |  457 +++----
 docs/workspace/sql/warehouses.rst             |  486 +++----
 .../vectorsearch/vector_search_endpoints.rst  |   60 +-
 .../vectorsearch/vector_search_indexes.rst    |  271 ++--
 docs/workspace/workspace/git_credentials.rst  |  141 +-
 docs/workspace/workspace/repos.rst            |  219 +--
 docs/workspace/workspace/secrets.rst          |  375 ++---
 docs/workspace/workspace/workspace.rst        |  353 ++---
 143 files changed, 12550 insertions(+), 12408 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c0e5d78ee..95a290655 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
 # Version changelog
 
+## [Release] Release v0.44.1
+
+### New Features and Improvements
+
+ * Introduce new Credential Strategies for Agents ([#882](https://github.com/databricks/databricks-sdk-py/pull/882)).
+
+
+### Internal Changes
+
+ * GetRun logic paginates more arrays ([#867](https://github.com/databricks/databricks-sdk-py/pull/867)).
+
+
+
 ## [Release] Release v0.44.0
 
 ### Internal Changes
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index a262ca730..c09c695fd 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.44.0'
+__version__ = '0.44.1'
diff --git a/docs/account/billing/billable_usage.rst b/docs/account/billing/billable_usage.rst
index 95a584e56..181b91cc3 100644
--- a/docs/account/billing/billable_usage.rst
+++ b/docs/account/billing/billable_usage.rst
@@ -5,7 +5,7 @@
 .. py:class:: BillableUsageAPI
 
     This API allows you to download billable usage logs for the specified account and date range. This feature
-works with all account types.
+    works with all account types.
 
     .. py:method:: download(start_month: str, end_month: str [, personal_data: Optional[bool]]) -> DownloadResponse
 
@@ -21,23 +21,24 @@ works with all account types.
             resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
 
         Return billable usage logs.
-
-Returns billable usage logs in CSV format for the specified account and date range. For the data
-schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
-
-**Warning**: Depending on the queried date range, the number of workspaces in the account, the size of
-the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If
-you experience this, try to mitigate by calling the API with narrower date ranges.
-
-[CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
-
-:param start_month: str
-  Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
-:param end_month: str
-  Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required.
-:param personal_data: bool (optional)
-  Specify whether to include personally identifiable information in the billable usage logs, for
-  example the email addresses of cluster creators. Handle this information with care. Defaults to
-  false.
-
-:returns: :class:`DownloadResponse`
+        
+        Returns billable usage logs in CSV format for the specified account and date range. For the data
+        schema, see [CSV file schema]. Note that this method might take multiple minutes to complete.
+        
+        **Warning**: Depending on the queried date range, the number of workspaces in the account, the size of
+        the response and the internet speed of the caller, this API may hit a timeout after a few minutes. If
+        you experience this, try to mitigate by calling the API with narrower date ranges.
+        
+        [CSV file schema]: https://docs.databricks.com/administration-guide/account-settings/usage-analysis.html#schema
+        
+        :param start_month: str
+          Format: `YYYY-MM`. First month to return billable usage logs for. This field is required.
+        :param end_month: str
+          Format: `YYYY-MM`. Last month to return billable usage logs for. This field is required.
+        :param personal_data: bool (optional)
+          Specify whether to include personally identifiable information in the billable usage logs, for
+          example the email addresses of cluster creators. Handle this information with care. Defaults to
+          false.
+        
+        :returns: :class:`DownloadResponse`
+        
\ No newline at end of file
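
For reference, the documented `download` call above reduces to this minimal sketch (the date range is illustrative; narrow ranges avoid the timeout the docstring warns about):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# CSV usage data for the given months comes back in the DownloadResponse.
resp = a.billable_usage.download(start_month="2024-08", end_month="2024-09")
```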
diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst
index dac33ddd2..6f7d7ede1 100644
--- a/docs/account/billing/budget_policy.rst
+++ b/docs/account/billing/budget_policy.rst
@@ -9,79 +9,80 @@
     .. py:method:: create( [, custom_tags: Optional[List[compute.CustomPolicyTag]], policy_name: Optional[str], request_id: Optional[str]]) -> BudgetPolicy
 
         Create a budget policy.
-
-Creates a new policy.
-
-:param custom_tags: List[:class:`CustomPolicyTag`] (optional)
-  A list of tags defined by the customer. At most 40 entries are allowed per policy.
-:param policy_name: str (optional)
-  The name of the policy. - Must be unique among active policies. - Can contain only characters of
-  0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
-:param request_id: str (optional)
-  A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
-  recommended. This request is only idempotent if a `request_id` is provided.
-
-:returns: :class:`BudgetPolicy`
-
+        
+        Creates a new policy.
+        
+        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+          A list of tags defined by the customer. At most 40 entries are allowed per policy.
+        :param policy_name: str (optional)
+          The name of the policy. - Must be unique among active policies. - Can contain only characters of
+          0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+          recommended. This request is only idempotent if a `request_id` is provided.
+        
+        :returns: :class:`BudgetPolicy`
+        
 
     .. py:method:: delete(policy_id: str)
 
         Delete a budget policy.
-
-Deletes a policy
-
-:param policy_id: str
-  The Id of the policy.
-
-
-
+        
+        Deletes a policy.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        
+        
 
     .. py:method:: get(policy_id: str) -> BudgetPolicy
 
         Get a budget policy.
-
-Retrieves a policy by it's ID.
-
-:param policy_id: str
-  The Id of the policy.
-
-:returns: :class:`BudgetPolicy`
-
+        
+        Retrieves a policy by its ID.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        :returns: :class:`BudgetPolicy`
+        
 
     .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy]
 
         List policies.
-
-Lists all policies. Policies are returned in the alphabetically ascending order of their names.
-
-:param filter_by: :class:`Filter` (optional)
-  A filter to apply to the list of policies.
-:param page_size: int (optional)
-  The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
-  returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
-:param page_token: str (optional)
-  A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
-  subsequent page. If unspecified, the first page will be returned.
-  
-  When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
-  call that provided the page token.
-:param sort_spec: :class:`SortSpec` (optional)
-  The sort specification.
-
-:returns: Iterator over :class:`BudgetPolicy`
-
+        
+        Lists all policies. Policies are returned in alphabetically ascending order of their names.
+        
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+          subsequent page. If unspecified, the first page will be returned.
+          
+          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+          call that provided the page token.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+        
+        :returns: Iterator over :class:`BudgetPolicy`
+        
 
     .. py:method:: update(policy_id: str [, limit_config: Optional[LimitConfig], policy: Optional[BudgetPolicy]]) -> BudgetPolicy
 
         Update a budget policy.
-
-Updates a policy
-
-:param policy_id: str
-  The Id of the policy. This field is generated by Databricks and globally unique.
-:param limit_config: :class:`LimitConfig` (optional)
-  DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
-:param policy: :class:`BudgetPolicy` (optional)
-  Contains the BudgetPolicy details.
-
-:returns: :class:`BudgetPolicy`
+        
+        Updates a policy.
+        
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This is redundant field as LimitConfig is part of the BudgetPolicy
+        :param policy: :class:`BudgetPolicy` (optional)
+          Contains the BudgetPolicy details.
+        
+        :returns: :class:`BudgetPolicy`
+        
\ No newline at end of file
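
A sketch of the create-then-list flow these docstrings describe; the `a.budget_policy` accessor name and the policy name are assumptions for illustration:

```python
import uuid

from databricks.sdk import AccountClient

a = AccountClient()  # assumes BudgetPolicyAPI is exposed as a.budget_policy

# create() is only idempotent when a request_id accompanies it; a random
# UUID is the recommended form.
created = a.budget_policy.create(policy_name="team-x-budget",
                                 request_id=str(uuid.uuid4()))

# list() yields BudgetPolicy objects and follows page_token internally.
for policy in a.budget_policy.list(page_size=100):
    print(policy.policy_id, policy.policy_name)
```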
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index 9acd2288a..43c77d00b 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -5,8 +5,8 @@
 .. py:class:: BudgetsAPI
 
     These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
-account. You can set up budgets to either track account-wide spending, or apply filters to track the
-spending of specific teams, projects, or workspaces.
+    account. You can set up budgets to either track account-wide spending, or apply filters to track the
+    spending of specific teams, projects, or workspaces.
 
     .. py:method:: create(budget: CreateBudgetConfigurationBudget) -> CreateBudgetConfigurationResponse
 
@@ -47,28 +47,28 @@ spending of specific teams, projects, or workspaces.
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Create new budget.
-
-Create a new budget configuration for an account. For full details, see
-https://docs.databricks.com/en/admin/account-settings/budgets.html.
-
-:param budget: :class:`CreateBudgetConfigurationBudget`
-  Properties of the new budget configuration.
-
-:returns: :class:`CreateBudgetConfigurationResponse`
-
+        
+        Create a new budget configuration for an account. For full details, see
+        https://docs.databricks.com/en/admin/account-settings/budgets.html.
+        
+        :param budget: :class:`CreateBudgetConfigurationBudget`
+          Properties of the new budget configuration.
+        
+        :returns: :class:`CreateBudgetConfigurationResponse`
+        
 
     .. py:method:: delete(budget_id: str)
 
         Delete budget.
-
-Deletes a budget configuration for an account. Both account and budget configuration are specified by
-ID. This cannot be undone.
-
-:param budget_id: str
-  The Databricks budget configuration ID.
-
-
-
+        
+        Deletes a budget configuration for an account. Both account and budget configuration are specified by
+        ID. This cannot be undone.
+        
+        :param budget_id: str
+          The Databricks budget configuration ID.
+        
+        
+        
 
     .. py:method:: get(budget_id: str) -> GetBudgetConfigurationResponse
 
@@ -111,14 +111,14 @@ ID. This cannot be undone.
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Get budget.
-
-Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
-
-:param budget_id: str
-  The budget configuration ID
-
-:returns: :class:`GetBudgetConfigurationResponse`
-
+        
+        Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
+        
+        :param budget_id: str
+          The budget configuration ID
+        
+        :returns: :class:`GetBudgetConfigurationResponse`
+        
 
     .. py:method:: list( [, page_token: Optional[str]]) -> Iterator[BudgetConfiguration]
 
@@ -135,15 +135,15 @@ Gets a budget configuration for an account. Both account and budget configuratio
             all = a.budgets.list(billing.ListBudgetConfigurationsRequest())
 
         Get all budgets.
-
-Gets all budgets associated with this account.
-
-:param page_token: str (optional)
-  A page token received from a previous get all budget configurations call. This token can be used to
-  retrieve the subsequent page. Requests first page if absent.
-
-:returns: Iterator over :class:`BudgetConfiguration`
-
+        
+        Gets all budgets associated with this account.
+        
+        :param page_token: str (optional)
+          A page token received from a previous get all budget configurations call. This token can be used to
+          retrieve the subsequent page. Requests first page if absent.
+        
+        :returns: Iterator over :class:`BudgetConfiguration`
+        
 
     .. py:method:: update(budget_id: str, budget: UpdateBudgetConfigurationBudget) -> UpdateBudgetConfigurationResponse
 
@@ -205,13 +205,14 @@ Gets all budgets associated with this account.
             a.budgets.delete(budget_id=created.budget.budget_configuration_id)
 
         Modify budget.
-
-Updates a budget configuration for an account. Both account and budget configuration are specified by
-ID.
-
-:param budget_id: str
-  The Databricks budget configuration ID.
-:param budget: :class:`UpdateBudgetConfigurationBudget`
-  The updated budget. This will overwrite the budget specified by the budget ID.
-
-:returns: :class:`UpdateBudgetConfigurationResponse`
+        
+        Updates a budget configuration for an account. Both account and budget configuration are specified by
+        ID.
+        
+        :param budget_id: str
+          The Databricks budget configuration ID.
+        :param budget: :class:`UpdateBudgetConfigurationBudget`
+          The updated budget. This will overwrite the budget specified by the budget ID.
+        
+        :returns: :class:`UpdateBudgetConfigurationResponse`
+        
\ No newline at end of file
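
A compact sketch of enumerating budget configurations as described above (field access assumes the BudgetConfiguration schema):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# The iterator requests subsequent pages via page_token as needed.
for budget in a.budgets.list():
    print(budget.budget_configuration_id, budget.display_name)
```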
diff --git a/docs/account/billing/log_delivery.rst b/docs/account/billing/log_delivery.rst
index e8143a711..04ef4e349 100644
--- a/docs/account/billing/log_delivery.rst
+++ b/docs/account/billing/log_delivery.rst
@@ -5,51 +5,51 @@
 .. py:class:: LogDeliveryAPI
 
     These APIs manage log delivery configurations for this account. The two supported log types for this API
-are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
-account ID types.
-
-Log delivery works with all account types. However, if your account is on the E2 version of the platform
-or on a select custom plan that allows multiple workspaces per account, you can optionally configure
-different storage destinations for each workspace. Log delivery status is also provided to know the latest
-status of log delivery attempts. The high-level flow of billable usage delivery:
-
-1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
-Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
-that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For
-full details, including the required IAM role policies and trust relationship, see [Billable usage log
-delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
-object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery
-configuration**: Using Databricks APIs, call the Account API to [create a log delivery
-configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from
-previous steps. You can specify if the logs should include all events of that log type in your account
-(_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery).
-Account level log delivery applies to all current and future workspaces plus account level logs, while
-workspace level log delivery solely delivers logs related to the specified workspaces. You can create
-multiple types of delivery configurations per account.
-
-For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
-delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
-`<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path
-prefix you set up during log delivery configuration. Files are named
-`workspaceId=<workspaceId>-usageMonth=<month>.csv`. * All billable usage logs apply to specific
-workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
-_account level_ delivery configuration that delivers logs for all current and future workspaces in your
-account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
-
-For audit log delivery: * For more information about about audit log delivery, see [Audit log delivery],
-which includes information about the used JSON schema. * The delivery location is
-`<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`.
-Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the
-audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for
-those workspaces are delivered. If the log delivery configuration applies to the entire account (_account
-level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
-workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
-Auditable events are typically available in logs within 15 minutes.
-
-[Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-[Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-[Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
-[create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
+    are _billable usage logs_ and _audit logs_. This feature is in Public Preview. This feature works with all
+    account ID types.
+    
+    Log delivery works with all account types. However, if your account is on the E2 version of the platform
+    or on a select custom plan that allows multiple workspaces per account, you can optionally configure
+    different storage destinations for each workspace. Log delivery status is also provided to know the latest
+    status of log delivery attempts. The high-level flow of billable usage delivery:
+    
+    1. **Create storage**: In AWS, [create a new AWS S3 bucket] with a specific bucket policy. Using
+    Databricks APIs, call the Account API to create a [storage configuration object](:method:Storage/Create)
+    that uses the bucket name. 2. **Create credentials**: In AWS, create the appropriate AWS IAM role. For
+    full details, including the required IAM role policies and trust relationship, see [Billable usage log
+    delivery]. Using Databricks APIs, call the Account API to create a [credential configuration
+    object](:method:Credentials/Create) that uses the IAM role's ARN. 3. **Create log delivery
+    configuration**: Using Databricks APIs, call the Account API to [create a log delivery
+    configuration](:method:LogDelivery/Create) that uses the credential and storage configuration objects from
+    previous steps. You can specify if the logs should include all events of that log type in your account
+    (_Account level_ delivery) or only events for a specific set of workspaces (_workspace level_ delivery).
+    Account level log delivery applies to all current and future workspaces plus account level logs, while
+    workspace level log delivery solely delivers logs related to the specified workspaces. You can create
+    multiple types of delivery configurations per account.
+    
+    For billable usage delivery: * For more information about billable usage logs, see [Billable usage log
+    delivery]. For the CSV schema, see the [Usage page]. * The delivery location is
+    `<bucket-name>/<prefix>/billable-usage/csv/`, where `<prefix>` is the name of the optional delivery path
+    prefix you set up during log delivery configuration. Files are named
+    `workspaceId=<workspaceId>-usageMonth=<month>.csv`. * All billable usage logs apply to specific
+    workspaces (_workspace level_ logs). You can aggregate usage for your entire account by creating an
+    _account level_ delivery configuration that delivers logs for all current and future workspaces in your
+    account. * The files are delivered daily by overwriting the month's CSV file for each workspace.
+    
+    For audit log delivery: * For more information about audit log delivery, see [Audit log delivery],
+    which includes information about the JSON schema used. * The delivery location is
+    `<bucket-name>/<delivery-path-prefix>/workspaceId=<workspaceId>/date=<yyyy-mm-dd>/auditlogs_<internal-id>.json`.
+    Files may get overwritten with the same content multiple times to achieve exactly-once delivery. * If the
+    audit log delivery configuration included specific workspace IDs, only _workspace-level_ audit logs for
+    those workspaces are delivered. If the log delivery configuration applies to the entire account (_account
+    level_ delivery configuration), the audit log delivery includes workspace-level audit logs for all
+    workspaces in the account as well as account-level audit logs. See [Audit log delivery] for details. *
+    Auditable events are typically available in logs within 15 minutes.
+    
+    [Audit log delivery]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+    [Billable usage log delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+    [Usage page]: https://docs.databricks.com/administration-guide/account-settings/usage.html
+    [create a new AWS S3 bucket]: https://docs.databricks.com/administration-guide/account-api/aws-storage.html
 
     .. py:method:: create( [, log_delivery_configuration: Optional[CreateLogDeliveryConfigurationParams]]) -> WrappedLogDeliveryConfiguration
 
@@ -88,32 +88,32 @@ Auditable events are typically available in logs within 15 minutes.
                                         status=billing.LogDeliveryConfigStatus.DISABLED)
 
         Create a new log delivery configuration.
-
-Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs
-to your storage location. This requires that you already created a [credential
-object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
-[storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
-
-For full details, including the required IAM role policies and bucket policies, see [Deliver and
-access billable usage logs] or [Configure audit logging].
-
-**Note**: There is a limit on the number of log delivery configurations available per account (each
-limit applies separately to each log type including billable usage and audit logs). You can create a
-maximum of two enabled account-level delivery configurations (configurations without a workspace
-filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per
-workspace for each log type, which means that the same workspace ID can occur in the workspace filter
-for no more than two delivery configurations per log type.
-
-You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log
-delivery configuration](:method:LogDelivery/PatchStatus)).
-
-[Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
-[Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
-
-:param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional)
-
-:returns: :class:`WrappedLogDeliveryConfiguration`
-
+        
+        Creates a new Databricks log delivery configuration to enable delivery of the specified type of logs
+        to your storage location. This requires that you already created a [credential
+        object](:method:Credentials/Create) (which encapsulates a cross-account service IAM role) and a
+        [storage configuration object](:method:Storage/Create) (which encapsulates an S3 bucket).
+        
+        For full details, including the required IAM role policies and bucket policies, see [Deliver and
+        access billable usage logs] or [Configure audit logging].
+        
+        **Note**: There is a limit on the number of log delivery configurations available per account (each
+        limit applies separately to each log type including billable usage and audit logs). You can create a
+        maximum of two enabled account-level delivery configurations (configurations without a workspace
+        filter) per type. Additionally, you can create two enabled workspace-level delivery configurations per
+        workspace for each log type, which means that the same workspace ID can occur in the workspace filter
+        for no more than two delivery configurations per log type.
+        
+        You cannot delete a log delivery configuration, but you can disable it (see [Enable or disable log
+        delivery configuration](:method:LogDelivery/PatchStatus)).
+        
+        [Configure audit logging]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
+        [Deliver and access billable usage logs]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html
+        
+        :param log_delivery_configuration: :class:`CreateLogDeliveryConfigurationParams` (optional)
+        
+        :returns: :class:`WrappedLogDeliveryConfiguration`
+        
 
     .. py:method:: get(log_delivery_configuration_id: str) -> WrappedLogDeliveryConfiguration
 
@@ -154,14 +154,14 @@ delivery configuration](:method:LogDelivery/PatchStatus)).
                                         status=billing.LogDeliveryConfigStatus.DISABLED)
 
         Get log delivery configuration.
-
-Gets a Databricks log delivery configuration object for an account, both specified by ID.
-
-:param log_delivery_configuration_id: str
-  Databricks log delivery configuration ID
-
-:returns: :class:`WrappedLogDeliveryConfiguration`
-
+        
+        Gets a Databricks log delivery configuration object for an account, both specified by ID.
+        
+        :param log_delivery_configuration_id: str
+          Databricks log delivery configuration ID
+        
+        :returns: :class:`WrappedLogDeliveryConfiguration`
+        
 
     .. py:method:: list( [, credentials_id: Optional[str], status: Optional[LogDeliveryConfigStatus], storage_configuration_id: Optional[str]]) -> Iterator[LogDeliveryConfiguration]
 
@@ -178,34 +178,35 @@ Gets a Databricks log delivery configuration object for an account, both specifi
             all = a.log_delivery.list(billing.ListLogDeliveryRequest())
 
         Get all log delivery configurations.
-
-Gets all Databricks log delivery configurations associated with an account specified by ID.
-
-:param credentials_id: str (optional)
-  Filter by credential configuration ID.
-:param status: :class:`LogDeliveryConfigStatus` (optional)
-  Filter by status `ENABLED` or `DISABLED`.
-:param storage_configuration_id: str (optional)
-  Filter by storage configuration ID.
-
-:returns: Iterator over :class:`LogDeliveryConfiguration`
-
+        
+        Gets all Databricks log delivery configurations associated with an account specified by ID.
+        
+        :param credentials_id: str (optional)
+          Filter by credential configuration ID.
+        :param status: :class:`LogDeliveryConfigStatus` (optional)
+          Filter by status `ENABLED` or `DISABLED`.
+        :param storage_configuration_id: str (optional)
+          Filter by storage configuration ID.
+        
+        :returns: Iterator over :class:`LogDeliveryConfiguration`
+        
 
     .. py:method:: patch_status(log_delivery_configuration_id: str, status: LogDeliveryConfigStatus)
 
         Enable or disable log delivery configuration.
-
-Enables or disables a log delivery configuration. Deletion of delivery configurations is not
-supported, so disable log delivery configurations that are no longer needed. Note that you can't
-re-enable a delivery configuration if this would violate the delivery configuration limits described
-under [Create log delivery](:method:LogDelivery/Create).
-
-:param log_delivery_configuration_id: str
-  Databricks log delivery configuration ID
-:param status: :class:`LogDeliveryConfigStatus`
-  Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults
-  to `ENABLED`. You can [enable or disable the
-  configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is
-  not supported, so disable a log delivery configuration that is no longer needed.
-
-
+        
+        Enables or disables a log delivery configuration. Deletion of delivery configurations is not
+        supported, so disable log delivery configurations that are no longer needed. Note that you can't
+        re-enable a delivery configuration if this would violate the delivery configuration limits described
+        under [Create log delivery](:method:LogDelivery/Create).
+        
+        :param log_delivery_configuration_id: str
+          Databricks log delivery configuration ID
+        :param status: :class:`LogDeliveryConfigStatus`
+          Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults
+          to `ENABLED`. You can [enable or disable the
+          configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is
+          not supported, so disable a log delivery configuration that is no longer needed.
+        
+        
+        
\ No newline at end of file
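
Because configurations cannot be deleted, the disable flow described above looks roughly like this sketch (the configuration name is illustrative; field names assume the LogDeliveryConfiguration schema):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()

# Disabling stale configurations keeps the account under the per-log-type
# limits, since deletion is not supported.
for cfg in a.log_delivery.list(status=billing.LogDeliveryConfigStatus.ENABLED):
    if cfg.config_name == "old-usage-logs":  # illustrative name
        a.log_delivery.patch_status(
            log_delivery_configuration_id=cfg.config_id,
            status=billing.LogDeliveryConfigStatus.DISABLED,
        )
```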
diff --git a/docs/account/billing/usage_dashboards.rst b/docs/account/billing/usage_dashboards.rst
index 44a1c35eb..350ef1f08 100644
--- a/docs/account/billing/usage_dashboards.rst
+++ b/docs/account/billing/usage_dashboards.rst
@@ -5,34 +5,35 @@
 .. py:class:: UsageDashboardsAPI
 
     These APIs manage usage dashboards for this account. Usage dashboards enable you to gain insights into
-your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
-drivers.
+    your usage with pre-built dashboards: visualize breakdowns, analyze tag attributions, and identify cost
+    drivers.
 
     .. py:method:: create( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> CreateBillingUsageDashboardResponse
 
         Create new usage dashboard.
-
-Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
-
-:param dashboard_type: :class:`UsageDashboardType` (optional)
-  Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
-  dashboard shows usage data for all workspaces in the account.
-:param workspace_id: int (optional)
-  The workspace ID of the workspace in which the usage dashboard is created.
-
-:returns: :class:`CreateBillingUsageDashboardResponse`
-
+        
+        Create a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`CreateBillingUsageDashboardResponse`
+        
 
     .. py:method:: get( [, dashboard_type: Optional[UsageDashboardType], workspace_id: Optional[int]]) -> GetBillingUsageDashboardResponse
 
         Get usage dashboard.
-
-Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
-
-:param dashboard_type: :class:`UsageDashboardType` (optional)
-  Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
-  dashboard shows usage data for all workspaces in the account.
-:param workspace_id: int (optional)
-  The workspace ID of the workspace in which the usage dashboard is created.
-
-:returns: :class:`GetBillingUsageDashboardResponse`
+        
+        Get a usage dashboard specified by workspaceId, accountId, and dashboard type.
+        
+        :param dashboard_type: :class:`UsageDashboardType` (optional)
+          Workspace level usage dashboard shows usage data for the specified workspace ID. Global level usage
+          dashboard shows usage data for all workspaces in the account.
+        :param workspace_id: int (optional)
+          The workspace ID of the workspace in which the usage dashboard is created.
+        
+        :returns: :class:`GetBillingUsageDashboardResponse`
+        
\ No newline at end of file
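
A sketch of the create call above; the `a.usage_dashboards` accessor, the enum member, and the workspace ID are assumptions for illustration:

```python
from databricks.sdk import AccountClient
from databricks.sdk.service import billing

a = AccountClient()  # assumes UsageDashboardsAPI is exposed as a.usage_dashboards

# A workspace-level dashboard scopes usage data to one workspace; the
# global type would cover every workspace in the account.
resp = a.usage_dashboards.create(
    dashboard_type=billing.UsageDashboardType.USAGE_DASHBOARD_TYPE_WORKSPACE,
    workspace_id=1234567890,
)
print(resp.dashboard_id)
```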
diff --git a/docs/account/catalog/metastore_assignments.rst b/docs/account/catalog/metastore_assignments.rst
index 00ea12a65..f5b00c6b3 100644
--- a/docs/account/catalog/metastore_assignments.rst
+++ b/docs/account/catalog/metastore_assignments.rst
@@ -9,45 +9,45 @@
     .. py:method:: create(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[CreateMetastoreAssignment]])
 
         Assigns a workspace to a metastore.
-
-Creates an assignment to a metastore for a workspace
-
-:param workspace_id: int
-  Workspace ID.
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param metastore_assignment: :class:`CreateMetastoreAssignment` (optional)
-
-
-
+        
+        Creates an assignment to a metastore for a workspace.
+        
+        :param workspace_id: int
+          Workspace ID.
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param metastore_assignment: :class:`CreateMetastoreAssignment` (optional)
+        
+        
+        
 
     .. py:method:: delete(workspace_id: int, metastore_id: str)
 
         Delete a metastore assignment.
-
-Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
-
-:param workspace_id: int
-  Workspace ID.
-:param metastore_id: str
-  Unity Catalog metastore ID
-
-
-
+        
+        Deletes a metastore assignment to a workspace, leaving the workspace with no metastore.
+        
+        :param workspace_id: int
+          Workspace ID.
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        
+        
+        
 
     .. py:method:: get(workspace_id: int) -> AccountsMetastoreAssignment
 
         Gets the metastore assignment for a workspace.
-
-Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned
-a metastore, the mappig will be returned. If no metastore is assigned to the workspace, the assignment
-will not be found and a 404 returned.
-
-:param workspace_id: int
-  Workspace ID.
-
-:returns: :class:`AccountsMetastoreAssignment`
-
+        
+        Gets the metastore assignment, if any, for the workspace specified by ID. If the workspace is assigned
+        a metastore, the mapping will be returned. If no metastore is assigned to the workspace, the assignment
+        will not be found and a 404 is returned.
+        
+        :param workspace_id: int
+          Workspace ID.
+        
+        :returns: :class:`AccountsMetastoreAssignment`
+        
 
     .. py:method:: list(metastore_id: str) -> Iterator[int]
 
@@ -65,26 +65,27 @@ will not be found and a 404 returned.
             ws = a.metastore_assignments.list(metastore_id=os.environ["TEST_METASTORE_ID"])
 
         Get all workspaces assigned to a metastore.
-
-Gets a list of all Databricks workspace IDs that have been assigned to given metastore.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-
-:returns: Iterator over int
-
+        
+        Gets a list of all Databricks workspace IDs that have been assigned to the given metastore.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        
+        :returns: Iterator over int
+        
 
     .. py:method:: update(workspace_id: int, metastore_id: str [, metastore_assignment: Optional[UpdateMetastoreAssignment]])
 
         Updates a metastore assignment to a workspace.
-
-Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be
-updated.
-
-:param workspace_id: int
-  Workspace ID.
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional)
-
-
+        
+        Updates an assignment to a metastore for a workspace. Currently, only the default catalog may be
+        updated.
+        
+        :param workspace_id: int
+          Workspace ID.
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param metastore_assignment: :class:`UpdateMetastoreAssignment` (optional)
+        
+        
+        
\ No newline at end of file
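
The list example embedded above returns bare workspace IDs, so the full round trip is a short sketch:

```python
import os

from databricks.sdk import AccountClient

a = AccountClient()

# list() yields plain ints: the IDs of workspaces assigned to the metastore.
for workspace_id in a.metastore_assignments.list(
        metastore_id=os.environ["TEST_METASTORE_ID"]):
    print(workspace_id)
```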
diff --git a/docs/account/catalog/metastores.rst b/docs/account/catalog/metastores.rst
index 4a7b66ed6..15f39060d 100644
--- a/docs/account/catalog/metastores.rst
+++ b/docs/account/catalog/metastores.rst
@@ -5,62 +5,63 @@
 .. py:class:: AccountMetastoresAPI
 
     These APIs manage Unity Catalog metastores for an account. A metastore contains catalogs that can be
-associated with workspaces
+    associated with workspaces
 
     .. py:method:: create( [, metastore_info: Optional[CreateMetastore]]) -> AccountsMetastoreInfo
 
         Create metastore.
-
-Creates a Unity Catalog metastore.
-
-:param metastore_info: :class:`CreateMetastore` (optional)
-
-:returns: :class:`AccountsMetastoreInfo`
-
+        
+        Creates a Unity Catalog metastore.
+        
+        :param metastore_info: :class:`CreateMetastore` (optional)
+        
+        :returns: :class:`AccountsMetastoreInfo`
+        
 
     .. py:method:: delete(metastore_id: str [, force: Optional[bool]])
 
         Delete a metastore.
-
-Deletes a Unity Catalog metastore for an account, both specified by ID.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param force: bool (optional)
-  Force deletion even if the metastore is not empty. Default is false.
-
-
-
+        
+        Deletes a Unity Catalog metastore for an account, both specified by ID.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param force: bool (optional)
+          Force deletion even if the metastore is not empty. Default is false.
+        
+        
+        
 
     .. py:method:: get(metastore_id: str) -> AccountsMetastoreInfo
 
         Get a metastore.
-
-Gets a Unity Catalog metastore from an account, both specified by ID.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-
-:returns: :class:`AccountsMetastoreInfo`
-
+        
+        Gets a Unity Catalog metastore from an account, both specified by ID.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        
+        :returns: :class:`AccountsMetastoreInfo`
+        
 
     .. py:method:: list() -> Iterator[MetastoreInfo]
 
         Get all metastores associated with an account.
-
-Gets all Unity Catalog metastores associated with an account specified by ID.
-
-:returns: Iterator over :class:`MetastoreInfo`
-
+        
+        Gets all Unity Catalog metastores associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`MetastoreInfo`
+        
 
     .. py:method:: update(metastore_id: str [, metastore_info: Optional[UpdateMetastore]]) -> AccountsMetastoreInfo
 
         Update a metastore.
-
-Updates an existing Unity Catalog metastore.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param metastore_info: :class:`UpdateMetastore` (optional)
-
-:returns: :class:`AccountsMetastoreInfo`
+        
+        Updates an existing Unity Catalog metastore.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param metastore_info: :class:`UpdateMetastore` (optional)
+        
+        :returns: :class:`AccountsMetastoreInfo`
+        
\ No newline at end of file
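
A minimal sketch of enumerating the account's metastores per the docstrings above (field access assumes the MetastoreInfo schema):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Yields a MetastoreInfo record for every metastore in the account.
for metastore in a.metastores.list():
    print(metastore.metastore_id, metastore.name, metastore.region)
```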
diff --git a/docs/account/catalog/storage_credentials.rst b/docs/account/catalog/storage_credentials.rst
index 65271efcf..453b3a1eb 100644
--- a/docs/account/catalog/storage_credentials.rst
+++ b/docs/account/catalog/storage_credentials.rst
@@ -9,77 +9,78 @@
     .. py:method:: create(metastore_id: str [, credential_info: Optional[CreateStorageCredential]]) -> AccountsStorageCredentialInfo
 
         Create a storage credential.
-
-Creates a new storage credential. The request object is specific to the cloud:
-
-* **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
-**GcpServiceAcountKey** for GCP credentials.
-
-The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
-metastore.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param credential_info: :class:`CreateStorageCredential` (optional)
-
-:returns: :class:`AccountsStorageCredentialInfo`
-
+        
+        Creates a new storage credential. The request object is specific to the cloud:
+        
+        * **AwsIamRole** for AWS credentials * **AzureServicePrincipal** for Azure credentials *
+        **GcpServiceAcountKey** for GCP credentials.
+        
+        The caller must be a metastore admin and have the **CREATE_STORAGE_CREDENTIAL** privilege on the
+        metastore.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param credential_info: :class:`CreateStorageCredential` (optional)
+        
+        :returns: :class:`AccountsStorageCredentialInfo`
+        
 
     .. py:method:: delete(metastore_id: str, storage_credential_name: str [, force: Optional[bool]])
 
         Delete a storage credential.
-
-Deletes a storage credential from the metastore. The caller must be an owner of the storage
-credential.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param storage_credential_name: str
-  Name of the storage credential.
-:param force: bool (optional)
-  Force deletion even if the Storage Credential is not empty. Default is false.
-
-
-
+        
+        Deletes a storage credential from the metastore. The caller must be an owner of the storage
+        credential.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param storage_credential_name: str
+          Name of the storage credential.
+        :param force: bool (optional)
+          Force deletion even if the Storage Credential is not empty. Default is false.
+        
+        
+        
 
     .. py:method:: get(metastore_id: str, storage_credential_name: str) -> AccountsStorageCredentialInfo
 
         Gets the named storage credential.
-
-Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
-storage credential, or have a level of privilege on the storage credential.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param storage_credential_name: str
-  Name of the storage credential.
-
-:returns: :class:`AccountsStorageCredentialInfo`
-
+        
+        Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
+        storage credential, or have a level of privilege on the storage credential.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param storage_credential_name: str
+          Name of the storage credential.
+        
+        :returns: :class:`AccountsStorageCredentialInfo`
+        
 
     .. py:method:: list(metastore_id: str) -> Iterator[StorageCredentialInfo]
 
         Get all storage credentials assigned to a metastore.
-
-Gets a list of all storage credentials that have been assigned to given metastore.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-
-:returns: Iterator over :class:`StorageCredentialInfo`
-
+        
+        Gets a list of all storage credentials that have been assigned to the given metastore.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        
+        :returns: Iterator over :class:`StorageCredentialInfo`
+        
 
     .. py:method:: update(metastore_id: str, storage_credential_name: str [, credential_info: Optional[UpdateStorageCredential]]) -> AccountsStorageCredentialInfo
 
         Updates a storage credential.
-
-Updates a storage credential on the metastore. The caller must be the owner of the storage credential.
-If the caller is a metastore admin, only the __owner__ credential can be changed.
-
-:param metastore_id: str
-  Unity Catalog metastore ID
-:param storage_credential_name: str
-  Name of the storage credential.
-:param credential_info: :class:`UpdateStorageCredential` (optional)
-
-:returns: :class:`AccountsStorageCredentialInfo`
+        
+        Updates a storage credential on the metastore. The caller must be the owner of the storage credential.
+        If the caller is a metastore admin, only the __owner__ credential can be changed.
+        
+        :param metastore_id: str
+          Unity Catalog metastore ID
+        :param storage_credential_name: str
+          Name of the storage credential.
+        :param credential_info: :class:`UpdateStorageCredential` (optional)
+        
+        :returns: :class:`AccountsStorageCredentialInfo`
+        
\ No newline at end of file
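
As a sketch of the per-metastore listing described above (the metastore ID is illustrative):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# All storage credentials assigned to the given metastore.
for cred in a.storage_credentials.list(
        metastore_id="12a345b6-7890-1cd2-3456-e789f0a12b34"):
    print(cred.name)
```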
diff --git a/docs/account/iam/access_control.rst b/docs/account/iam/access_control.rst
index 80ab61361..2537e262c 100644
--- a/docs/account/iam/access_control.rst
+++ b/docs/account/iam/access_control.rst
@@ -5,51 +5,52 @@
 .. py:class:: AccountAccessControlAPI
 
     These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
-grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
-called a rule set.
+    grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
+    called a rule set.
 
     .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse
 
         Get assignable roles for a resource.
-
-Gets all the roles that can be granted on an account level resource. A role is grantable if the rule
-set on the resource can contain an access rule of the role.
-
-:param resource: str
-  The resource name for which assignable roles will be listed.
-
-:returns: :class:`GetAssignableRolesForResourceResponse`
-
+        
+        Gets all the roles that can be granted on an account level resource. A role is grantable if the rule
+        set on the resource can contain an access rule of the role.
+        
+        :param resource: str
+          The resource name for which assignable roles will be listed.
+        
+        :returns: :class:`GetAssignableRolesForResourceResponse`
+        
 
     .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse
 
         Get a rule set.
-
-Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
-rules on the said resource. Currently only a default rule set for each resource is supported.
-
-:param name: str
-  The ruleset name associated with the request.
-:param etag: str
-  Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
-  optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
-  overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
-  modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an
-  etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
-  version you are updating.
-
-:returns: :class:`RuleSetResponse`
-
+        
+        Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
+        rules on the said resource. Currently only a default rule set for each resource is supported.
+        
+        :param name: str
+          The ruleset name associated with the request.
+        :param etag: str
+          Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
+          optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
+          overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
+          modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an
+          etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
+          version you are updating.
+        
+        :returns: :class:`RuleSetResponse`
+        
 
     .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse
 
         Update a rule set.
-
-Replace the rules of a rule set. First, use get to read the current version of the rule set before
-modifying it. This pattern helps prevent conflicts between concurrent updates.
-
-:param name: str
-  Name of the rule set.
-:param rule_set: :class:`RuleSetUpdateRequest`
-
-:returns: :class:`RuleSetResponse`
+        
+        Replace the rules of a rule set. First, use get to read the current version of the rule set before
+        modifying it. This pattern helps prevent conflicts between concurrent updates.
+        
+        :param name: str
+          Name of the rule set.
+        :param rule_set: :class:`RuleSetUpdateRequest`
+        
+        :returns: :class:`RuleSetResponse`
+        
\ No newline at end of file
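
A minimal sketch of the read -> modify -> write etag pattern described above, assuming an
account-level `AccountClient` is configured and that the rule-set methods are exposed as
`a.access_control` (accessor name assumed); the resource name is a hypothetical placeholder:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    # Read the current rule set first; an empty etag requests the latest version.
    name = "accounts/<account-id>/groups/<group-id>/ruleSets/default"  # hypothetical resource
    rule_set = a.access_control.get_rule_set(name=name, etag="")

    # Write back with the etag from the read so concurrent updates are detected.
    a.access_control.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(
            name=name,
            etag=rule_set.etag,
            grant_rules=rule_set.grant_rules,
        ),
    )
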
diff --git a/docs/account/iam/groups.rst b/docs/account/iam/groups.rst
index e7c243fe5..a9a4afeac 100644
--- a/docs/account/iam/groups.rst
+++ b/docs/account/iam/groups.rst
@@ -5,131 +5,132 @@
 .. py:class:: AccountGroupsAPI
 
     Groups simplify identity management, making it easier to assign access to Databricks account, data, and
-other securable objects.
-
-It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
-instead of to users individually. All Databricks account identities can be assigned as members of groups,
-and members inherit permissions that are assigned to their group.
+    other securable objects.
+    
+    It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
+    instead of to users individually. All Databricks account identities can be assigned as members of groups,
+    and members inherit permissions that are assigned to their group.
 
     .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group
 
         Create a new group.
-
-Creates a group in the Databricks account with a unique name, using the supplied group details.
-
-:param display_name: str (optional)
-  String that represents a human-readable group name
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-  values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks group ID
-:param members: List[:class:`ComplexValue`] (optional)
-:param meta: :class:`ResourceMeta` (optional)
-  Container for the group identifier. Workspace local versus account.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`GroupSchema`] (optional)
-  The schema of the group.
-
-:returns: :class:`Group`
-
+        
+        Creates a group in the Databricks account with a unique name, using the supplied group details.
+        
+        :param display_name: str (optional)
+          String that represents a human-readable group name
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+          values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks group ID
+        :param members: List[:class:`ComplexValue`] (optional)
+        :param meta: :class:`ResourceMeta` (optional)
+          Container for the group identifier. Workspace local versus account.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`GroupSchema`] (optional)
+          The schema of the group.
+        
+        :returns: :class:`Group`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a group.
-
-Deletes a group from the Databricks account.
-
-:param id: str
-  Unique ID for a group in the Databricks account.
-
-
-
+        
+        Deletes a group from the Databricks account.
+        
+        :param id: str
+          Unique ID for a group in the Databricks account.
+        
+        
+        
 
     .. py:method:: get(id: str) -> Group
 
         Get group details.
-
-Gets the information for a specific group in the Databricks account.
-
-:param id: str
-  Unique ID for a group in the Databricks account.
-
-:returns: :class:`Group`
-
+        
+        Gets the information for a specific group in the Databricks account.
+        
+        :param id: str
+          Unique ID for a group in the Databricks account.
+        
+        :returns: :class:`Group`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group]
 
         List group details.
-
-Gets all details of the groups associated with the Databricks account.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page. Default is 10000.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`Group`
-
+        
+        Gets all details of the groups associated with the Databricks account.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page. Default is 10000.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`Group`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
         Update group details.
-
-Partially updates the details of a group.
-
-:param id: str
-  Unique ID for a group in the Databricks account.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates the details of a group.
+        
+        :param id: str
+          Unique ID for a group in the Databricks account.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]])
 
         Replace a group.
-
-Updates the details of a group by replacing the entire group entity.
-
-:param id: str
-  Databricks group ID
-:param display_name: str (optional)
-  String that represents a human-readable group name
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-  values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param members: List[:class:`ComplexValue`] (optional)
-:param meta: :class:`ResourceMeta` (optional)
-  Container for the group identifier. Workspace local versus account.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`GroupSchema`] (optional)
-  The schema of the group.
-
-
+        
+        Updates the details of a group by replacing the entire group entity.
+        
+        :param id: str
+          Databricks group ID
+        :param display_name: str (optional)
+          String that represents a human-readable group name
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+          values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param members: List[:class:`ComplexValue`] (optional)
+        :param meta: :class:`ResourceMeta` (optional)
+          Container for the group identifier. Workspace local versus account.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`GroupSchema`] (optional)
+          The schema of the group.
+        
+        
+        
\ No newline at end of file
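
As a usage sketch of the group APIs above, assuming an `AccountClient` with account admin
credentials; the group name and entitlement value are illustrative:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    # Create a group, then add an entitlement via a SCIM PATCH.
    group = a.groups.create(display_name="data-engineers")
    a.groups.patch(
        id=group.id,
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
        operations=[
            iam.Patch(op=iam.PatchOp.ADD,
                      path="entitlements",
                      value=[{"value": "workspace-access"}]),
        ],
    )
    a.groups.delete(id=group.id)
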
diff --git a/docs/account/iam/service_principals.rst b/docs/account/iam/service_principals.rst
index 0f2f5b156..0631386a1 100644
--- a/docs/account/iam/service_principals.rst
+++ b/docs/account/iam/service_principals.rst
@@ -5,10 +5,10 @@
 .. py:class:: AccountServicePrincipalsAPI
 
     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
-Databricks recommends creating service principals to run production jobs or modify production data. If all
-processes that act on production data run with service principals, interactive users do not need any
-write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
-production data by accident.
+    Databricks recommends creating service principals to run production jobs or modify production data. If all
+    processes that act on production data run with service principals, interactive users do not need any
+    write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
+    production data by accident.
 
     .. py:method:: create( [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) -> ServicePrincipal
 
@@ -29,43 +29,43 @@ production data by accident.
             a.service_principals.delete(id=sp_create.id)
 
         Create a service principal.
-
-Creates a new service principal in the Databricks account.
-
-:param active: bool (optional)
-  If this user is active
-:param application_id: str (optional)
-  UUID relating to the service principal
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-  supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks service principal ID.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-  The schema of the List response.
-
-:returns: :class:`ServicePrincipal`
-
+        
+        Creates a new service principal in the Databricks account.
+        
+        :param active: bool (optional)
+          If this user is active
+        :param application_id: str (optional)
+          UUID relating to the service principal
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+          supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks service principal ID.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+          The schema of the List response.
+        
+        :returns: :class:`ServicePrincipal`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a service principal.
-
-Delete a single service principal in the Databricks account.
-
-:param id: str
-  Unique ID for a service principal in the Databricks account.
-
-
-
+        
+        Delete a single service principal in the Databricks account.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks account.
+        
+        
+        
 
     .. py:method:: get(id: str) -> ServicePrincipal
 
@@ -88,14 +88,14 @@ Delete a single service principal in the Databricks account.
             a.service_principals.delete(id=sp_create.id)
 
         Get service principal details.
-
-Gets the details for a single service principal define in the Databricks account.
-
-:param id: str
-  Unique ID for a service principal in the Databricks account.
-
-:returns: :class:`ServicePrincipal`
-
+        
+        Gets the details for a single service principal defined in the Databricks account.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks account.
+        
+        :returns: :class:`ServicePrincipal`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[ServicePrincipal]
 
@@ -120,31 +120,31 @@ Gets the details for a single service principal define in the Databricks account
             a.service_principals.delete(id=sp_create.id)
 
         List service principals.
-
-Gets the set of service principals associated with a Databricks account.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page. Default is 10000.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`ServicePrincipal`
-
+        
+        Gets the set of service principals associated with a Databricks account.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page. Default is 10000.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`ServicePrincipal`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -172,17 +172,17 @@ Gets the set of service principals associated with a Databricks account.
             a.service_principals.delete(id=sp_create.id)
 
         Update service principal details.
-
-Partially updates the details of a single service principal in the Databricks account.
-
-:param id: str
-  Unique ID for a service principal in the Databricks account.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates the details of a single service principal in the Databricks account.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks account.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]])
 
@@ -207,29 +207,30 @@ Partially updates the details of a single service principal in the Databricks ac
             a.service_principals.delete(id=sp_create.id)
 
         Replace service principal.
-
-Updates the details of a single service principal.
-
-This action replaces the existing service principal with the same name.
-
-:param id: str
-  Databricks service principal ID.
-:param active: bool (optional)
-  If this user is active
-:param application_id: str (optional)
-  UUID relating to the service principal
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-  supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-  The schema of the List response.
-
-
+        
+        Updates the details of a single service principal.
+        
+        This action replaces the existing service principal with the same name.
+        
+        :param id: str
+          Databricks service principal ID.
+        :param active: bool (optional)
+          If this user is active
+        :param application_id: str (optional)
+          UUID relating to the service principal
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+          supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+          The schema of the List response.
+        
+        
+        
\ No newline at end of file
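
A short example of the SCIM filter syntax documented above for `list`, assuming an
`AccountClient` is configured; the filter value is illustrative:

    from databricks.sdk import AccountClient

    a = AccountClient()

    # List service principals whose display name starts with "ci-", 100 results per page.
    for sp in a.service_principals.list(filter='displayName sw "ci-"',
                                        sort_by='displayName',
                                        count=100):
        print(sp.id, sp.application_id, sp.display_name)
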
diff --git a/docs/account/iam/users.rst b/docs/account/iam/users.rst
index 77c96d67c..4b8b5bb08 100644
--- a/docs/account/iam/users.rst
+++ b/docs/account/iam/users.rst
@@ -5,14 +5,14 @@
 .. py:class:: AccountUsersAPI
 
     User identities recognized by Databricks and represented by email addresses.
-
-Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
-provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your
-identity provider to create users and groups in Databricks account and give them the proper level of
-access. When a user leaves your organization or no longer needs access to Databricks account, admins can
-terminate the user in your identity provider and that user’s account will also be removed from
-Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from
-accessing sensitive data.
+    
+    Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
+    provider to your Databricks account. SCIM streamlines onboarding a new employee or team by using your
+    identity provider to create users and groups in Databricks account and give them the proper level of
+    access. When a user leaves your organization or no longer needs access to Databricks account, admins can
+    terminate the user in your identity provider and that user’s account will also be removed from
+    Databricks account. This ensures a consistent offboarding process and prevents unauthorized users from
+    accessing sensitive data.
 
     .. py:method:: create( [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) -> User
 
@@ -33,40 +33,40 @@ accessing sensitive data.
             a.users.delete(id=user.id)
 
         Create a new user.
-
-Creates a new user in the Databricks account. This new user will also be added to the Databricks
-account.
-
-:param active: bool (optional)
-  If this user is active
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names. For example `John Smith`. This
-  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-  Account SCIM APIs to update `displayName`.
-  
-  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-:param emails: List[:class:`ComplexValue`] (optional)
-  All the emails associated with the Databricks user.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-  External ID is not currently supported. It is reserved for future use.
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-  be ignored.
-:param name: :class:`Name` (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`UserSchema`] (optional)
-  The schema of the user.
-:param user_name: str (optional)
-  Email address of the Databricks user.
-
-:returns: :class:`User`
-
+        
+        Creates a new user in the Databricks account. This new user will also be added to the Databricks
+        account.
+        
+        :param active: bool (optional)
+          If this user is active
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names. For example `John Smith`. This
+          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+          Account SCIM APIs to update `displayName`.
+          
+          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+        :param emails: List[:class:`ComplexValue`] (optional)
+          All the emails associated with the Databricks user.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+          External ID is not currently supported. It is reserved for future use.
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+          be ignored.
+        :param name: :class:`Name` (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`UserSchema`] (optional)
+          The schema of the user.
+        :param user_name: str (optional)
+          Email address of the Databricks user.
+        
+        :returns: :class:`User`
+        
 
     .. py:method:: delete(id: str)
 
@@ -86,15 +86,15 @@ account.
             a.users.delete(id=user.id)
 
         Delete a user.
-
-Deletes a user. Deleting a user from a Databricks account also removes objects associated with the
-user.
-
-:param id: str
-  Unique ID for a user in the Databricks account.
-
-
-
+        
+        Deletes a user. Deleting a user from a Databricks account also removes objects associated with the
+        user.
+        
+        :param id: str
+          Unique ID for a user in the Databricks account.
+        
+        
+        
 
     .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User
 
@@ -117,64 +117,64 @@ user.
             a.users.delete(id=user.id)
 
         Get user details.
-
-Gets information for a specific user in Databricks account.
-
-:param id: str
-  Unique ID for a user in the Databricks account.
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page. Default is 10000.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-  `name.givenName`, and `emails`.
-:param sort_order: :class:`GetSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: :class:`User`
-
+        
+        Gets information for a specific user in Databricks account.
+        
+        :param id: str
+          Unique ID for a user in the Databricks account.
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page. Default is 10000.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+          `name.givenName`, and `emails`.
+        :param sort_order: :class:`GetSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: :class:`User`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User]
 
         List users.
-
-Gets details for all the users associated with a Databricks account.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page. Default is 10000.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-  `name.givenName`, and `emails`.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`User`
-
+        
+        Gets details for all the users associated with a Databricks account.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page. Default is 10000.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+          `name.givenName`, and `emails`.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`User`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -203,50 +203,51 @@ Gets details for all the users associated with a Databricks account.
             a.users.delete(id=user.id)
 
         Update user details.
-
-Partially updates a user resource by applying the supplied operations on specific user attributes.
-
-:param id: str
-  Unique ID for a user in the Databricks account.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates a user resource by applying the supplied operations on specific user attributes.
+        
+        :param id: str
+          Unique ID for a user in the Databricks account.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]])
 
         Replace a user.
-
-Replaces a user's information with the data supplied in request.
-
-:param id: str
-  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-  be ignored.
-:param active: bool (optional)
-  If this user is active
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names. For example `John Smith`. This
-  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-  Account SCIM APIs to update `displayName`.
-  
-  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-:param emails: List[:class:`ComplexValue`] (optional)
-  All the emails associated with the Databricks user.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-  External ID is not currently supported. It is reserved for future use.
-:param groups: List[:class:`ComplexValue`] (optional)
-:param name: :class:`Name` (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`UserSchema`] (optional)
-  The schema of the user.
-:param user_name: str (optional)
-  Email address of the Databricks user.
-
-
+        
+        Replaces a user's information with the data supplied in request.
+        
+        :param id: str
+          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+          be ignored.
+        :param active: bool (optional)
+          If this user is active
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names. For example `John Smith`. This
+          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+          Account SCIM APIs to update `displayName`.
+          
+          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+        :param emails: List[:class:`ComplexValue`] (optional)
+          All the emails associated with the Databricks user.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+          External ID is not currently supported. It is reserved for future use.
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param name: :class:`Name` (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`UserSchema`] (optional)
+          The schema of the user.
+        :param user_name: str (optional)
+          Email address of the Databricks user.
+        
+        
+        
\ No newline at end of file
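
A brief sketch tying together `create` and `patch` above, assuming an `AccountClient`; the email
address is a placeholder:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    # Create a user, then deactivate them with a SCIM PATCH rather than deleting.
    user = a.users.create(user_name="jane.doe@example.com", display_name="Jane Doe")
    a.users.patch(
        id=user.id,
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP],
        operations=[iam.Patch(op=iam.PatchOp.REPLACE, path="active", value=False)],
    )
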
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index a6e912d93..697f0a5da 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -5,34 +5,34 @@
 .. py:class:: WorkspaceAssignmentAPI
 
     The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your
-account.
+    account.
 
     .. py:method:: delete(workspace_id: int, principal_id: int)
 
         Delete permissions assignment.
-
-Deletes the workspace permissions assignment in a given account and workspace for the specified
-principal.
-
-:param workspace_id: int
-  The workspace ID for the account.
-:param principal_id: int
-  The ID of the user, service principal, or group.
-
-
-
+        
+        Deletes the workspace permissions assignment in a given account and workspace for the specified
+        principal.
+        
+        :param workspace_id: int
+          The workspace ID for the account.
+        :param principal_id: int
+          The ID of the user, service principal, or group.
+        
+        
+        
 
     .. py:method:: get(workspace_id: int) -> WorkspacePermissions
 
         List workspace permissions.
-
-Get an array of workspace permissions for the specified account and workspace.
-
-:param workspace_id: int
-  The workspace ID.
-
-:returns: :class:`WorkspacePermissions`
-
+        
+        Get an array of workspace permissions for the specified account and workspace.
+        
+        :param workspace_id: int
+          The workspace ID.
+        
+        :returns: :class:`WorkspacePermissions`
+        
 
     .. py:method:: list(workspace_id: int) -> Iterator[PermissionAssignment]
 
@@ -52,14 +52,14 @@ Get an array of workspace permissions for the specified account and workspace.
             all = a.workspace_assignment.list(list=workspace_id)
 
         Get permission assignments.
-
-Get the permission assignments for the specified Databricks account and Databricks workspace.
-
-:param workspace_id: int
-  The workspace ID for the account.
-
-:returns: Iterator over :class:`PermissionAssignment`
-
+        
+        Get the permission assignments for the specified Databricks account and Databricks workspace.
+        
+        :param workspace_id: int
+          The workspace ID for the account.
+        
+        :returns: Iterator over :class:`PermissionAssignment`
+        
 
     .. py:method:: update(workspace_id: int, principal_id: int [, permissions: Optional[List[WorkspacePermission]]]) -> PermissionAssignment
 
@@ -87,19 +87,20 @@ Get the permission assignments for the specified Databricks account and Databric
                                               permissions=[iam.WorkspacePermission.USER])
 
         Create or update permissions assignment.
-
-Creates or updates the workspace permissions assignment in a given account and workspace for the
-specified principal.
-
-:param workspace_id: int
-  The workspace ID.
-:param principal_id: int
-  The ID of the user, service principal, or group.
-:param permissions: List[:class:`WorkspacePermission`] (optional)
-  Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
-  (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
-  will be ignored. Note that excluding this field, or providing unsupported values, will have the same
-  effect as providing an empty list, which will result in the deletion of all permissions for the
-  principal.
-
-:returns: :class:`PermissionAssignment`
+        
+        Creates or updates the workspace permissions assignment in a given account and workspace for the
+        specified principal.
+        
+        :param workspace_id: int
+          The workspace ID.
+        :param principal_id: int
+          The ID of the user, service principal, or group.
+        :param permissions: List[:class:`WorkspacePermission`] (optional)
+          Array of permissions assignments to update on the workspace. Valid values are "USER" and "ADMIN"
+          (case-sensitive). If both "USER" and "ADMIN" are provided, "ADMIN" takes precedence. Other values
+          will be ignored. Note that excluding this field, or providing unsupported values, will have the same
+          effect as providing an empty list, which will result in the deletion of all permissions for the
+          principal.
+        
+        :returns: :class:`PermissionAssignment`
+        
\ No newline at end of file
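
A minimal sketch of `update` above; note the docstring's warning that omitting `permissions` or
passing unsupported values deletes all permissions for the principal. The IDs are hypothetical:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import iam

    a = AccountClient()

    workspace_id = 1234567890  # hypothetical workspace ID
    principal_id = 987654321   # hypothetical user, group, or service principal ID

    # Grant USER-level workspace access to the principal.
    a.workspace_assignment.update(workspace_id=workspace_id,
                                  principal_id=principal_id,
                                  permissions=[iam.WorkspacePermission.USER])
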
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 6d85306f9..7043a343b 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -5,88 +5,89 @@
 .. py:class:: CustomAppIntegrationAPI
 
     These APIs enable administrators to manage custom OAuth app integrations, which is required for
-adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
+    adding/using a custom OAuth app integration such as Tableau Cloud for Databricks in the AWS cloud.
 
     .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput
 
         Create Custom OAuth App Integration.
-
-Create Custom OAuth App Integration.
-
-You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
-
-:param confidential: bool (optional)
-  This field indicates whether an OAuth client secret is required to authenticate this client.
-:param name: str (optional)
-  Name of the custom OAuth app
-:param redirect_urls: List[str] (optional)
-  List of OAuth redirect urls
-:param scopes: List[str] (optional)
-  OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
-  profile, email.
-:param token_access_policy: :class:`TokenAccessPolicy` (optional)
-  Token access policy
-:param user_authorized_scopes: List[str] (optional)
-  Scopes that will need to be consented by end user to mint the access token. If the user does not
-  authorize the access token will not be minted. Must be a subset of scopes.
-
-:returns: :class:`CreateCustomAppIntegrationOutput`
-
+        
+        Create Custom OAuth App Integration.
+        
+        You can retrieve the custom OAuth app integration via :method:CustomAppIntegration/get.
+        
+        :param confidential: bool (optional)
+          This field indicates whether an OAuth client secret is required to authenticate this client.
+        :param name: str (optional)
+          Name of the custom OAuth app
+        :param redirect_urls: List[str] (optional)
+          List of OAuth redirect urls
+        :param scopes: List[str] (optional)
+          OAuth scopes granted to the application. Supported scopes: all-apis, sql, offline_access, openid,
+          profile, email.
+        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
+          Token access policy
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user will need to consent to in order to mint the access token. If the user
+          does not authorize them, the access token will not be minted. Must be a subset of scopes.
+        
+        :returns: :class:`CreateCustomAppIntegrationOutput`
+        
 
     .. py:method:: delete(integration_id: str)
 
         Delete Custom OAuth App Integration.
-
-Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
-:method:CustomAppIntegration/get.
-
-:param integration_id: str
-
-
-
+        
+        Delete an existing Custom OAuth App Integration. You can retrieve the custom OAuth app integration via
+        :method:CustomAppIntegration/get.
+        
+        :param integration_id: str
+        
+        
+        
 
     .. py:method:: get(integration_id: str) -> GetCustomAppIntegrationOutput
 
         Get OAuth Custom App Integration.
-
-Gets the Custom OAuth App Integration for the given integration id.
-
-:param integration_id: str
-  The OAuth app integration ID.
-
-:returns: :class:`GetCustomAppIntegrationOutput`
-
+        
+        Gets the Custom OAuth App Integration for the given integration id.
+        
+        :param integration_id: str
+          The OAuth app integration ID.
+        
+        :returns: :class:`GetCustomAppIntegrationOutput`
+        
 
     .. py:method:: list( [, include_creator_username: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetCustomAppIntegrationOutput]
 
         Get custom oauth app integrations.
-
-Get the list of custom OAuth app integrations for the specified Databricks account
-
-:param include_creator_username: bool (optional)
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`GetCustomAppIntegrationOutput`
-
+        
+        Get the list of custom OAuth app integrations for the specified Databricks account.
+        
+        :param include_creator_username: bool (optional)
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
+        
 
     .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]])
 
         Updates Custom OAuth App Integration.
-
-Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
-via :method:CustomAppIntegration/get.
-
-:param integration_id: str
-:param redirect_urls: List[str] (optional)
-  List of OAuth redirect urls to be updated in the custom OAuth app integration
-:param scopes: List[str] (optional)
-  List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs
-  this will fully replace the existing values instead of appending
-:param token_access_policy: :class:`TokenAccessPolicy` (optional)
-  Token access policy to be updated in the custom OAuth app integration
-:param user_authorized_scopes: List[str] (optional)
-  Scopes that will need to be consented by end user to mint the access token. If the user does not
-  authorize the access token will not be minted. Must be a subset of scopes.
-
-
+        
+        Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
+        via :method:CustomAppIntegration/get.
+        
+        :param integration_id: str
+        :param redirect_urls: List[str] (optional)
+          List of OAuth redirect urls to be updated in the custom OAuth app integration
+        :param scopes: List[str] (optional)
+          List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs
+          this will fully replace the existing values instead of appending
+        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
+          Token access policy to be updated in the custom OAuth app integration
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user will need to consent to in order to mint the access token. If the user
+          does not authorize them, the access token will not be minted. Must be a subset of scopes.
+        
+        
+        
\ No newline at end of file
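
A usage sketch for `create` above, assuming an `AccountClient` and that the API is exposed as
`a.custom_app_integration` (accessor name assumed); the app name, redirect URL, and TTL values
are illustrative, and the client secret of a confidential app is returned only by this call:

    from databricks.sdk import AccountClient
    from databricks.sdk.service import oauth2

    a = AccountClient()

    # Register a confidential OAuth app with a one-hour access token TTL.
    created = a.custom_app_integration.create(
        name="tableau-cloud",  # hypothetical app name
        confidential=True,
        redirect_urls=["https://sso.online.tableau.com/public/oidc/login"],  # illustrative
        scopes=["all-apis"],
        token_access_policy=oauth2.TokenAccessPolicy(
            access_token_ttl_in_minutes=60,
            refresh_token_ttl_in_minutes=10080,
        ),
    )
    print(created.integration_id, created.client_id)
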
diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst
index c1ea393c7..c95bf563c 100644
--- a/docs/account/oauth2/federation_policy.rst
+++ b/docs/account/oauth2/federation_policy.rst
@@ -5,100 +5,101 @@
 .. py:class:: AccountFederationPolicyAPI
 
     These APIs manage account federation policies.
-
-Account federation policies allow users and service principals in your Databricks account to securely
-access Databricks APIs using tokens from your trusted identity providers (IdPs).
-
-With token federation, your users and service principals can exchange tokens from your IdP for Databricks
-OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
-Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
-Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
-synchronized into your Databricks account.
-
-Token federation is configured in your Databricks account using an account federation policy. An account
-federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
-how to determine which Databricks user, or subject, a token is issued for
-
-To configure a federation policy, you provide the following: * The required token __issuer__, as specified
-in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
-token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
-represent the recipient of the token. As long as the audience in the token matches at least one audience
-in the policy, the token is considered a match. If unspecified, the default value is your Databricks
-account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
-the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
-public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
-Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
-strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
-
-An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
-subject_claim: "sub" ```
-
-An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
-`username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
-"username@mycompany.com" } ```
-
-You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
-your users do not already have the ability to generate tokens that are compatible with your federation
-policy.
-
-You do not need to configure an OAuth application in Databricks to use token federation.
-
-[SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
+    
+    Account federation policies allow users and service principals in your Databricks account to securely
+    access Databricks APIs using tokens from your trusted identity providers (IdPs).
+    
+    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+    synchronized into your Databricks account.
+    
+    Token federation is configured in your Databricks account using an account federation policy. An account
+    federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
+    how to determine which Databricks user, or subject, a token is issued for
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
+    token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
+    represent the recipient of the token. As long as the audience in the token matches at least one audience
+    in the policy, the token is considered a match. If unspecified, the default value is your Databricks
+    account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
+    the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
+    public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
+    Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
+    strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
+    
+    An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
+    subject_claim: "sub" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
+    `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
+    "username@mycompany.com" } ```
+    
+    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+    your users do not already have the ability to generate tokens that are compatible with your federation
+    policy.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
 
     .. py:method:: create( [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
 
         Create account federation policy.
-
-:param policy: :class:`FederationPolicy` (optional)
-:param policy_id: str (optional)
-  The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
-  characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-
-:returns: :class:`FederationPolicy`
-
+        
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
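+        Example (a minimal sketch of the policy above; it assumes the
+        `a.federation_policy` accessor and the `FederationPolicy` /
+        `OidcFederationPolicy` dataclasses in `databricks.sdk.service.oauth2`):
+
+        .. code-block:: python
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.oauth2 import (FederationPolicy,
+                                                       OidcFederationPolicy)
+
+            a = AccountClient()
+
+            # Accept tokens issued by the corporate IdP and map the "sub" claim to
+            # the Databricks username.
+            policy = a.federation_policy.create(policy=FederationPolicy(
+                oidc_policy=OidcFederationPolicy(issuer="https://idp.mycompany.com/oidc",
+                                                 audiences=["databricks"],
+                                                 subject_claim="sub")))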
 
     .. py:method:: delete(policy_id: str)
 
         Delete account federation policy.
-
-:param policy_id: str
-  The identifier for the federation policy.
-
-
-
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        
 
     .. py:method:: get(policy_id: str) -> FederationPolicy
 
         Get account federation policy.
-
-:param policy_id: str
-  The identifier for the federation policy.
-
-:returns: :class:`FederationPolicy`
-
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
 
         List account federation policies.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`FederationPolicy`
-
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
 
     .. py:method:: update(policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update account federation policy.
-
-:param policy_id: str
-  The identifier for the federation policy.
-:param policy: :class:`FederationPolicy` (optional)
-:param update_mask: str (optional)
-  The field mask specifies which fields of the policy to update. To specify multiple fields in the
-  field mask, use comma as the separator (no space). The special value '*' indicates that all fields
-  should be updated (full replacement). If unspecified, all fields that are set in the policy provided
-  in the update request will overwrite the corresponding fields in the existing policy. Example value:
-  'description,oidc_policy.audiences'.
-
-:returns: :class:`FederationPolicy`
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        
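+        Example (sketch, continuing the create example above; "my-idp-policy" is a
+        hypothetical id chosen at create time):
+
+        .. code-block:: python
+
+            # Replace only the audiences list; other fields keep their current values.
+            a.federation_policy.update(policy_id="my-idp-policy",
+                                       policy=FederationPolicy(oidc_policy=OidcFederationPolicy(
+                                           audiences=["databricks", "databricks-account-api"])),
+                                       update_mask="oidc_policy.audiences")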
\ No newline at end of file
diff --git a/docs/account/oauth2/o_auth_published_apps.rst b/docs/account/oauth2/o_auth_published_apps.rst
index e0dc2e303..18c07c326 100644
--- a/docs/account/oauth2/o_auth_published_apps.rst
+++ b/docs/account/oauth2/o_auth_published_apps.rst
@@ -5,18 +5,19 @@
 .. py:class:: OAuthPublishedAppsAPI
 
     These APIs enable administrators to view all the available published OAuth applications in Databricks.
-Administrators can add the published OAuth applications to their account through the OAuth Published App
-Integration APIs.
+    Administrators can add the published OAuth applications to their account through the OAuth Published App
+    Integration APIs.
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PublishedAppOutput]
 
         Get all the published OAuth apps.
-
-Get all the available published OAuth apps in Databricks.
-
-:param page_size: int (optional)
-  The max number of OAuth published apps to return in one page.
-:param page_token: str (optional)
-  A token that can be used to get the next page of results.
-
-:returns: Iterator over :class:`PublishedAppOutput`
+        
+        Get all the available published OAuth apps in Databricks.
+        
+        :param page_size: int (optional)
+          The max number of OAuth published apps to return in one page.
+        :param page_token: str (optional)
+          A token that can be used to get the next page of results.
+        
+        :returns: Iterator over :class:`PublishedAppOutput`
+        
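+        Example (a minimal sketch; `a.o_auth_published_apps` is the assumed accessor
+        name for this API). The returned iterator follows `page_token` automatically,
+        so paging rarely needs to be handled by hand:
+
+        .. code-block:: python
+
+            from databricks.sdk import AccountClient
+
+            a = AccountClient()
+
+            # Print every published OAuth app; the SDK fetches further pages as needed.
+            for app in a.o_auth_published_apps.list(page_size=50):
+                print(app.app_id, app.name)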
\ No newline at end of file
diff --git a/docs/account/oauth2/published_app_integration.rst b/docs/account/oauth2/published_app_integration.rst
index 11135e341..f59f2c4aa 100644
--- a/docs/account/oauth2/published_app_integration.rst
+++ b/docs/account/oauth2/published_app_integration.rst
@@ -5,68 +5,69 @@
 .. py:class:: PublishedAppIntegrationAPI
 
     These APIs enable administrators to manage published OAuth app integrations, which is required for
-adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud.
+    adding/using Published OAuth App Integrations like Tableau Desktop for Databricks in the AWS cloud.
 
     .. py:method:: create( [, app_id: Optional[str], token_access_policy: Optional[TokenAccessPolicy]]) -> CreatePublishedAppIntegrationOutput
 
         Create Published OAuth App Integration.
-
-Create Published OAuth App Integration.
-
-You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
-
-:param app_id: str (optional)
-  App id of the OAuth published app integration. For example power-bi, tableau-deskop
-:param token_access_policy: :class:`TokenAccessPolicy` (optional)
-  Token access policy
-
-:returns: :class:`CreatePublishedAppIntegrationOutput`
-
+        
+        Create Published OAuth App Integration.
+        
+        You can retrieve the published OAuth app integration via :method:PublishedAppIntegration/get.
+        
+        :param app_id: str (optional)
+          App id of the OAuth published app integration. For example power-bi, tableau-desktop
+        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
+          Token access policy
+        
+        :returns: :class:`CreatePublishedAppIntegrationOutput`
+        
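+        Example (sketch; it assumes the `a.published_app_integration` accessor and the
+        `TokenAccessPolicy` dataclass in `databricks.sdk.service.oauth2`):
+
+        .. code-block:: python
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.oauth2 import TokenAccessPolicy
+
+            a = AccountClient()
+
+            # Enable Tableau Desktop with a one-hour access token and a
+            # seven-day refresh token.
+            integration = a.published_app_integration.create(
+                app_id="tableau-desktop",
+                token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=60,
+                                                      refresh_token_ttl_in_minutes=10080))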
 
     .. py:method:: delete(integration_id: str)
 
         Delete Published OAuth App Integration.
-
-Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
-integration via :method:PublishedAppIntegration/get.
-
-:param integration_id: str
-
-
-
+        
+        Delete an existing Published OAuth App Integration. You can retrieve the published OAuth app
+        integration via :method:PublishedAppIntegration/get.
+        
+        :param integration_id: str
+        
+        
+        
 
     .. py:method:: get(integration_id: str) -> GetPublishedAppIntegrationOutput
 
         Get OAuth Published App Integration.
-
-Gets the Published OAuth App Integration for the given integration id.
-
-:param integration_id: str
-
-:returns: :class:`GetPublishedAppIntegrationOutput`
-
+        
+        Gets the Published OAuth App Integration for the given integration id.
+        
+        :param integration_id: str
+        
+        :returns: :class:`GetPublishedAppIntegrationOutput`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[GetPublishedAppIntegrationOutput]
 
         Get published oauth app integrations.
-
-Get the list of published OAuth app integrations for the specified Databricks account
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
-
+        
+        Get the list of published OAuth app integrations for the specified Databricks account
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`GetPublishedAppIntegrationOutput`
+        
 
     .. py:method:: update(integration_id: str [, token_access_policy: Optional[TokenAccessPolicy]])
 
         Updates Published OAuth App Integration.
-
-Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
-integration via :method:PublishedAppIntegration/get.
-
-:param integration_id: str
-:param token_access_policy: :class:`TokenAccessPolicy` (optional)
-  Token access policy to be updated in the published OAuth app integration
-
-
+        
+        Updates an existing published OAuth App Integration. You can retrieve the published OAuth app
+        integration via :method:PublishedAppIntegration/get.
+        
+        :param integration_id: str
+        :param token_access_policy: :class:`TokenAccessPolicy` (optional)
+          Token access policy to be updated in the published OAuth app integration
+        
+        
+        
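+        Example (sketch, reusing the `integration` object from the create example above):
+
+        .. code-block:: python
+
+            # Tighten the access token lifetime on an existing integration.
+            a.published_app_integration.update(
+                integration_id=integration.integration_id,
+                token_access_policy=TokenAccessPolicy(access_token_ttl_in_minutes=30))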
\ No newline at end of file
diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst
index 66ed4505f..2e0577ba4 100644
--- a/docs/account/oauth2/service_principal_federation_policy.rst
+++ b/docs/account/oauth2/service_principal_federation_policy.rst
@@ -5,110 +5,111 @@
 .. py:class:: ServicePrincipalFederationPolicyAPI
 
     These APIs manage service principal federation policies.
-
-Service principal federation, also known as Workload Identity Federation, allows your automated workloads
-running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
-With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
-Databricks service principal, using tokens provided by the workload runtime.
-
-Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
-automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
-possible. Workload Identity Federation is supported by many popular services, including Github Actions,
-Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
-
-Workload identity federation is configured in your Databricks account using a service principal federation
-policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
-allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
-Databricks service principal
-
-To configure a federation policy, you provide the following: * The required token __issuer__, as specified
-in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
-workload identity provider. * The required token __subject__, as specified in the “sub” claim of
-workload identity tokens. The subject uniquely identifies the workload in the workload runtime
-environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
-tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
-token matches at least one audience in the policy, the token is considered a match. If unspecified, the
-default value is your Databricks account id. * Optionally, the public keys used to validate the signature
-of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
-fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
-the issuer’s well known endpoint for discovering public keys.
-
-An example service principal federation policy, for a Github Actions workload, is: ``` issuer:
-"https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
-"repo:my-github-org/my-repo:environment:prod" ```
-
-An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
-{ "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
-"repo:my-github-org/my-repo:environment:prod" } ```
-
-You may also need to configure the workload runtime to generate tokens for your workloads.
-
-You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+    Databricks service principal, using tokens provided by the workload runtime.
+    
+    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+    possible. Workload Identity Federation is supported by many popular services, including GitHub Actions,
+    Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+    
+    Workload identity federation is configured in your Databricks account using a service principal federation
+    policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
+    allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
+    Databricks service principal
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
+    workload identity provider. * The required token __subject__, as specified in the “sub” claim of
+    workload identity tokens. The subject uniquely identifies the workload in the workload runtime
+    environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
+    tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
+    token matches at least one audience in the policy, the token is considered a match. If unspecified, the
+    default value is your Databricks account id. * Optionally, the public keys used to validate the signature
+    of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
+    fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
+    the issuer’s well known endpoint for discovering public keys.
+    
+    An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer:
+    "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
+    "repo:my-github-org/my-repo:environment:prod" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
+    { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
+    "repo:my-github-org/my-repo:environment:prod" } ```
+    
+    You may also need to configure the workload runtime to generate tokens for your workloads.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
 
     .. py:method:: create(service_principal_id: int [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
 
         Create service principal federation policy.
-
-:param service_principal_id: int
-  The service principal id for the federation policy.
-:param policy: :class:`FederationPolicy` (optional)
-:param policy_id: str (optional)
-  The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
-  characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
-
-:returns: :class:`FederationPolicy`
-
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
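+        Example (a minimal sketch of the GitHub Actions policy above; it assumes the
+        `a.service_principal_federation_policy` accessor and the dataclasses in
+        `databricks.sdk.service.oauth2`):
+
+        .. code-block:: python
+
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.oauth2 import (FederationPolicy,
+                                                       OidcFederationPolicy)
+
+            a = AccountClient()
+
+            # Let the prod environment of my-repo authenticate as service
+            # principal 12345 (a hypothetical id).
+            policy = a.service_principal_federation_policy.create(
+                service_principal_id=12345,
+                policy=FederationPolicy(oidc_policy=OidcFederationPolicy(
+                    issuer="https://token.actions.githubusercontent.com",
+                    audiences=["https://github.com/my-github-org"],
+                    subject="repo:my-github-org/my-repo:environment:prod")))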
 
     .. py:method:: delete(service_principal_id: int, policy_id: str)
 
         Delete service principal federation policy.
-
-:param service_principal_id: int
-  The service principal id for the federation policy.
-:param policy_id: str
-  The identifier for the federation policy.
-
-
-
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        
 
     .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy
 
         Get service principal federation policy.
-
-:param service_principal_id: int
-  The service principal id for the federation policy.
-:param policy_id: str
-  The identifier for the federation policy.
-
-:returns: :class:`FederationPolicy`
-
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        
 
     .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
 
         List service principal federation policies.
-
-:param service_principal_id: int
-  The service principal id for the federation policy.
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`FederationPolicy`
-
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
 
     .. py:method:: update(service_principal_id: int, policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
 
         Update service principal federation policy.
-
-:param service_principal_id: int
-  The service principal id for the federation policy.
-:param policy_id: str
-  The identifier for the federation policy.
-:param policy: :class:`FederationPolicy` (optional)
-:param update_mask: str (optional)
-  The field mask specifies which fields of the policy to update. To specify multiple fields in the
-  field mask, use comma as the separator (no space). The special value '*' indicates that all fields
-  should be updated (full replacement). If unspecified, all fields that are set in the policy provided
-  in the update request will overwrite the corresponding fields in the existing policy. Example value:
-  'description,oidc_policy.audiences'.
-
-:returns: :class:`FederationPolicy`
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        
\ No newline at end of file
diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst
index 3e0bb9b74..955d6da53 100644
--- a/docs/account/oauth2/service_principal_secrets.rst
+++ b/docs/account/oauth2/service_principal_secrets.rst
@@ -5,58 +5,59 @@
 .. py:class:: ServicePrincipalSecretsAPI
 
     These APIs enable administrators to manage service principal secrets.
-
-You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be
-used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using
-OAuth tokens for service principals],
-
-In addition, the generated secrets can be used to configure the Databricks Terraform Provider to
-authenticate with the service principal. For more information, see [Databricks Terraform Provider].
-
-[Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
-[Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal
+    
+    You can use the generated secrets to obtain OAuth access tokens for a service principal, which can then be
+    used to access Databricks Accounts and Workspace APIs. For more information, see [Authentication using
+    OAuth tokens for service principals].
+    
+    In addition, the generated secrets can be used to configure the Databricks Terraform Provider to
+    authenticate with the service principal. For more information, see [Databricks Terraform Provider].
+    
+    [Authentication using OAuth tokens for service principals]: https://docs.databricks.com/dev-tools/authentication-oauth.html
+    [Databricks Terraform Provider]: https://github.com/databricks/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-service-principal
 
     .. py:method:: create(service_principal_id: int) -> CreateServicePrincipalSecretResponse
 
         Create service principal secret.
-
-Create a secret for the given service principal.
-
-:param service_principal_id: int
-  The service principal ID.
-
-:returns: :class:`CreateServicePrincipalSecretResponse`
-
+        
+        Create a secret for the given service principal.
+        
+        :param service_principal_id: int
+          The service principal ID.
+        
+        :returns: :class:`CreateServicePrincipalSecretResponse`
+        
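+        Example (sketch; `a.service_principal_secrets` is the assumed accessor and
+        12345 a hypothetical service principal id):
+
+        .. code-block:: python
+
+            from databricks.sdk import AccountClient
+
+            a = AccountClient()
+
+            created = a.service_principal_secrets.create(service_principal_id=12345)
+            # The secret value is returned only at creation time; store it securely.
+            print(created.id, created.secret)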
 
     .. py:method:: delete(service_principal_id: int, secret_id: str)
 
         Delete service principal secret.
-
-Delete a secret from the given service principal.
-
-:param service_principal_id: int
-  The service principal ID.
-:param secret_id: str
-  The secret ID.
-
-
-
+        
+        Delete a secret from the given service principal.
+        
+        :param service_principal_id: int
+          The service principal ID.
+        :param secret_id: str
+          The secret ID.
+        
+        
+        
 
     .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo]
 
         List service principal secrets.
-
-List all secrets associated with the given service principal. This operation only returns information
-about the secrets themselves and does not include the secret values.
-
-:param service_principal_id: int
-  The service principal ID.
-:param page_token: str (optional)
-  An opaque page token which was the `next_page_token` in the response of the previous request to list
-  the secrets for this service principal. Provide this token to retrieve the next page of secret
-  entries. When providing a `page_token`, all other parameters provided to the request must match the
-  previous request. To list all of the secrets for a service principal, it is necessary to continue
-  requesting pages of entries until the response contains no `next_page_token`. Note that the number
-  of entries returned must not be used to determine when the listing is complete.
-
-:returns: Iterator over :class:`SecretInfo`
+        
+        List all secrets associated with the given service principal. This operation only returns information
+        about the secrets themselves and does not include the secret values.
+        
+        :param service_principal_id: int
+          The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
+        
+        :returns: Iterator over :class:`SecretInfo`
+        
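+        Example (sketch, continuing the create example above): the iterator follows
+        `next_page_token` for you, so listing all secrets is a plain loop:
+
+        .. code-block:: python
+
+            for info in a.service_principal_secrets.list(service_principal_id=12345):
+                print(info.id, info.secret_hash)  # secret values are never returned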
\ No newline at end of file
diff --git a/docs/account/provisioning/credentials.rst b/docs/account/provisioning/credentials.rst
index a411febab..5255a6a29 100644
--- a/docs/account/provisioning/credentials.rst
+++ b/docs/account/provisioning/credentials.rst
@@ -5,9 +5,9 @@
 .. py:class:: CredentialsAPI
 
     These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
-service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
-new workspace. A credential configuration encapsulates this role information, and its ID is used when
-creating a new workspace.
+    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+    new workspace. A credential configuration encapsulates this role information, and its ID is used when
+    creating a new workspace.
 
     .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
 
@@ -33,39 +33,39 @@ creating a new workspace.
             a.credentials.delete(credentials_id=role.credentials_id)
 
         Create credential configuration.
-
-Creates a Databricks credential configuration that represents cloud cross-account credentials for a
-specified account. Databricks uses this to set up network infrastructure properly to host Databricks
-clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
-ID) in the returned credential object, and configure the required access policy.
-
-Save the response's `credentials_id` field, which is the ID for your new credential configuration
-object.
-
-For information about how to create a new workspace with this API, see [Create a new workspace using
-the Account API]
-
-[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-
-:param credentials_name: str
-  The human-readable name of the credential configuration object.
-:param aws_credentials: :class:`CreateCredentialAwsCredentials`
-
-:returns: :class:`Credential`
-
+        
+        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+        ID) in the returned credential object, and configure the required access policy.
+        
+        Save the response's `credentials_id` field, which is the ID for your new credential configuration
+        object.
+        
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API]
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param credentials_name: str
+          The human-readable name of the credential configuration object.
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
+        
+        :returns: :class:`Credential`
+        
 
     .. py:method:: delete(credentials_id: str)
 
         Delete credential configuration.
-
-Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
-delete a credential that is associated with any workspace.
-
-:param credentials_id: str
-  Databricks Account API credential configuration ID
-
-
-
+        
+        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+        delete a credential that is associated with any workspace.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        
+        
 
     .. py:method:: get(credentials_id: str) -> Credential
 
@@ -93,14 +93,14 @@ delete a credential that is associated with any workspace.
             a.credentials.delete(credentials_id=role.credentials_id)
 
         Get credential configuration.
-
-Gets a Databricks credential configuration object for an account, both specified by ID.
-
-:param credentials_id: str
-  Databricks Account API credential configuration ID
-
-:returns: :class:`Credential`
-
+        
+        Gets a Databricks credential configuration object for an account, both specified by ID.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        :returns: :class:`Credential`
+        
 
     .. py:method:: list() -> Iterator[Credential]
 
@@ -116,7 +116,8 @@ Gets a Databricks credential configuration object for an account, both specified
             configs = a.credentials.list()
 
         Get all credential configurations.
-
-Gets all Databricks credential configurations associated with an account specified by ID.
-
-:returns: Iterator over :class:`Credential`
+        
+        Gets all Databricks credential configurations associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`Credential`
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/encryption_keys.rst b/docs/account/provisioning/encryption_keys.rst
index ad26bb033..c711727c5 100644
--- a/docs/account/provisioning/encryption_keys.rst
+++ b/docs/account/provisioning/encryption_keys.rst
@@ -5,18 +5,18 @@
 .. py:class:: EncryptionKeysAPI
 
     These APIs manage encryption key configurations for this workspace (optional). A key configuration
-encapsulates the AWS KMS key information and some information about how the key configuration can be used.
-There are two possible uses for key configurations:
-
-* Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
-the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can
-be used to encrypt a workspace's DBFS and EBS data in the data plane.
-
-In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
-feature is available if your account is on the E2 version of the platform. Updating a running workspace
-with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you
-have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact
-your Databricks representative.
+    encapsulates the AWS KMS key information and some information about how the key configuration can be used.
+    There are two possible uses for key configurations:
+    
+    * Managed services: A key configuration can be used to encrypt a workspace's notebook and secret data in
+    the control plane, as well as Databricks SQL queries and query history. * Storage: A key configuration can
+    be used to encrypt a workspace's DBFS and EBS data in the data plane.
+    
+    In both of these cases, the key configuration's ID is used when creating a new workspace. This Preview
+    feature is available if your account is on the E2 version of the platform. Updating a running workspace
+    with workspace storage encryption requires that the workspace is on the E2 version of the platform. If you
+    have an older workspace, it might not be on the E2 version of the platform. If you are not sure, contact
+    your Databricks representative.
 
     .. py:method:: create(use_cases: List[KeyUseCase] [, aws_key_info: Optional[CreateAwsKeyInfo], gcp_key_info: Optional[CreateGcpKeyInfo]]) -> CustomerManagedKey
 
@@ -40,41 +40,41 @@ your Databricks representative.
             a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id)
 
         Create encryption key configuration.
-
-Creates a customer-managed key configuration object for an account, specified by ID. This operation
-uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
-customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
-and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
-specified as a workspace's customer-managed key for workspace storage, the key encrypts the
-workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally,
-cluster EBS volume data.
-
-**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-and AWS regions that currently support creation of Databricks workspaces.
-
-This operation is available only if your account is on the E2 version of the platform or on a select
-custom plan that allows multiple workspaces per account.
-
-:param use_cases: List[:class:`KeyUseCase`]
-  The cases that the key can be used for.
-:param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
-:param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
-
-:returns: :class:`CustomerManagedKey`
-
+        
+        Creates a customer-managed key configuration object for an account, specified by ID. This operation
+        uploads a reference to a customer-managed key to Databricks. If the key is assigned as a workspace's
+        customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
+        and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
+        specified as a workspace's customer-managed key for workspace storage, the key encrypts the
+        workspace's root S3 bucket (which contains the workspace's root DBFS and system data) and, optionally,
+        cluster EBS volume data.
+        
+        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+        and AWS regions that currently support creation of Databricks workspaces.
+        
+        This operation is available only if your account is on the E2 version of the platform or on a select
+        custom plan that allows multiple workspaces per account.
+        
+        :param use_cases: List[:class:`KeyUseCase`]
+          The cases that the key can be used for.
+        :param aws_key_info: :class:`CreateAwsKeyInfo` (optional)
+        :param gcp_key_info: :class:`CreateGcpKeyInfo` (optional)
+        
+        :returns: :class:`CustomerManagedKey`
+        
 
     .. py:method:: delete(customer_managed_key_id: str)
 
         Delete encryption key configuration.
-
-Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
-that is associated with a running workspace.
-
-:param customer_managed_key_id: str
-  Databricks encryption key configuration ID.
-
-
-
+        
+        Deletes a customer-managed key configuration object for an account. You cannot delete a configuration
+        that is associated with a running workspace.
+        
+        :param customer_managed_key_id: str
+          Databricks encryption key configuration ID.
+        
+        
+        
 
     .. py:method:: get(customer_managed_key_id: str) -> CustomerManagedKey
 
@@ -100,25 +100,25 @@ that is associated with a running workspace.
             a.encryption_keys.delete(customer_managed_key_id=created.customer_managed_key_id)
 
         Get encryption key configuration.
-
-Gets a customer-managed key configuration object for an account, specified by ID. This operation
-uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
-customer-managed key for managed services, Databricks uses the key to encrypt the workspaces notebooks
-and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
-specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3
-bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume
-data.
-
-**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-and AWS regions.
-
-This operation is available only if your account is on the E2 version of the platform.",
-
-:param customer_managed_key_id: str
-  Databricks encryption key configuration ID.
-
-:returns: :class:`CustomerManagedKey`
-
+        
+        Gets a customer-managed key configuration object for an account, specified by ID. This operation
+        uploads a reference to a customer-managed key to Databricks. If assigned as a workspace's
+        customer-managed key for managed services, Databricks uses the key to encrypt the workspace's notebooks
+        and secrets in the control plane, in addition to Databricks SQL queries and query history. If it is
+        specified as a workspace's customer-managed key for storage, the key encrypts the workspace's root S3
+        bucket (which contains the workspace's root DBFS and system data) and, optionally, cluster EBS volume
+        data.
+        
+        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+        and AWS regions.
+        
+        This operation is available only if your account is on the E2 version of the platform.
+        
+        :param customer_managed_key_id: str
+          Databricks encryption key configuration ID.
+        
+        :returns: :class:`CustomerManagedKey`
+        
 
     .. py:method:: list() -> Iterator[CustomerManagedKey]
 
@@ -134,16 +134,17 @@ This operation is available only if your account is on the E2 version of the pla
             all = a.encryption_keys.list()
 
         Get all encryption key configurations.
-
-Gets all customer-managed key configuration objects for an account. If the key is specified as a
-workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
-notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
-If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
-workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
-
-**Important**: Customer-managed keys are supported only for some deployment types, subscription types,
-and AWS regions.
-
-This operation is available only if your account is on the E2 version of the platform.
-
-:returns: Iterator over :class:`CustomerManagedKey`
+        
+        Gets all customer-managed key configuration objects for an account. If the key is specified as a
+        workspace's managed services customer-managed key, Databricks uses the key to encrypt the workspace's
+        notebooks and secrets in the control plane, in addition to Databricks SQL queries and query history.
+        If the key is specified as a workspace's storage customer-managed key, the key is used to encrypt the
+        workspace's root S3 bucket and optionally can encrypt cluster EBS volumes data in the data plane.
+        
+        **Important**: Customer-managed keys are supported only for some deployment types, subscription types,
+        and AWS regions.
+        
+        This operation is available only if your account is on the E2 version of the platform.
+        
+        :returns: Iterator over :class:`CustomerManagedKey`
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/networks.rst b/docs/account/provisioning/networks.rst
index bfe9abfd4..e7491f202 100644
--- a/docs/account/provisioning/networks.rst
+++ b/docs/account/provisioning/networks.rst
@@ -5,7 +5,7 @@
 .. py:class:: NetworksAPI
 
     These APIs manage network configurations for customer-managed VPCs (optional). Its ID is used when
-creating a new workspace if you use customer-managed VPCs.
+    creating a new workspace if you use customer-managed VPCs.
 
     .. py:method:: create(network_name: str [, gcp_network_info: Optional[GcpNetworkInfo], security_group_ids: Optional[List[str]], subnet_ids: Optional[List[str]], vpc_endpoints: Optional[NetworkVpcEndpoints], vpc_id: Optional[str]]) -> Network
 
@@ -27,47 +27,47 @@ creating a new workspace if you use customer-managed VPCs.
                                      security_group_ids=[hex(time.time_ns())[2:]])
 
         Create network configuration.
-
-Creates a Databricks network configuration that represents an VPC and its resources. The VPC will be
-used for new Databricks clusters. This requires a pre-existing VPC and subnets.
-
-:param network_name: str
-  The human-readable name of the network configuration.
-:param gcp_network_info: :class:`GcpNetworkInfo` (optional)
-  The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
-  secondary IP ranges).
-:param security_group_ids: List[str] (optional)
-  IDs of one to five security groups associated with this network. Security group IDs **cannot** be
-  used in multiple network configurations.
-:param subnet_ids: List[str] (optional)
-  IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
-  network configurations.
-:param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
-  If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
-  PrivateLink].
-  
-  [AWS PrivateLink]: https://aws.amazon.com/privatelink/
-:param vpc_id: str (optional)
-  The ID of the VPC associated with this network. VPC IDs can be used in multiple network
-  configurations.
-
-:returns: :class:`Network`
-
+        
+        Creates a Databricks network configuration that represents a VPC and its resources. The VPC will be
+        used for new Databricks clusters. This requires a pre-existing VPC and subnets.
+        
+        :param network_name: str
+          The human-readable name of the network configuration.
+        :param gcp_network_info: :class:`GcpNetworkInfo` (optional)
+          The Google Cloud specific information for this network (for example, the VPC ID, subnet ID, and
+          secondary IP ranges).
+        :param security_group_ids: List[str] (optional)
+          IDs of one to five security groups associated with this network. Security group IDs **cannot** be
+          used in multiple network configurations.
+        :param subnet_ids: List[str] (optional)
+          IDs of at least two subnets associated with this network. Subnet IDs **cannot** be used in multiple
+          network configurations.
+        :param vpc_endpoints: :class:`NetworkVpcEndpoints` (optional)
+          If specified, contains the VPC endpoints used to allow cluster communication from this VPC over [AWS
+          PrivateLink].
+          
+          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
+        :param vpc_id: str (optional)
+          The ID of the VPC associated with this network. VPC IDs can be used in multiple network
+          configurations.
+        
+        :returns: :class:`Network`
+        
 
     .. py:method:: delete(network_id: str)
 
         Delete a network configuration.
-
-Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
-delete a network that is associated with a workspace.
-
-This operation is available only if your account is on the E2 version of the platform.
-
-:param network_id: str
-  Databricks Account API network configuration ID.
-
-
-
+        
+        Deletes a Databricks network configuration, which represents a cloud VPC and its resources. You cannot
+        delete a network that is associated with a workspace.
+        
+        This operation is available only if your account is on the E2 version of the platform.
+        
+        :param network_id: str
+          Databricks Account API network configuration ID.
+        
+        
+        
 
     .. py:method:: get(network_id: str) -> Network
 
@@ -91,14 +91,14 @@ This operation is available only if your account is on the E2 version of the pla
             by_id = a.networks.get(network_id=netw.network_id)
 
         Get a network configuration.
-
-Gets a Databricks network configuration, which represents a cloud VPC and its resources.
-
-:param network_id: str
-  Databricks Account API network configuration ID.
-
-:returns: :class:`Network`
-
+        
+        Gets a Databricks network configuration, which represents a cloud VPC and its resources.
+        
+        :param network_id: str
+          Databricks Account API network configuration ID.
+        
+        :returns: :class:`Network`
+        
 
     .. py:method:: list() -> Iterator[Network]
 
@@ -114,9 +114,10 @@ Gets a Databricks network configuration, which represents a cloud VPC and its re
             configs = a.networks.list()
 
         Get all network configurations.
-
-Gets a list of all Databricks network configurations for an account, specified by ID.
-
-This operation is available only if your account is on the E2 version of the platform.
-
-:returns: Iterator over :class:`Network`
+        
+        Gets a list of all Databricks network configurations for an account, specified by ID.
+        
+        This operation is available only if your account is on the E2 version of the platform.
+        
+        :returns: Iterator over :class:`Network`
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/private_access.rst b/docs/account/provisioning/private_access.rst
index c51b7567f..10022068e 100644
--- a/docs/account/provisioning/private_access.rst
+++ b/docs/account/provisioning/private_access.rst
@@ -27,68 +27,68 @@
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Create private access settings.
-
-Creates a private access settings object, which specifies how your workspace is accessed over [AWS
-PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
-referenced by ID in the workspace's `private_access_settings_id` property.
-
-You can share one private access settings with multiple workspaces in a single account. However,
-private access settings are specific to AWS regions, so only workspaces in the same AWS region can use
-a given private access settings object.
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:param private_access_settings_name: str
-  The human-readable name of the private access settings object.
-:param region: str
-  The cloud region for workspaces associated with this private access settings object.
-:param allowed_vpc_endpoint_ids: List[str] (optional)
-  An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
-  the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
-  AWS.
-  
-  Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
-  that in your account that can connect to your workspace over AWS PrivateLink.
-  
-  If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
-  control only works for PrivateLink connections. To control how your workspace is accessed via public
-  internet, see [IP access lists].
-  
-  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
-:param private_access_level: :class:`PrivateAccessLevel` (optional)
-  The private access level controls which VPC endpoints can connect to the UI or API of any workspace
-  that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
-  only VPC endpoints that are registered in your Databricks account connect to your workspace. *
-  `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
-  see `allowed_vpc_endpoint_ids`.
-:param public_access_enabled: bool (optional)
-  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
-  can optionally specify `false`, but only if you implement both the front-end and the back-end
-  PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-
-:returns: :class:`PrivateAccessSettings`
-
+        
+        Creates a private access settings object, which specifies how your workspace is accessed over [AWS
+        PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
+        referenced by ID in the workspace's `private_access_settings_id` property.
+        
+        You can share one private access settings object with multiple workspaces in a single account. However,
+        private access settings are specific to AWS regions, so only workspaces in the same AWS region can use
+        a given private access settings object.
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :param private_access_settings_name: str
+          The human-readable name of the private access settings object.
+        :param region: str
+          The cloud region for workspaces associated with this private access settings object.
+        :param allowed_vpc_endpoint_ids: List[str] (optional)
+          An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
+          the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
+          AWS.
+          
+          Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
+          in your account that can connect to your workspace over AWS PrivateLink.
+          
+          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
+          control only works for PrivateLink connections. To control how your workspace is accessed via public
+          internet, see [IP access lists].
+          
+          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
+        :param private_access_level: :class:`PrivateAccessLevel` (optional)
+          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
+          that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
+          only VPC endpoints that are registered in your Databricks account to connect to your workspace. *
+          `ENDPOINT` level access allows only specified VPC endpoints to connect to your workspace. For details,
+          see `allowed_vpc_endpoint_ids`.
+        :param public_access_enabled: bool (optional)
+          Determines if the workspace can be accessed over the public internet. For fully private workspaces, you
+          can optionally specify `false`, but only if you implement both the front-end and the back-end
+          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
+        
+        :returns: :class:`PrivateAccessSettings`
+        
 
     .. py:method:: delete(private_access_settings_id: str)
 
         Delete a private access settings object.
-
-Deletes a private access settings object, which determines how your workspace is accessed over [AWS
-PrivateLink].
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:param private_access_settings_id: str
-  Databricks Account API private access settings ID.
-
-
-
+        
+        Deletes a private access settings object, which determines how your workspace is accessed over [AWS
+        PrivateLink].
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :param private_access_settings_id: str
+          Databricks Account API private access settings ID.
+        
+        
+        
 
     .. py:method:: get(private_access_settings_id: str) -> PrivateAccessSettings
 
@@ -113,20 +113,20 @@ Before configuring PrivateLink, read the [Databricks article about PrivateLink].
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Get a private access settings object.
-
-Gets a private access settings object, which specifies how your workspace is accessed over [AWS
-PrivateLink].
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:param private_access_settings_id: str
-  Databricks Account API private access settings ID.
-
-:returns: :class:`PrivateAccessSettings`
-
+        
+        Gets a private access settings object, which specifies how your workspace is accessed over [AWS
+        PrivateLink].
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :param private_access_settings_id: str
+          Databricks Account API private access settings ID.
+        
+        :returns: :class:`PrivateAccessSettings`
+        
 
     .. py:method:: list() -> Iterator[PrivateAccessSettings]
 
@@ -142,11 +142,11 @@ Before configuring PrivateLink, read the [Databricks article about PrivateLink].
             all = a.private_access.list()
 
         Get all private access settings objects.
-
-Gets a list of all private access settings objects for an account, specified by ID.
-
-:returns: Iterator over :class:`PrivateAccessSettings`
-
+        
+        Gets a list of all private access settings objects for an account, specified by ID.
+        
+        :returns: Iterator over :class:`PrivateAccessSettings`
+        
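A minimal sketch of draining the iterator, assuming environment-based auth:

    from databricks.sdk import AccountClient

    a = AccountClient()
    for pas in a.private_access.list():
        print(pas.private_access_settings_id, pas.region)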
 
     .. py:method:: replace(private_access_settings_id: str, private_access_settings_name: str, region: str [, allowed_vpc_endpoint_ids: Optional[List[str]], private_access_level: Optional[PrivateAccessLevel], public_access_enabled: Optional[bool]])
 
@@ -173,53 +173,54 @@ Gets a list of all private access settings objects for an account, specified by
             a.private_access.delete(private_access_settings_id=created.private_access_settings_id)
 
         Replace private access settings.
-
-Updates an existing private access settings object, which specifies how your workspace is accessed
-over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
-referenced by ID in the workspace's `private_access_settings_id` property.
-
-This operation completely overwrites your existing private access settings object attached to your
-workspaces. All workspaces attached to the private access settings are affected by any change. If
-`public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of
-these changes might take several minutes to propagate to the workspace API.
-
-You can share one private access settings object with multiple workspaces in a single account.
-However, private access settings are specific to AWS regions, so only workspaces in the same AWS
-region can use a given private access settings object.
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:param private_access_settings_id: str
-  Databricks Account API private access settings ID.
-:param private_access_settings_name: str
-  The human-readable name of the private access settings object.
-:param region: str
-  The cloud region for workspaces associated with this private access settings object.
-:param allowed_vpc_endpoint_ids: List[str] (optional)
-  An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
-  the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
-  AWS.
-  
-  Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
-  that in your account that can connect to your workspace over AWS PrivateLink.
-  
-  If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
-  control only works for PrivateLink connections. To control how your workspace is accessed via public
-  internet, see [IP access lists].
-  
-  [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
-:param private_access_level: :class:`PrivateAccessLevel` (optional)
-  The private access level controls which VPC endpoints can connect to the UI or API of any workspace
-  that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
-  only VPC endpoints that are registered in your Databricks account connect to your workspace. *
-  `ENDPOINT` level access allows only specified VPC endpoints connect to your workspace. For details,
-  see `allowed_vpc_endpoint_ids`.
-:param public_access_enabled: bool (optional)
-  Determines if the workspace can be accessed over public internet. For fully private workspaces, you
-  can optionally specify `false`, but only if you implement both the front-end and the back-end
-  PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
-
-
+        
+        Updates an existing private access settings object, which specifies how your workspace is accessed
+        over [AWS PrivateLink]. To use AWS PrivateLink, a workspace must have a private access settings object
+        referenced by ID in the workspace's `private_access_settings_id` property.
+        
+        This operation completely overwrites your existing private access settings object attached to your
+        workspaces. All workspaces attached to the private access settings are affected by any change. If
+        `public_access_enabled`, `private_access_level`, or `allowed_vpc_endpoint_ids` are updated, effects of
+        these changes might take several minutes to propagate to the workspace API.
+        
+        You can share one private access settings object with multiple workspaces in a single account.
+        However, private access settings are specific to AWS regions, so only workspaces in the same AWS
+        region can use a given private access settings object.
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :param private_access_settings_id: str
+          Databricks Account API private access settings ID.
+        :param private_access_settings_name: str
+          The human-readable name of the private access settings object.
+        :param region: str
+          The cloud region for workspaces associated with this private access settings object.
+        :param allowed_vpc_endpoint_ids: List[str] (optional)
+          An array of Databricks VPC endpoint IDs. This is the Databricks ID that is returned when registering
+          the VPC endpoint configuration in your Databricks account. This is not the ID of the VPC endpoint in
+          AWS.
+          
+          Only used when `private_access_level` is set to `ENDPOINT`. This is an allow list of VPC endpoints
+          in your account that can connect to your workspace over AWS PrivateLink.
+          
+          If hybrid access to your workspace is enabled by setting `public_access_enabled` to `true`, this
+          control only works for PrivateLink connections. To control how your workspace is accessed via public
+          internet, see [IP access lists].
+          
+          [IP access lists]: https://docs.databricks.com/security/network/ip-access-list.html
+        :param private_access_level: :class:`PrivateAccessLevel` (optional)
+          The private access level controls which VPC endpoints can connect to the UI or API of any workspace
+          that attaches this private access settings object. * `ACCOUNT` level access (the default) allows
+          only VPC endpoints that are registered in your Databricks account to connect to your workspace. *
+          `ENDPOINT` level access allows only specified VPC endpoints to connect to your workspace. For details,
+          see `allowed_vpc_endpoint_ids`.
+        :param public_access_enabled: bool (optional)
+          Determines if the workspace can be accessed over public internet. For fully private workspaces, you
+          can optionally specify `false`, but only if you implement both the front-end and the back-end
+          PrivateLink connections. Otherwise, specify `true`, which means that public access is enabled.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/storage.rst b/docs/account/provisioning/storage.rst
index c538ca1d7..611a8cdc6 100644
--- a/docs/account/provisioning/storage.rst
+++ b/docs/account/provisioning/storage.rst
@@ -5,9 +5,9 @@
 .. py:class:: StorageAPI
 
     These APIs manage storage configurations for this workspace. A root storage S3 bucket in your account is
-required to store objects like cluster logs, notebook revisions, and job results. You can also use the
-root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this
-bucket information, and its ID is used when creating a new workspace.
+    required to store objects like cluster logs, notebook revisions, and job results. You can also use the
+    root storage S3 bucket for storage of non-production DBFS data. A storage configuration encapsulates this
+    bucket information, and its ID is used when creating a new workspace.
 
     .. py:method:: create(storage_configuration_name: str, root_bucket_info: RootBucketInfo) -> StorageConfiguration
 
@@ -32,37 +32,37 @@ bucket information, and its ID is used when creating a new workspace.
             a.storage.delete(storage_configuration_id=storage.storage_configuration_id)
 
         Create new storage configuration.
-
-Creates new storage configuration for an account, specified by ID. Uploads a storage configuration
-object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
-assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
-required bucket policy.
-
-For information about how to create a new workspace with this API, see [Create a new workspace using
-the Account API]
-
-[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-
-:param storage_configuration_name: str
-  The human-readable name of the storage configuration.
-:param root_bucket_info: :class:`RootBucketInfo`
-  Root S3 bucket information.
-
-:returns: :class:`StorageConfiguration`
-
+        
+        Creates a new storage configuration for an account, specified by ID. Uploads a storage configuration
+        object that represents the root AWS S3 bucket in your account. Databricks stores related workspace
+        assets including DBFS, cluster logs, and job results. For the AWS S3 bucket, you need to configure the
+        required bucket policy.
+        
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API].
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param storage_configuration_name: str
+          The human-readable name of the storage configuration.
+        :param root_bucket_info: :class:`RootBucketInfo`
+          Root S3 bucket information.
+        
+        :returns: :class:`StorageConfiguration`
+        
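A hedged sketch of the creation call (bucket and configuration names are placeholders; the bucket must already carry the required policy):

    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient()
    storage = a.storage.create(
        storage_configuration_name='main-root-bucket',  # placeholder name
        root_bucket_info=provisioning.RootBucketInfo(bucket_name='my-root-bucket'))
    print(storage.storage_configuration_id)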
 
     .. py:method:: delete(storage_configuration_id: str)
 
         Delete storage configuration.
-
-Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
-associated with any workspace.
-
-:param storage_configuration_id: str
-  Databricks Account API storage configuration ID.
-
-
-
+        
+        Deletes a Databricks storage configuration. You cannot delete a storage configuration that is
+        associated with any workspace.
+        
+        :param storage_configuration_id: str
+          Databricks Account API storage configuration ID.
+        
+        
+        
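Sketched usage with a placeholder ID; the comment restates the constraint from the docstring:

    from databricks.sdk import AccountClient

    a = AccountClient()
    # Fails while any workspace still references this configuration,
    # so delete dependent workspaces first.
    a.storage.delete(storage_configuration_id='sc-1234')  # placeholder ID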
 
     .. py:method:: get(storage_configuration_id: str) -> StorageConfiguration
 
@@ -84,14 +84,14 @@ associated with any workspace.
             by_id = a.storage.get(storage_configuration_id=storage.storage_configuration_id)
 
         Get storage configuration.
-
-Gets a Databricks storage configuration for an account, both specified by ID.
-
-:param storage_configuration_id: str
-  Databricks Account API storage configuration ID.
-
-:returns: :class:`StorageConfiguration`
-
+        
+        Gets a Databricks storage configuration for an account, both specified by ID.
+        
+        :param storage_configuration_id: str
+          Databricks Account API storage configuration ID.
+        
+        :returns: :class:`StorageConfiguration`
+        
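For illustration (placeholder ID; attribute names per :class:`StorageConfiguration`):

    from databricks.sdk import AccountClient

    a = AccountClient()
    cfg = a.storage.get(storage_configuration_id='sc-1234')  # placeholder ID
    print(cfg.storage_configuration_name, cfg.root_bucket_info.bucket_name)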
 
     .. py:method:: list() -> Iterator[StorageConfiguration]
 
@@ -107,7 +107,8 @@ Gets a Databricks storage configuration for an account, both specified by ID.
             configs = a.storage.list()
 
         Get all storage configurations.
-
-Gets a list of all Databricks storage configurations for your account, specified by ID.
-
-:returns: Iterator over :class:`StorageConfiguration`
+        
+        Gets a list of all Databricks storage configurations for your account, specified by ID.
+        
+        :returns: Iterator over :class:`StorageConfiguration`
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/vpc_endpoints.rst b/docs/account/provisioning/vpc_endpoints.rst
index 2b9657b5e..d2622dc0f 100644
--- a/docs/account/provisioning/vpc_endpoints.rst
+++ b/docs/account/provisioning/vpc_endpoints.rst
@@ -28,50 +28,50 @@
             a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)
 
         Create VPC endpoint configuration.
-
-Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
-communicate privately with Databricks over [AWS PrivateLink].
-
-After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
-accepts the VPC endpoint.
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
-[endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
-
-:param vpc_endpoint_name: str
-  The human-readable name of the storage configuration.
-:param aws_vpc_endpoint_id: str (optional)
-  The ID of the VPC endpoint object in AWS.
-:param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
-  The Google Cloud specific information for this Private Service Connect endpoint.
-:param region: str (optional)
-  The AWS region in which this VPC endpoint object exists.
-
-:returns: :class:`VpcEndpoint`
-
+        
+        Creates a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to
+        communicate privately with Databricks over [AWS PrivateLink].
+        
+        After you create the VPC endpoint configuration, the Databricks [endpoint service] automatically
+        accepts the VPC endpoint.
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/vpc-endpoints.html
+        [endpoint service]: https://docs.aws.amazon.com/vpc/latest/privatelink/privatelink-share-your-services.html
+        
+        :param vpc_endpoint_name: str
+          The human-readable name of the VPC endpoint configuration.
+        :param aws_vpc_endpoint_id: str (optional)
+          The ID of the VPC endpoint object in AWS.
+        :param gcp_vpc_endpoint_info: :class:`GcpVpcEndpointInfo` (optional)
+          The Google Cloud specific information for this Private Service Connect endpoint.
+        :param region: str (optional)
+          The AWS region in which this VPC endpoint object exists.
+        
+        :returns: :class:`VpcEndpoint`
+        
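A hedged AWS-flavored sketch (the endpoint name, AWS VPC endpoint ID, and region are placeholders; GCP callers would pass `gcp_vpc_endpoint_info` instead):

    from databricks.sdk import AccountClient

    a = AccountClient()
    created = a.vpc_endpoints.create(
        vpc_endpoint_name='front-end-vpce',
        aws_vpc_endpoint_id='vpce-0123456789abcdef0',  # placeholder AWS ID
        region='us-west-2')
    print(created.vpc_endpoint_id)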
 
     .. py:method:: delete(vpc_endpoint_id: str)
 
         Delete VPC endpoint configuration.
-
-Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
-privately with Databricks over [AWS PrivateLink].
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:param vpc_endpoint_id: str
-  Databricks VPC endpoint ID.
-
-
-
+        
+        Deletes a VPC endpoint configuration, which represents an [AWS VPC endpoint] that can communicate
+        privately with Databricks over [AWS PrivateLink].
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [AWS VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :param vpc_endpoint_id: str
+          Databricks VPC endpoint ID.
+        
+        
+        
 
     .. py:method:: get(vpc_endpoint_id: str) -> VpcEndpoint
 
@@ -97,18 +97,18 @@ Before configuring PrivateLink, read the [Databricks article about PrivateLink].
             a.vpc_endpoints.delete(vpc_endpoint_id=created.vpc_endpoint_id)
 
         Get a VPC endpoint configuration.
-
-Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
-privately with Databricks over [AWS PrivateLink].
-
-[AWS PrivateLink]: https://aws.amazon.com/privatelink
-[VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
-
-:param vpc_endpoint_id: str
-  Databricks VPC endpoint ID.
-
-:returns: :class:`VpcEndpoint`
-
+        
+        Gets a VPC endpoint configuration, which represents a [VPC endpoint] object in AWS used to communicate
+        privately with Databricks over [AWS PrivateLink].
+        
+        [AWS PrivateLink]: https://aws.amazon.com/privatelink
+        [VPC endpoint]: https://docs.aws.amazon.com/vpc/latest/privatelink/concepts.html
+        
+        :param vpc_endpoint_id: str
+          Databricks VPC endpoint ID.
+        
+        :returns: :class:`VpcEndpoint`
+        
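Sketched lookup with a placeholder ID; reading `state` to check the endpoint's acceptance status is an assumption based on :class:`VpcEndpoint`:

    from databricks.sdk import AccountClient

    a = AccountClient()
    ep = a.vpc_endpoints.get(vpc_endpoint_id='dbx-vpce-1234')  # placeholder ID
    print(ep.state)  # AWS-side endpoint state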
 
     .. py:method:: list() -> Iterator[VpcEndpoint]
 
@@ -124,11 +124,12 @@ privately with Databricks over [AWS PrivateLink].
             all = a.vpc_endpoints.list()
 
         Get all VPC endpoint configurations.
-
-Gets a list of all VPC endpoints for an account, specified by ID.
-
-Before configuring PrivateLink, read the [Databricks article about PrivateLink].
-
-[Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-
-:returns: Iterator over :class:`VpcEndpoint`
+        
+        Gets a list of all VPC endpoints for an account, specified by ID.
+        
+        Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+        
+        [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        
+        :returns: Iterator over :class:`VpcEndpoint`
+        
\ No newline at end of file
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index 3e312984a..ad8a75942 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -5,11 +5,11 @@
 .. py:class:: WorkspacesAPI
 
     These APIs manage workspaces for this account. A Databricks workspace is an environment for accessing all
-of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into
-folders, and provides access to data and computational resources such as clusters and jobs.
-
-These endpoints are available if your account is on the E2 version of the platform or on a select custom
-plan that allows multiple workspaces per account.
+    of your Databricks assets. The workspace organizes objects (notebooks, libraries, and experiments) into
+    folders, and provides access to data and computational resources such as clusters and jobs.
+    
+    These endpoints are available if your account is on the E2 version of the platform or on a select custom
+    plan that allows multiple workspaces per account.
 
     .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
@@ -46,109 +46,109 @@ plan that allows multiple workspaces per account.
             a.workspaces.delete(workspace_id=waiter.workspace_id)
 
         Create a new workspace.
-
-Creates a new workspace.
-
-**Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
-has been accepted and is in progress, but does not mean that the workspace deployed successfully and
-is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID
-(`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests
-with the workspace ID and check its status. The workspace becomes available when the status changes to
-`RUNNING`.
-
-:param workspace_name: str
-  The workspace's human-readable name.
-:param aws_region: str (optional)
-  The AWS region of the workspace's data plane.
-:param cloud: str (optional)
-  The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
-  `gcp`.
-:param cloud_resource_container: :class:`CloudResourceContainer` (optional)
-  The general workspace configurations that are specific to cloud providers.
-:param credentials_id: str (optional)
-  ID of the workspace's credential configuration object.
-:param custom_tags: Dict[str,str] (optional)
-  The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
-  of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The
-  key can be of maximum length of 127 characters, and cannot be empty.
-:param deployment_name: str (optional)
-  The deployment name defines part of the subdomain for the workspace. The workspace URL for the web
-  application and REST APIs is `.cloud.databricks.com`. For example, if the
-  deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`.
-  Hyphens are allowed. This property supports only the set of characters that are allowed in a
-  subdomain.
-  
-  To set this value, you must have a deployment name prefix. Contact your Databricks account team to
-  add an account deployment name prefix to your account.
-  
-  Workspace deployment names follow the account prefix and a hyphen. For example, if your account's
-  deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response
-  for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be
-  `acme-workspace-1.cloud.databricks.com`.
-  
-  You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment
-  name to only include the deployment prefix. For example, if your account's deployment prefix is
-  `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and
-  the workspace URL is `acme.cloud.databricks.com`.
-  
-  This value must be unique across all non-deleted deployments across all AWS regions.
-  
-  If a new workspace omits this property, the server generates a unique deployment name for you with
-  the pattern `dbc-xxxxxxxx-xxxx`.
-:param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
-  The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
-  is ignored if you specify a customer-managed VPC in the `network_id` field.", All the IP range
-  configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
-  detects an IP range overlap.
-  
-  Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
-  addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
-  `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
-  
-  The sizes of these IP ranges affect the maximum number of nodes for the workspace.
-  
-  **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
-  workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
-  your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
-  determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
-  Excel spreadsheet. See [calculate subnet sizes for a new workspace].
-  
-  [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
-:param gke_config: :class:`GkeConfig` (optional)
-  The configurations for the GKE cluster of a Databricks workspace.
-:param is_no_public_ip_enabled: bool (optional)
-  Whether no public IP is enabled for the workspace.
-:param location: str (optional)
-  The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
-:param managed_services_customer_managed_key_id: str (optional)
-  The ID of the workspace's managed services encryption key configuration object. This is used to help
-  protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query
-  history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
-:param network_id: str (optional)
-:param pricing_tier: :class:`PricingTier` (optional)
-  The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
-  
-  [AWS Pricing]: https://databricks.com/product/aws-pricing
-:param private_access_settings_id: str (optional)
-  ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
-  specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
-  back-end (data plane to control plane connection), or both connection types.
-  
-  Before configuring PrivateLink, read the [Databricks article about PrivateLink].",
-  
-  [AWS PrivateLink]: https://aws.amazon.com/privatelink/
-  [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
-:param storage_configuration_id: str (optional)
-  The ID of the workspace's storage configuration object.
-:param storage_customer_managed_key_id: str (optional)
-  The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
-  workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
-  provided key configuration object property `use_cases` must contain `STORAGE`.
-
-:returns:
-  Long-running operation waiter for :class:`Workspace`.
-  See :method:wait_get_workspace_running for more details.
-
+        
+        Creates a new workspace.
+        
+        **Important**: This operation is asynchronous. A response with HTTP status code 200 means the request
+        has been accepted and is in progress, but does not mean that the workspace deployed successfully and
+        is running. The initial workspace status is typically `PROVISIONING`. Use the workspace ID
+        (`workspace_id`) field in the response to identify the new workspace and make repeated `GET` requests
+        with the workspace ID and check its status. The workspace becomes available when the status changes to
+        `RUNNING`.
+        
+        :param workspace_name: str
+          The workspace's human-readable name.
+        :param aws_region: str (optional)
+          The AWS region of the workspace's data plane.
+        :param cloud: str (optional)
+          The cloud provider which the workspace uses. For Google Cloud workspaces, always set this field to
+          `gcp`.
+        :param cloud_resource_container: :class:`CloudResourceContainer` (optional)
+          The general workspace configurations that are specific to cloud providers.
+        :param credentials_id: str (optional)
+          ID of the workspace's credential configuration object.
+        :param custom_tags: Dict[str,str] (optional)
+          The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
+          of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The
+          key can be of maximum length of 127 characters, and cannot be empty.
+        :param deployment_name: str (optional)
+          The deployment name defines part of the subdomain for the workspace. The workspace URL for the web
+          application and REST APIs is `<deployment-name>.cloud.databricks.com`. For example, if the
+          deployment name is `abcsales`, your workspace URL will be `https://abcsales.cloud.databricks.com`.
+          Hyphens are allowed. This property supports only the set of characters that are allowed in a
+          subdomain.
+          
+          To set this value, you must have a deployment name prefix. Contact your Databricks account team to
+          add an account deployment name prefix to your account.
+          
+          Workspace deployment names follow the account prefix and a hyphen. For example, if your account's
+          deployment prefix is `acme` and the workspace deployment name is `workspace-1`, the JSON response
+          for the `deployment_name` field becomes `acme-workspace-1`. The workspace URL would be
+          `acme-workspace-1.cloud.databricks.com`.
+          
+          You can also set the `deployment_name` to the reserved keyword `EMPTY` if you want the deployment
+          name to only include the deployment prefix. For example, if your account's deployment prefix is
+          `acme` and the workspace deployment name is `EMPTY`, the `deployment_name` becomes `acme` only and
+          the workspace URL is `acme.cloud.databricks.com`.
+          
+          This value must be unique across all non-deleted deployments across all AWS regions.
+          
+          If a new workspace omits this property, the server generates a unique deployment name for you with
+          the pattern `dbc-xxxxxxxx-xxxx`.
+        :param gcp_managed_network_config: :class:`GcpManagedNetworkConfig` (optional)
+          The network settings for the workspace. The configurations are only for Databricks-managed VPCs. It
+          is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP range
+          configurations must be mutually exclusive. An attempt to create a workspace fails if Databricks
+          detects an IP range overlap.
+          
+          Specify custom IP ranges in CIDR format. The IP ranges for these fields must not overlap, and all IP
+          addresses must be entirely within the following ranges: `10.0.0.0/8`, `100.64.0.0/10`,
+          `172.16.0.0/12`, `192.168.0.0/16`, and `240.0.0.0/4`.
+          
+          The sizes of these IP ranges affect the maximum number of nodes for the workspace.
+          
+          **Important**: Confirm the IP ranges used by your Databricks workspace before creating the
+          workspace. You cannot change them after your workspace is deployed. If the IP address ranges for
+          your Databricks are too small, IP exhaustion can occur, causing your Databricks jobs to fail. To
+          determine the address range sizes that you need, Databricks provides a calculator as a Microsoft
+          Excel spreadsheet. See [calculate subnet sizes for a new workspace].
+          
+          [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
+        :param gke_config: :class:`GkeConfig` (optional)
+          The configurations for the GKE cluster of a Databricks workspace.
+        :param is_no_public_ip_enabled: bool (optional)
+          Whether no public IP is enabled for the workspace.
+        :param location: str (optional)
+          The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
+        :param managed_services_customer_managed_key_id: str (optional)
+          The ID of the workspace's managed services encryption key configuration object. This is used to help
+          protect and control access to the workspace's notebooks, secrets, Databricks SQL queries, and query
+          history. The provided key configuration object property `use_cases` must contain `MANAGED_SERVICES`.
+        :param network_id: str (optional)
+        :param pricing_tier: :class:`PricingTier` (optional)
+          The pricing tier of the workspace. For pricing tier information, see [AWS Pricing].
+          
+          [AWS Pricing]: https://databricks.com/product/aws-pricing
+        :param private_access_settings_id: str (optional)
+          ID of the workspace's private access settings object. Only used for PrivateLink. This ID must be
+          specified for customers using [AWS PrivateLink] for either front-end (user-to-workspace connection),
+          back-end (data plane to control plane connection), or both connection types.
+          
+          Before configuring PrivateLink, read the [Databricks article about PrivateLink].
+          
+          [AWS PrivateLink]: https://aws.amazon.com/privatelink/
+          [Databricks article about PrivateLink]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/privatelink.html
+        :param storage_configuration_id: str (optional)
+          The ID of the workspace's storage configuration object.
+        :param storage_customer_managed_key_id: str (optional)
+          The ID of the workspace's storage encryption key configuration object. This is used to encrypt the
+          workspace's root S3 bucket (root DBFS and system data) and, optionally, cluster EBS volumes. The
+          provided key configuration object property `use_cases` must contain `STORAGE`.
+        
+        :returns:
+          Long-running operation waiter for :class:`Workspace`.
+          See :method:wait_get_workspace_running for more details.
+        
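A hedged sketch of the waiter pattern described above (all IDs are placeholders; `create_and_wait` below wraps the same flow):

    import datetime
    from databricks.sdk import AccountClient

    a = AccountClient()
    waiter = a.workspaces.create(
        workspace_name='my-workspace',
        aws_region='us-west-2',
        credentials_id='cred-1234',            # placeholder IDs
        storage_configuration_id='sc-1234')
    ws = waiter.result(timeout=datetime.timedelta(minutes=20))
    print(ws.workspace_status)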
 
     .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
@@ -156,19 +156,19 @@ with the workspace ID and check its status. The workspace becomes available when
     .. py:method:: delete(workspace_id: int)
 
         Delete a workspace.
-
-Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
-However, it might take a few minutes for all workspaces resources to be deleted, depending on the size
-and number of workspace resources.
-
-This operation is available only if your account is on the E2 version of the platform or on a select
-custom plan that allows multiple workspaces per account.
-
-:param workspace_id: int
-  Workspace ID.
-
-
-
+        
+        Terminates and deletes a Databricks workspace. From an API perspective, deletion is immediate.
+        However, it might take a few minutes for all workspace resources to be deleted, depending on the size
+        and number of workspace resources.
+        
+        This operation is available only if your account is on the E2 version of the platform or on a select
+        custom plan that allows multiple workspaces per account.
+        
+        :param workspace_id: int
+          Workspace ID.
+        
+        
+        
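Sketched deletion with a placeholder workspace ID, tolerating a workspace that is already gone:

    from databricks.sdk import AccountClient
    from databricks.sdk.errors import NotFound

    a = AccountClient()
    try:
        a.workspaces.delete(workspace_id=123456789)  # placeholder ID
    except NotFound:
        pass  # already deleted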
 
     .. py:method:: get(workspace_id: int) -> Workspace
 
@@ -186,25 +186,25 @@ custom plan that allows multiple workspaces per account.
             by_id = a.workspaces.get(workspace_id=created.workspace_id)
 
         Get a workspace.
-
-Gets information including status for a Databricks workspace, specified by ID. In the response, the
-`workspace_status` field indicates the current status. After initial workspace creation (which is
-asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
-becomes available when the status changes to `RUNNING`.
-
-For information about how to create a new workspace with this API **including error handling**, see
-[Create a new workspace using the Account API].
-
-This operation is available only if your account is on the E2 version of the platform or on a select
-custom plan that allows multiple workspaces per account.
-
-[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-
-:param workspace_id: int
-  Workspace ID.
-
-:returns: :class:`Workspace`
-
+        
+        Gets information including status for a Databricks workspace, specified by ID. In the response, the
+        `workspace_status` field indicates the current status. After initial workspace creation (which is
+        asynchronous), make repeated `GET` requests with the workspace ID and check its status. The workspace
+        becomes available when the status changes to `RUNNING`.
+        
+        For information about how to create a new workspace with this API **including error handling**, see
+        [Create a new workspace using the Account API].
+        
+        This operation is available only if your account is on the E2 version of the platform or on a select
+        custom plan that allows multiple workspaces per account.
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param workspace_id: int
+          Workspace ID.
+        
+        :returns: :class:`Workspace`
+        
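A naive polling loop following the docstring's guidance (placeholder ID; the enum lives in the provisioning service); the SDK's built-in waiters are normally preferable:

    import time
    from databricks.sdk import AccountClient
    from databricks.sdk.service import provisioning

    a = AccountClient()
    while True:
        ws = a.workspaces.get(workspace_id=123456789)  # placeholder ID
        if ws.workspace_status == provisioning.WorkspaceStatus.RUNNING:
            break
        time.sleep(30)  # re-poll until the workspace is available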
 
     .. py:method:: list() -> Iterator[Workspace]
 
@@ -220,14 +220,14 @@ custom plan that allows multiple workspaces per account.
             all = a.workspaces.list()
 
         Get all workspaces.
-
-Gets a list of all workspaces associated with an account, specified by ID.
-
-This operation is available only if your account is on the E2 version of the platform or on a select
-custom plan that allows multiple workspaces per account.
-
-:returns: Iterator over :class:`Workspace`
-
+        
+        Gets a list of all workspaces associated with an account, specified by ID.
+        
+        This operation is available only if your account is on the E2 version of the platform or on a select
+        custom plan that allows multiple workspaces per account.
+        
+        :returns: Iterator over :class:`Workspace`
+        
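A minimal sketch of the iterator, assuming environment-based auth:

    from databricks.sdk import AccountClient

    a = AccountClient()
    for ws in a.workspaces.list():
        print(ws.workspace_id, ws.workspace_name, ws.workspace_status)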
 
     .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
@@ -257,135 +257,135 @@ custom plan that allows multiple workspaces per account.
             a.credentials.delete(credentials_id=update_role.credentials_id)
 
         Update workspace configuration.
-
-Updates a workspace configuration for either a running workspace or a failed workspace. The elements
-that can be updated varies between these two use cases.
-
-### Update a failed workspace You can update a Databricks workspace configuration for failed workspace
-deployment for some fields, but not all fields. For a failed workspace, this request supports updates
-to the following fields only: - Credential configuration ID - Storage configuration ID - Network
-configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a
-failed workspace only, you can convert a workspace with Databricks-managed VPC to use a
-customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC
-to be a Databricks-managed VPC. You can update the network configuration for a failed or running
-workspace to add PrivateLink support, though you must also add a private access settings object. - Key
-configuration ID for managed services (control plane storage, such as notebook source and Databricks
-SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID
-for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use
-customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running
-state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID
-for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update
-the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both
-types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink
-support on a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be
-applied. - Network connectivity configuration ID to add serverless stable IP support. You can add or
-update the network connectivity configuration ID to ensure the workspace uses the same set of stable
-IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from
-the workspace once attached, you can only switch to another one.
-
-After calling the `PATCH` operation to update the workspace configuration, make repeated `GET`
-requests with the workspace ID and check the workspace status. The workspace is successful if the
-status changes to `RUNNING`.
-
-For information about how to create a new workspace with this API **including error handling**, see
-[Create a new workspace using the Account API].
-
-### Update a running workspace You can update a Databricks workspace configuration for running
-workspaces for some fields, but not all fields. For a running workspace, this request supports
-updating the following fields only: - Credential configuration ID - Network configuration ID. Used
-only if you already use a customer-managed VPC. You cannot convert a running workspace from a
-Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this
-API for a failed or running workspace to add support for PrivateLink, although you also need to add a
-private access settings object. - Key configuration ID for managed services (control plane storage,
-such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data
-with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK)
-that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to
-encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK
-for managed services, adding this ID enables managed services encryption for new or updated data.
-Existing managed services data that existed before adding the key remains not encrypted with the DEK
-until it is modified. If the workspace already has customer-managed keys for managed services, this
-request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key
-configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this
-only if the workspace does not already have a customer-managed key configuration for workspace
-storage. - Private access settings ID to add PrivateLink support. You can add or update the private
-access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of
-connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on
-a workspace. - Custom tags. Given you provide an empty custom tags, the update would not be applied. -
-Network connectivity configuration ID to add serverless stable IP support. You can add or update the
-network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR
-blocks to access your resources. You cannot remove a network connectivity configuration from the
-workspace once attached, you can only switch to another one.
-
-**Important**: To update a running workspace, your workspace must have no running compute resources
-that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose
-clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not
-terminate all cluster instances in the workspace before calling this API, the request will fail.
-
-### Wait until changes take effect. After calling the `PATCH` operation to update the workspace
-configuration, make repeated `GET` requests with the workspace ID and check the workspace status and
-the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes
-`PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the
-workspace status changes to `RUNNING`. Note that you can also check the workspace status in the
-[Account Console]. However, you cannot use or create clusters for another 20 minutes after that status
-change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create
-or use clusters before this time interval elapses, clusters do not launch successfully, fail, or could
-cause other unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status
-stays at status `RUNNING` and the VPC change happens immediately. A change to the storage
-customer-managed key configuration ID might take a few minutes to update, so continue to check the
-workspace until you observe that it has been updated. If the update fails, the workspace might revert
-silently to its original configuration. After the workspace has been updated, you cannot use or create
-clusters for another 20 minutes. If you create or use clusters before this time interval elapses,
-clusters do not launch successfully, fail, or could cause other unexpected behavior.
-
-If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes
-to fully take effect. During the 20 minute wait, it is important that you stop all REST API calls to
-the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the 20
-minute wait.
-
-**Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment
-types and subscription types. If you have questions about availability, contact your Databricks
-representative.
-
-This operation is available only if your account is on the E2 version of the platform or on a select
-custom plan that allows multiple workspaces per account.
-
-[Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
-[Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
-
-:param workspace_id: int
-  Workspace ID.
-:param aws_region: str (optional)
-  The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available
-  only for updating failed workspaces.
-:param credentials_id: str (optional)
-  ID of the workspace's credential configuration object. This parameter is available for updating both
-  failed and running workspaces.
-:param custom_tags: Dict[str,str] (optional)
-  The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
-  of utf-8 characters. The value can be an empty string, with maximum length of 255 characters. The
-  key can be of maximum length of 127 characters, and cannot be empty.
-:param managed_services_customer_managed_key_id: str (optional)
-  The ID of the workspace's managed services encryption key configuration object. This parameter is
-  available only for updating failed workspaces.
-:param network_connectivity_config_id: str (optional)
-:param network_id: str (optional)
-  The ID of the workspace's network configuration object. Used only if you already use a
-  customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
-  customer-managed VPC by updating the workspace to add a network configuration ID.
-:param private_access_settings_id: str (optional)
-  The ID of the workspace's private access settings configuration object. This parameter is available
-  only for updating failed workspaces.
-:param storage_configuration_id: str (optional)
-  The ID of the workspace's storage configuration object. This parameter is available only for
-  updating failed workspaces.
-:param storage_customer_managed_key_id: str (optional)
-  The ID of the key configuration object for workspace storage. This parameter is available for
-  updating both failed and running workspaces.
-
-:returns:
-  Long-running operation waiter for :class:`Workspace`.
-  See :method:wait_get_workspace_running for more details.
-
+        
+        Updates a workspace configuration for either a running workspace or a failed workspace. The elements
+        that can be updated vary between these two use cases.
+        
+        ### Update a failed workspace You can update a Databricks workspace configuration for failed workspace
+        deployment for some fields, but not all fields. For a failed workspace, this request supports updates
+        to the following fields only: - Credential configuration ID - Storage configuration ID - Network
+        configuration ID. Used only to add or change a network configuration for a customer-managed VPC. For a
+        failed workspace only, you can convert a workspace with Databricks-managed VPC to use a
+        customer-managed VPC by adding this ID. You cannot downgrade a workspace with a customer-managed VPC
+        to be a Databricks-managed VPC. You can update the network configuration for a failed or running
+        workspace to add PrivateLink support, though you must also add a private access settings object. - Key
+        configuration ID for managed services (control plane storage, such as notebook source and Databricks
+        SQL queries). Used only if you use customer-managed keys for managed services. - Key configuration ID
+        for workspace storage (root S3 bucket and, optionally, EBS volumes). Used only if you use
+        customer-managed keys for workspace storage. **Important**: If the workspace was ever in the running
+        state, even if briefly before becoming a failed workspace, you cannot add a new key configuration ID
+        for workspace storage. - Private access settings ID to add PrivateLink support. You can add or update
+        the private access settings ID to upgrade a workspace to add support for front-end, back-end, or both
+        types of connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink
+        support on a workspace. - Custom tags. If you provide empty custom tags, the update is not
+        applied. - Network connectivity configuration ID to add serverless stable IP support. You can add or
+        update the network connectivity configuration ID to ensure the workspace uses the same set of stable
+        IP CIDR blocks to access your resources. You cannot remove a network connectivity configuration from
+        the workspace once attached, you can only switch to another one.
+        
+        After calling the `PATCH` operation to update the workspace configuration, make repeated `GET`
+        requests with the workspace ID and check the workspace status. The update is successful if the
+        status changes to `RUNNING`.
+        
+        For information about how to create a new workspace with this API **including error handling**, see
+        [Create a new workspace using the Account API].
+        
+        ### Update a running workspace You can update a Databricks workspace configuration for running
+        workspaces for some fields, but not all fields. For a running workspace, this request supports
+        updating the following fields only: - Credential configuration ID - Network configuration ID. Used
+        only if you already use a customer-managed VPC. You cannot convert a running workspace from a
+        Databricks-managed VPC to a customer-managed VPC. You can use a network configuration update in this
+        API for a failed or running workspace to add support for PrivateLink, although you also need to add a
+        private access settings object. - Key configuration ID for managed services (control plane storage,
+        such as notebook source and Databricks SQL queries). Databricks does not directly encrypt the data
+        with the customer-managed key (CMK). Databricks uses both the CMK and the Databricks managed key (DMK)
+        that is unique to your workspace to encrypt the Data Encryption Key (DEK). Databricks uses the DEK to
+        encrypt your workspace's managed services persisted data. If the workspace does not already have a CMK
+        for managed services, adding this ID enables managed services encryption for new or updated data.
+        Managed services data that existed before the key was added is not encrypted with the DEK
+        until it is modified. If the workspace already has customer-managed keys for managed services, this
+        request rotates (changes) the CMK keys and the DEK is re-encrypted with the DMK and the new CMK. - Key
+        configuration ID for workspace storage (root S3 bucket and, optionally, EBS volumes). You can set this
+        only if the workspace does not already have a customer-managed key configuration for workspace
+        storage. - Private access settings ID to add PrivateLink support. You can add or update the private
+        access settings ID to upgrade a workspace to add support for front-end, back-end, or both types of
+        connectivity. You cannot remove (downgrade) any existing front-end or back-end PrivateLink support on
+        a workspace. - Custom tags. If you provide empty custom tags, the update is not applied. -
+        Network connectivity configuration ID to add serverless stable IP support. You can add or update the
+        network connectivity configuration ID to ensure the workspace uses the same set of stable IP CIDR
+        blocks to access your resources. You cannot remove a network connectivity configuration from the
+        workspace once attached; you can only switch to another one.
+        
+        **Important**: To update a running workspace, your workspace must have no running compute resources
+        that run in your workspace's VPC in the Classic data plane. For example, stop all all-purpose
+        clusters, job clusters, pools with running clusters, and Classic SQL warehouses. If you do not
+        terminate all cluster instances in the workspace before calling this API, the request will fail.
+        
+        ### Wait until changes take effect. After calling the `PATCH` operation to update the workspace
+        configuration, make repeated `GET` requests with the workspace ID and check the workspace status and
+        the status of the fields. * For workspaces with a Databricks-managed VPC, the workspace status becomes
+        `PROVISIONING` temporarily (typically under 20 minutes). If the workspace update is successful, the
+        workspace status changes to `RUNNING`. Note that you can also check the workspace status in the
+        [Account Console]. However, you cannot use or create clusters for another 20 minutes after that status
+        change. This results in a total of up to 40 minutes in which you cannot create clusters. If you create
+        or use clusters before this time interval elapses, clusters might fail to launch or cause other
+        unexpected behavior. * For workspaces with a customer-managed VPC, the workspace status
+        stays at status `RUNNING` and the VPC change happens immediately. A change to the storage
+        customer-managed key configuration ID might take a few minutes to update, so continue to check the
+        workspace until you observe that it has been updated. If the update fails, the workspace might revert
+        silently to its original configuration. After the workspace has been updated, you cannot use or create
+        clusters for another 20 minutes. If you create or use clusters before this time interval elapses,
+        clusters might fail to launch or cause other unexpected behavior.
+        
+        If you update the _storage_ customer-managed key configurations, it takes 20 minutes for the changes
+        to fully take effect. During the 20-minute wait, it is important that you stop all REST API calls to
+        the DBFS API. If you are modifying _only the managed services key configuration_, you can omit the
+        20-minute wait.
+        
+        **Important**: Customer-managed keys and customer-managed VPCs are supported by only some deployment
+        types and subscription types. If you have questions about availability, contact your Databricks
+        representative.
+        
+        This operation is available only if your account is on the E2 version of the platform or on a select
+        custom plan that allows multiple workspaces per account.
+        
+        [Account Console]: https://docs.databricks.com/administration-guide/account-settings-e2/account-console-e2.html
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param workspace_id: int
+          Workspace ID.
+        :param aws_region: str (optional)
+          The AWS region of the workspace's data plane (for example, `us-west-2`). This parameter is available
+          only for updating failed workspaces.
+        :param credentials_id: str (optional)
+          ID of the workspace's credential configuration object. This parameter is available for updating both
+          failed and running workspaces.
+        :param custom_tags: Dict[str,str] (optional)
+          The custom tags key-value pairing that is attached to this workspace. The key-value pair is a string
+          of UTF-8 characters. The value can be an empty string, with a maximum length of 255 characters. The
+          key can have a maximum length of 127 characters and cannot be empty.
+        :param managed_services_customer_managed_key_id: str (optional)
+          The ID of the workspace's managed services encryption key configuration object. This parameter is
+          available only for updating failed workspaces.
+        :param network_connectivity_config_id: str (optional)
+        :param network_id: str (optional)
+          The ID of the workspace's network configuration object. Used only if you already use a
+          customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
+          customer-managed VPC by updating the workspace to add a network configuration ID.
+        :param private_access_settings_id: str (optional)
+          The ID of the workspace's private access settings configuration object. This parameter is available
+          only for updating failed workspaces.
+        :param storage_configuration_id: str (optional)
+          The ID of the workspace's storage configuration object. This parameter is available only for
+          updating failed workspaces.
+        :param storage_customer_managed_key_id: str (optional)
+          The ID of the key configuration object for workspace storage. This parameter is available for
+          updating both failed and running workspaces.
+        
+        :returns:
+          Long-running operation waiter for :class:`Workspace`.
+          See :method:wait_get_workspace_running for more details.
+        
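+        A minimal usage sketch, assuming an account-level client; the workspace and
+        credential configuration IDs below are placeholders. `update_and_wait` blocks until
+        the workspace returns to `RUNNING`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # Rotate the credential configuration on a running workspace and wait for
+            # the update to finish (hypothetical IDs).
+            workspace = a.workspaces.update_and_wait(
+                workspace_id=123456789,
+                credentials_id="<credential-configuration-id>",
+            )
+            print(workspace.workspace_status)
+        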
 
     .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst
index 5c8b0bc5b..885aae89f 100644
--- a/docs/account/settings/csp_enablement_account.rst
+++ b/docs/account/settings/csp_enablement_account.rst
@@ -5,46 +5,47 @@
 .. py:class:: CspEnablementAccountAPI
 
     The compliance security profile settings at the account level control whether to enable it for new
-workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-creation, account admins can enable the compliance security profile individually for each workspace.
-
-This settings can be disabled so that new workspaces do not have compliance security profile enabled by
-default.
+    workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+    creation, account admins can enable the compliance security profile individually for each workspace.
+    
+    This setting can be disabled so that new workspaces do not have the compliance security profile enabled by
+    default.
 
     .. py:method:: get( [, etag: Optional[str]]) -> CspEnablementAccountSetting
 
         Get the compliance security profile setting for new workspaces.
-
-Gets the compliance security profile setting for new workspaces.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`CspEnablementAccountSetting`
-
+        
+        Gets the compliance security profile setting for new workspaces.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`CspEnablementAccountSetting`
+        
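+        A short sketch, assuming this API is reached through the account client's
+        `settings` property:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # Read the current account-level compliance security profile setting.
+            setting = a.settings.csp_enablement_account.get()
+            print(setting.etag)
+        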
 
     .. py:method:: update(allow_missing: bool, setting: CspEnablementAccountSetting, field_mask: str) -> CspEnablementAccountSetting
 
         Update the compliance security profile setting for new workspaces.
-
-Updates the value of the compliance security profile setting for new workspaces.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`CspEnablementAccountSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`CspEnablementAccountSetting`
+        
+        Updates the value of the compliance security profile setting for new workspaces.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`CspEnablementAccountSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`CspEnablementAccountSetting`
+        
\ No newline at end of file
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
index 5d6590a0f..b10d7e2dc 100644
--- a/docs/account/settings/disable_legacy_features.rst
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -5,61 +5,62 @@
 .. py:class:: DisableLegacyFeaturesAPI
 
     Disable legacy features for new Databricks workspaces.
-
-For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
-provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
-prior to 13.3LTS.
+    
+    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+    prior to 13.3LTS.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyFeaturesResponse
 
         Delete the disable legacy features setting.
-
-Deletes the disable legacy features setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteDisableLegacyFeaturesResponse`
-
+        
+        Deletes the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyFeaturesResponse`
+        
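+        A sketch of the read -> delete pattern described above, assuming access through
+        `a.settings.disable_legacy_features`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # Fetch the current setting to obtain a fresh etag, then pass that etag to
+            # delete so a concurrent write is not silently overwritten.
+            current = a.settings.disable_legacy_features.get()
+            a.settings.disable_legacy_features.delete(etag=current.etag)
+        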
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyFeatures
 
         Get the disable legacy features setting.
-
-Gets the value of the disable legacy features setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DisableLegacyFeatures`
-
+        
+        Gets the value of the disable legacy features setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyFeatures, field_mask: str) -> DisableLegacyFeatures
 
         Update the disable legacy features setting.
-
-Updates the value of the disable legacy features setting.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`DisableLegacyFeatures`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`DisableLegacyFeatures`
+        
+        Updates the value of the disable legacy features setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyFeatures`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`DisableLegacyFeatures`
+        
\ No newline at end of file
diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst
index 30d066165..9485b7332 100644
--- a/docs/account/settings/enable_ip_access_lists.rst
+++ b/docs/account/settings/enable_ip_access_lists.rst
@@ -5,58 +5,59 @@
 .. py:class:: EnableIpAccessListsAPI
 
     Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
-disable restricted access based on IP addresses.
+    disable restricted access based on IP addresses.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse
 
         Delete the account IP access toggle setting.
-
-Reverts the value of the account IP access toggle setting to default (ON)
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteAccountIpAccessEnableResponse`
-
+        
+        Reverts the value of the account IP access toggle setting to default (ON)
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAccountIpAccessEnableResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable
 
         Get the account IP access toggle setting.
-
-Gets the value of the account IP access toggle setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`AccountIpAccessEnable`
-
+        
+        Gets the value of the account IP access toggle setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
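+        A minimal sketch, assuming the toggle is exposed as
+        `a.settings.enable_ip_access_lists`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # Inspect whether IP access lists are enforced for the account console.
+            setting = a.settings.enable_ip_access_lists.get()
+            print(setting)
+        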
 
     .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable
 
         Update the account IP access toggle setting.
-
-Updates the value of the account IP access toggle setting.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`AccountIpAccessEnable`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`AccountIpAccessEnable`
+        
+        Updates the value of the account IP access toggle setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AccountIpAccessEnable`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
\ No newline at end of file
diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst
index 14e2a514f..e9359d907 100644
--- a/docs/account/settings/esm_enablement_account.rst
+++ b/docs/account/settings/esm_enablement_account.rst
@@ -5,43 +5,44 @@
 .. py:class:: EsmEnablementAccountAPI
 
     The enhanced security monitoring setting at the account level controls whether to enable the feature on
-new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-creation, account admins can enable enhanced security monitoring individually for each workspace.
+    new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+    creation, account admins can enable enhanced security monitoring individually for each workspace.
 
     .. py:method:: get( [, etag: Optional[str]]) -> EsmEnablementAccountSetting
 
         Get the enhanced security monitoring setting for new workspaces.
-
-Gets the enhanced security monitoring setting for new workspaces.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`EsmEnablementAccountSetting`
-
+        
+        Gets the enhanced security monitoring setting for new workspaces.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`EsmEnablementAccountSetting`
+        
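+        Usage mirrors the other account-level settings APIs; a sketch assuming the
+        `a.settings.esm_enablement_account` accessor:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+            setting = a.settings.esm_enablement_account.get()
+        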
 
     .. py:method:: update(allow_missing: bool, setting: EsmEnablementAccountSetting, field_mask: str) -> EsmEnablementAccountSetting
 
         Update the enhanced security monitoring setting for new workspaces.
-
-Updates the value of the enhanced security monitoring setting for new workspaces.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`EsmEnablementAccountSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`EsmEnablementAccountSetting`
+        
+        Updates the value of the enhanced security monitoring setting for new workspaces.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`EsmEnablementAccountSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`EsmEnablementAccountSetting`
+        
\ No newline at end of file
diff --git a/docs/account/settings/ip_access_lists.rst b/docs/account/settings/ip_access_lists.rst
index b3b2a0aa4..7718d0c54 100644
--- a/docs/account/settings/ip_access_lists.rst
+++ b/docs/account/settings/ip_access_lists.rst
@@ -5,146 +5,147 @@
 .. py:class:: AccountIpAccessListsAPI
 
     The Accounts IP Access List API enables account admins to configure IP access lists for access to the
-account console.
-
-Account IP Access Lists affect web application access and REST API access to the account console and
-account APIs. If the feature is disabled for the account, all access is allowed for this account. There is
-support for allow lists (inclusion) and block lists (exclusion).
-
-When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
-matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
-lists**, the IP address is compared with the allow lists.
-
-If there is at least one allow list for the account, the connection is allowed only if the IP address
-matches an allow list. If there are no allow lists for the account, all IP addresses are allowed.
-
-For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where
-one CIDR counts as a single value.
-
-After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.
+    account console.
+    
+    Account IP Access Lists affect web application access and REST API access to the account console and
+    account APIs. If the feature is disabled for the account, all access is allowed for this account. There is
+    support for allow lists (inclusion) and block lists (exclusion).
+    
+    When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
+    matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
+    lists**, the IP address is compared with the allow lists.
+    
+    If there is at least one allow list for the account, the connection is allowed only if the IP address
+    matches an allow list. If there are no allow lists for the account, all IP addresses are allowed.
+    
+    For all allow lists and block lists combined, the account supports a maximum of 1000 IP/CIDR values, where
+    one CIDR counts as a single value.
+    
+    After changes to the account-level IP access lists, it can take a few minutes for changes to take effect.
 
     .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse
 
         Create access list.
-
-Creates an IP access list for the account.
-
-A list can be an allow list or a block list. See the top of this file for a description of how the
-server treats allow lists and block lists at runtime.
-
-When creating or updating an IP access list:
-
-* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-`error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
-error 400 is returned with `error_code` value `INVALID_STATE`.
-
-It can take a few minutes for the changes to take effect.
-
-:param label: str
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType`
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-:param ip_addresses: List[str] (optional)
-
-:returns: :class:`CreateIpAccessListResponse`
-
+        
+        Creates an IP access list for the account.
+        
+        A list can be an allow list or a block list. See the top of this file for a description of how the
+        server treats allow lists and block lists at runtime.
+        
+        When creating or updating an IP access list:
+        
+        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+        `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
+        error 400 is returned with `error_code` value `INVALID_STATE`.
+        
+        It can take a few minutes for the changes to take effect.
+        
+        :param label: str
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType`
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        :param ip_addresses: List[str] (optional)
+        
+        :returns: :class:`CreateIpAccessListResponse`
+        
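+        A sketch of creating an allow list, assuming the account client exposes this API as
+        `a.ip_access_lists` and that `ListType` is importable from
+        `databricks.sdk.service.settings`; the label and CIDR range are placeholders:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.settings import ListType
+        
+            a = AccountClient()
+        
+            # Allow a single CIDR block, which counts as one of the 1000 permitted values.
+            created = a.ip_access_lists.create(
+                label="office-network",
+                list_type=ListType.ALLOW,
+                ip_addresses=["203.0.113.0/24"],
+            )
+            print(created.ip_access_list.list_id)
+        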
 
     .. py:method:: delete(ip_access_list_id: str)
 
         Delete access list.
-
-Deletes an IP access list, specified by its list ID.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-
-
-
+        
+        Deletes an IP access list, specified by its list ID.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        
+        
+        
 
     .. py:method:: get(ip_access_list_id: str) -> GetIpAccessListResponse
 
         Get IP access list.
-
-Gets an IP access list, specified by its list ID.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-
-:returns: :class:`GetIpAccessListResponse`
-
+        
+        Gets an IP access list, specified by its list ID.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        
+        :returns: :class:`GetIpAccessListResponse`
+        
 
     .. py:method:: list() -> Iterator[IpAccessListInfo]
 
         Get access lists.
-
-Gets all IP access lists for the specified account.
-
-:returns: Iterator over :class:`IpAccessListInfo`
-
+        
+        Gets all IP access lists for the specified account.
+        
+        :returns: Iterator over :class:`IpAccessListInfo`
+        
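+        A sketch of iterating over the account's lists (same client assumption as above):
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # The SDK returns an iterator, so results can be streamed directly.
+            for acl in a.ip_access_lists.list():
+                print(acl.label, acl.list_type, acl.enabled)
+        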
 
     .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]])
 
         Replace access list.
-
-Replaces an IP access list, specified by its ID.
-
-A list can include allow lists and block lists. See the top of this file for a description of how the
-server treats allow lists and block lists at run time. When replacing an IP access list: * For all
-allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
-CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
-`QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
-returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
-effect.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-:param label: str
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType`
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-:param enabled: bool
-  Specifies whether this IP access list is enabled.
-:param ip_addresses: List[str] (optional)
-
-
-
+        
+        Replaces an IP access list, specified by its ID.
+        
+        A list can include allow lists and block lists. See the top of this file for a description of how the
+        server treats allow lists and block lists at run time. When replacing an IP access list: * For all
+        allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
+        CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
+        `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
+        returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
+        effect.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        :param label: str
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType`
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        :param enabled: bool
+          Specifies whether this IP access list is enabled.
+        :param ip_addresses: List[str] (optional)
+        
+        
+        
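+        A sketch of a full replacement; the list ID is a placeholder, and every mutable field
+        is supplied because this call replaces the list rather than patching it:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.settings import ListType
+        
+            a = AccountClient()
+        
+            a.ip_access_lists.replace(
+                ip_access_list_id="<list-id>",
+                label="office-network",
+                list_type=ListType.ALLOW,
+                enabled=True,
+                ip_addresses=["203.0.113.0/24", "198.51.100.7"],
+            )
+        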
 
     .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]])
 
         Update access list.
-
-Updates an existing IP access list, specified by its ID.
-
-A list can include allow lists and block lists. See the top of this file for a description of how the
-server treats allow lists and block lists at run time.
-
-When updating an IP access list:
-
-* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-`error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
-error 400 is returned with `error_code` value `INVALID_STATE`.
-
-It can take a few minutes for the changes to take effect.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-:param enabled: bool (optional)
-  Specifies whether this IP access list is enabled.
-:param ip_addresses: List[str] (optional)
-:param label: str (optional)
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType` (optional)
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-
-
+        
+        Updates an existing IP access list, specified by its ID.
+        
+        A list can include allow lists and block lists. See the top of this file for a description of how the
+        server treats allow lists and block lists at run time.
+        
+        When updating an IP access list:
+        
+        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+        `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
+        error 400 is returned with `error_code` value `INVALID_STATE`.
+        
+        It can take a few minutes for the changes to take effect.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        :param enabled: bool (optional)
+          Specifies whether this IP access list is enabled.
+        :param ip_addresses: List[str] (optional)
+        :param label: str (optional)
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType` (optional)
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/account/settings/network_connectivity.rst b/docs/account/settings/network_connectivity.rst
index d073fc1da..30b50abcb 100644
--- a/docs/account/settings/network_connectivity.rst
+++ b/docs/account/settings/network_connectivity.rst
@@ -5,124 +5,125 @@
 .. py:class:: NetworkConnectivityAPI
 
     These APIs provide configurations for the network connectivity of your workspaces for serverless compute
-resources.
+    resources.
 
     .. py:method:: create_network_connectivity_configuration(name: str, region: str) -> NetworkConnectivityConfiguration
 
         Create a network connectivity configuration.
-
-:param name: str
-  The name of the network connectivity configuration. The name can contain alphanumeric characters,
-  hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the
-  regular expression `^[0-9a-zA-Z-_]{3,30}$`.
-:param region: str
-  The region for the network connectivity configuration. Only workspaces in the same region can be
-  attached to the network connectivity configuration.
-
-:returns: :class:`NetworkConnectivityConfiguration`
-
+        
+        :param name: str
+          The name of the network connectivity configuration. The name can contain alphanumeric characters,
+          hyphens, and underscores. The length must be between 3 and 30 characters. The name must match the
+          regular expression `^[0-9a-zA-Z-_]{3,30}$`.
+        :param region: str
+          The region for the network connectivity configuration. Only workspaces in the same region can be
+          attached to the network connectivity configuration.
+        
+        :returns: :class:`NetworkConnectivityConfiguration`
+        
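+        A minimal sketch, assuming the API is exposed as `a.network_connectivity`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            # The name must match ^[0-9a-zA-Z-_]{3,30}$; the region is a placeholder.
+            ncc = a.network_connectivity.create_network_connectivity_configuration(
+                name="my-ncc",
+                region="westus",
+            )
+            print(ncc.network_connectivity_config_id)
+        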
 
     .. py:method:: create_private_endpoint_rule(network_connectivity_config_id: str, resource_id: str, group_id: CreatePrivateEndpointRuleRequestGroupId) -> NccAzurePrivateEndpointRule
 
         Create a private endpoint rule.
-
-Create a private endpoint rule for the specified network connectivity config object. Once the object
-is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure
-resource.
-
-**IMPORTANT**: You must use Azure portal or other Azure tools to approve the private endpoint to
-complete the connection. To get the information of the private endpoint created, make a `GET` request
-on the new private endpoint rule. See [serverless private link].
-
-[serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-:param resource_id: str
-  The Azure resource ID of the target resource.
-:param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId`
-  The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
-  storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
-
-:returns: :class:`NccAzurePrivateEndpointRule`
-
+        
+        Create a private endpoint rule for the specified network connectivity config object. Once the object
+        is created, Databricks asynchronously provisions a new Azure private endpoint to your specified Azure
+        resource.
+        
+        **IMPORTANT**: You must use the Azure portal or other Azure tools to approve the private endpoint to
+        complete the connection. To get information about the private endpoint that was created, make a `GET`
+        request on the new private endpoint rule. See [serverless private link].
+        
+        [serverless private link]: https://learn.microsoft.com/azure/databricks/security/network/serverless-network-security/serverless-private-link
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectivity Configuration ID.
+        :param resource_id: str
+          The Azure resource ID of the target resource.
+        :param group_id: :class:`CreatePrivateEndpointRuleRequestGroupId`
+          The sub-resource type (group ID) of the target resource. Note that to connect to workspace root
+          storage (root DBFS), you need two endpoints, one for `blob` and one for `dfs`.
+        
+        :returns: :class:`NccAzurePrivateEndpointRule`
+        
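+        A sketch under the same client assumption; the Azure resource ID is a placeholder,
+        and `blob` is just one of the documented sub-resource types:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.settings import \
+                CreatePrivateEndpointRuleRequestGroupId
+        
+            a = AccountClient()
+        
+            rule = a.network_connectivity.create_private_endpoint_rule(
+                network_connectivity_config_id="<ncc-id>",
+                resource_id="/subscriptions/<sub>/resourceGroups/<rg>/providers"
+                            "/Microsoft.Storage/storageAccounts/<account>",
+                group_id=CreatePrivateEndpointRuleRequestGroupId.BLOB,
+            )
+            # Approval on the Azure side is still required to complete the connection.
+        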
 
     .. py:method:: delete_network_connectivity_configuration(network_connectivity_config_id: str)
 
         Delete a network connectivity configuration.
-
-Deletes a network connectivity configuration.
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-
-
-
+        
+        Deletes a network connectivity configuration.
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectivity Configuration ID.
+        
+        
+        
 
     .. py:method:: delete_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule
 
         Delete a private endpoint rule.
-
-Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
-endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
-after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
-set to `true` and the private endpoint is not available to your serverless compute resources.
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-:param private_endpoint_rule_id: str
-  Your private endpoint rule ID.
-
-:returns: :class:`NccAzurePrivateEndpointRule`
-
+        
+        Initiates deleting a private endpoint rule. If the connection state is PENDING or EXPIRED, the private
+        endpoint is immediately deleted. Otherwise, the private endpoint is deactivated and will be deleted
+        after seven days of deactivation. When a private endpoint is deactivated, the `deactivated` field is
+        set to `true` and the private endpoint is not available to your serverless compute resources.
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectivity Configuration ID.
+        :param private_endpoint_rule_id: str
+          Your private endpoint rule ID.
+        
+        :returns: :class:`NccAzurePrivateEndpointRule`
+        
 
     .. py:method:: get_network_connectivity_configuration(network_connectivity_config_id: str) -> NetworkConnectivityConfiguration
 
         Get a network connectivity configuration.
-
-Gets a network connectivity configuration.
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-
-:returns: :class:`NetworkConnectivityConfiguration`
-
+        
+        Gets a network connectivity configuration.
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectvity Configuration ID.
+        
+        :returns: :class:`NetworkConnectivityConfiguration`
+        
 
     .. py:method:: get_private_endpoint_rule(network_connectivity_config_id: str, private_endpoint_rule_id: str) -> NccAzurePrivateEndpointRule
 
         Get a private endpoint rule.
-
-Gets the private endpoint rule.
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-:param private_endpoint_rule_id: str
-  Your private endpoint rule ID.
-
-:returns: :class:`NccAzurePrivateEndpointRule`
-
+        
+        Gets the private endpoint rule.
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectivity Configuration ID.
+        :param private_endpoint_rule_id: str
+          Your private endpoint rule ID.
+        
+        :returns: :class:`NccAzurePrivateEndpointRule`
+        
 
     .. py:method:: list_network_connectivity_configurations( [, page_token: Optional[str]]) -> Iterator[NetworkConnectivityConfiguration]
 
         List network connectivity configurations.
-
-Gets an array of network connectivity configurations.
-
-:param page_token: str (optional)
-  Pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`NetworkConnectivityConfiguration`
-
+        
+        Gets an array of network connectivity configurations.
+        
+        :param page_token: str (optional)
+          Pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`NetworkConnectivityConfiguration`
+        
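+        A sketch of listing configurations; the iterator follows `page_token` pagination
+        internally:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+        
+            a = AccountClient()
+        
+            for ncc in a.network_connectivity.list_network_connectivity_configurations():
+                print(ncc.name, ncc.region)
+        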
 
     .. py:method:: list_private_endpoint_rules(network_connectivity_config_id: str [, page_token: Optional[str]]) -> Iterator[NccAzurePrivateEndpointRule]
 
         List private endpoint rules.
-
-Gets an array of private endpoint rules.
-
-:param network_connectivity_config_id: str
-  Your Network Connectvity Configuration ID.
-:param page_token: str (optional)
-  Pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`NccAzurePrivateEndpointRule`
+        
+        Gets an array of private endpoint rules.
+        
+        :param network_connectivity_config_id: str
+          Your Network Connectivity Configuration ID.
+        :param page_token: str (optional)
+          Pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`NccAzurePrivateEndpointRule`
+        
\ No newline at end of file
diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst
index 46eec4a5d..54e958a28 100644
--- a/docs/account/settings/personal_compute.rst
+++ b/docs/account/settings/personal_compute.rst
@@ -5,63 +5,64 @@
 .. py:class:: PersonalComputeAPI
 
     The Personal Compute enablement setting lets you control which users can use the Personal Compute default
-policy to create compute resources. By default all users in all workspaces have access (ON), but you can
-change the setting to instead let individual workspaces configure access control (DELEGATE).
-
-There is only one instance of this setting per account. Since this setting has a default value, this
-setting is present on all accounts even though it's never set on a given account. Deletion reverts the
-value of the setting back to the default value.
+    policy to create compute resources. By default, all users in all workspaces have access (ON), but you can
+    change the setting to instead let individual workspaces configure access control (DELEGATE).
+    
+    There is only one instance of this setting per account. Since this setting has a default value, this
+    setting is present on all accounts even though it's never set on a given account. Deletion reverts the
+    value of the setting back to the default value.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeletePersonalComputeSettingResponse
 
         Delete Personal Compute setting.
-
-Reverts back the Personal Compute setting value to default (ON)
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeletePersonalComputeSettingResponse`
-
+        
+        Reverts the Personal Compute setting value to default (ON)
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeletePersonalComputeSettingResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> PersonalComputeSetting
 
         Get Personal Compute setting.
-
-Gets the value of the Personal Compute setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`PersonalComputeSetting`
-
+        
+        Gets the value of the Personal Compute setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`PersonalComputeSetting`
+        
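+        A sketch that switches the setting to `DELEGATE` through the `update` method
+        documented below; the message and enum names are assumed to live in
+        `databricks.sdk.service.settings`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service.settings import (PersonalComputeMessage,
+                                                         PersonalComputeMessageEnum,
+                                                         PersonalComputeSetting)
+        
+            a = AccountClient()
+        
+            # The field mask names exactly the field being changed.
+            a.settings.personal_compute.update(
+                allow_missing=True,
+                setting=PersonalComputeSetting(
+                    personal_compute=PersonalComputeMessage(
+                        value=PersonalComputeMessageEnum.DELEGATE)),
+                field_mask="personal_compute.value",
+            )
+        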
 
     .. py:method:: update(allow_missing: bool, setting: PersonalComputeSetting, field_mask: str) -> PersonalComputeSetting
 
         Update Personal Compute setting.
-
-Updates the value of the Personal Compute setting.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`PersonalComputeSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`PersonalComputeSetting`
+        
+        Updates the value of the Personal Compute setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`PersonalComputeSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`PersonalComputeSetting`
+        
\ No newline at end of file
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index 0a0b85b8b..abf1c0e45 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -10,41 +10,41 @@
         :type: CspEnablementAccountAPI
 
         The compliance security profile settings at the account level control whether to enable it for new
-    workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-    creation, account admins can enable the compliance security profile individually for each workspace.
-    
-    This settings can be disabled so that new workspaces do not have compliance security profile enabled by
-    default.
+        workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+        creation, account admins can enable the compliance security profile individually for each workspace.
+        
+        This setting can be disabled so that new workspaces do not have the compliance security profile enabled by
+        default.
 
     .. py:property:: disable_legacy_features
         :type: DisableLegacyFeaturesAPI
 
         Disable legacy features for new Databricks workspaces.
-    
-    For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
-    provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
-    prior to 13.3LTS.
+        
+        For newly created workspaces: 1. Disables the use of DBFS root and mounts. 2. Hive Metastore will not be
+        provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
+        prior to 13.3LTS.
 
     .. py:property:: enable_ip_access_lists
         :type: EnableIpAccessListsAPI
 
         Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
-    disable restricted access based on IP addresses.
+        disable restricted access based on IP addresses.
 
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
         The enhanced security monitoring setting at the account level controls whether to enable the feature on
-    new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
-    creation, account admins can enable enhanced security monitoring individually for each workspace.
+        new workspaces. By default, this account-level setting is disabled for new workspaces. After workspace
+        creation, account admins can enable enhanced security monitoring individually for each workspace.
 
     .. py:property:: personal_compute
         :type: PersonalComputeAPI
 
         The Personal Compute enablement setting lets you control which users can use the Personal Compute default
-    policy to create compute resources. By default all users in all workspaces have access (ON), but you can
-    change the setting to instead let individual workspaces configure access control (DELEGATE).
-    
-    There is only one instance of this setting per account. Since this setting has a default value, this
-    setting is present on all accounts even though it's never set on a given account. Deletion reverts the
-    value of the setting back to the default value.
\ No newline at end of file
+        policy to create compute resources. By default all users in all workspaces have access (ON), but you can
+        change the setting to instead let individual workspaces configure access control (DELEGATE).
+        
+        There is only one instance of this setting per account. Since this setting has a default value, this
+        setting is present on all accounts even though it's never set on a given account. Deletion reverts the
+        value of the setting back to the default value.
\ No newline at end of file
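
Each property above is a small settings API of its own. A hedged sketch of reading the current account-level defaults, assuming each property follows the usual Settings `get()` pattern:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Each property on a.settings wraps one account-level setting; get() is
# assumed to return the current value (or the default if never set).
print(a.settings.esm_enablement_account.get())
print(a.settings.enable_ip_access_lists.get())
print(a.settings.personal_compute.get())
```
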
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index 9c0bba237..af7229f34 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -5,22 +5,22 @@
 .. py:class:: AppsAPI
 
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
-Databricks services, and enable users to interact through single sign-on.
+    Databricks services, and enable users to interact through single sign-on.
 
     .. py:method:: create( [, app: Optional[App], no_compute: Optional[bool]]) -> Wait[App]
 
         Create an app.
-
-Creates a new app.
-
-:param app: :class:`App` (optional)
-:param no_compute: bool (optional)
-  If true, the app will not be started after creation.
-
-:returns:
-  Long-running operation waiter for :class:`App`.
-  See :method:wait_get_app_active for more details.
-
+        
+        Creates a new app.
+        
+        :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        
 
     .. py:method:: create_and_wait( [, app: Optional[App], no_compute: Optional[bool], timeout: datetime.timedelta = 0:20:00]) -> App
 
@@ -28,29 +28,29 @@ Creates a new app.
     .. py:method:: delete(name: str) -> App
 
         Delete an app.
-
-Deletes an app.
-
-:param name: str
-  The name of the app.
-
-:returns: :class:`App`
-
+        
+        Deletes an app.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        
 
     .. py:method:: deploy(app_name: str [, app_deployment: Optional[AppDeployment]]) -> Wait[AppDeployment]
 
         Create an app deployment.
-
-Creates an app deployment for the app with the supplied name.
-
-:param app_name: str
-  The name of the app.
-:param app_deployment: :class:`AppDeployment` (optional)
-
-:returns:
-  Long-running operation waiter for :class:`AppDeployment`.
-  See :method:wait_get_deployment_app_succeeded for more details.
-
+        
+        Creates an app deployment for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param app_deployment: :class:`AppDeployment` (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`AppDeployment`.
+          See :method:wait_get_deployment_app_succeeded for more details.
+        
 
     .. py:method:: deploy_and_wait(app_name: str [, app_deployment: Optional[AppDeployment], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
 
@@ -58,110 +58,110 @@ Creates an app deployment for the app with the supplied name.
     .. py:method:: get(name: str) -> App
 
         Get an app.
-
-Retrieves information for the app with the supplied name.
-
-:param name: str
-  The name of the app.
-
-:returns: :class:`App`
-
+        
+        Retrieves information for the app with the supplied name.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns: :class:`App`
+        
 
     .. py:method:: get_deployment(app_name: str, deployment_id: str) -> AppDeployment
 
         Get an app deployment.
-
-Retrieves information for the app deployment with the supplied name and deployment id.
-
-:param app_name: str
-  The name of the app.
-:param deployment_id: str
-  The unique id of the deployment.
-
-:returns: :class:`AppDeployment`
-
+        
+        Retrieves information for the app deployment with the supplied name and deployment id.
+        
+        :param app_name: str
+          The name of the app.
+        :param deployment_id: str
+          The unique id of the deployment.
+        
+        :returns: :class:`AppDeployment`
+        
 
     .. py:method:: get_permission_levels(app_name: str) -> GetAppPermissionLevelsResponse
 
         Get app permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param app_name: str
-  The app for which to get or manage permissions.
-
-:returns: :class:`GetAppPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`GetAppPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(app_name: str) -> AppPermissions
 
         Get app permissions.
-
-Gets the permissions of an app. Apps can inherit permissions from their root object.
-
-:param app_name: str
-  The app for which to get or manage permissions.
-
-:returns: :class:`AppPermissions`
-
+        
+        Gets the permissions of an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        
+        :returns: :class:`AppPermissions`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[App]
 
         List apps.
-
-Lists all apps in the workspace.
-
-:param page_size: int (optional)
-  Upper bound for items returned.
-:param page_token: str (optional)
-  Pagination token to go to the next page of apps. Requests first page if absent.
-
-:returns: Iterator over :class:`App`
-
+        
+        Lists all apps in the workspace.
+        
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`App`
+        
 
     .. py:method:: list_deployments(app_name: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[AppDeployment]
 
         List app deployments.
-
-Lists all app deployments for the app with the supplied name.
-
-:param app_name: str
-  The name of the app.
-:param page_size: int (optional)
-  Upper bound for items returned.
-:param page_token: str (optional)
-  Pagination token to go to the next page of apps. Requests first page if absent.
-
-:returns: Iterator over :class:`AppDeployment`
-
+        
+        Lists all app deployments for the app with the supplied name.
+        
+        :param app_name: str
+          The name of the app.
+        :param page_size: int (optional)
+          Upper bound for items returned.
+        :param page_token: str (optional)
+          Pagination token to go to the next page of apps. Requests first page if absent.
+        
+        :returns: Iterator over :class:`AppDeployment`
+        
 
     .. py:method:: set_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
 
         Set app permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param app_name: str
-  The app for which to get or manage permissions.
-:param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-
-:returns: :class:`AppPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        
 
     .. py:method:: start(name: str) -> Wait[App]
 
         Start an app.
-
-Start the last active deployment of the app in the workspace.
-
-:param name: str
-  The name of the app.
-
-:returns:
-  Long-running operation waiter for :class:`App`.
-  See :method:wait_get_app_active for more details.
-
+        
+        Start the last active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_active for more details.
+        
 
     .. py:method:: start_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
@@ -169,16 +169,16 @@ Start the last active deployment of the app in the workspace.
     .. py:method:: stop(name: str) -> Wait[App]
 
         Stop an app.
-
-Stops the active deployment of the app in the workspace.
-
-:param name: str
-  The name of the app.
-
-:returns:
-  Long-running operation waiter for :class:`App`.
-  See :method:wait_get_app_stopped for more details.
-
+        
+        Stops the active deployment of the app in the workspace.
+        
+        :param name: str
+          The name of the app.
+        
+        :returns:
+          Long-running operation waiter for :class:`App`.
+          See :method:wait_get_app_stopped for more details.
+        
 
     .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
@@ -186,29 +186,29 @@ Stops the active deployment of the app in the workspace.
     .. py:method:: update(name: str [, app: Optional[App]]) -> App
 
         Update an app.
-
-Updates the app with the supplied name.
-
-:param name: str
-  The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-  must be unique within the workspace.
-:param app: :class:`App` (optional)
-
-:returns: :class:`App`
-
+        
+        Updates the app with the supplied name.
+        
+        :param name: str
+          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
+          must be unique within the workspace.
+        :param app: :class:`App` (optional)
+        
+        :returns: :class:`App`
+        
 
     .. py:method:: update_permissions(app_name: str [, access_control_list: Optional[List[AppAccessControlRequest]]]) -> AppPermissions
 
         Update app permissions.
-
-Updates the permissions on an app. Apps can inherit permissions from their root object.
-
-:param app_name: str
-  The app for which to get or manage permissions.
-:param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
-
-:returns: :class:`AppPermissions`
-
+        
+        Updates the permissions on an app. Apps can inherit permissions from their root object.
+        
+        :param app_name: str
+          The app for which to get or manage permissions.
+        :param access_control_list: List[:class:`AppAccessControlRequest`] (optional)
+        
+        :returns: :class:`AppPermissions`
+        
 
     .. py:method:: wait_get_app_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[App], None]]) -> App
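
A hedged end-to-end sketch of the lifecycle above (the `databricks.sdk.service.apps` module path and the `App.name`/`AppDeployment.source_code_path` field names are assumptions, not confirmed by this page):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App, AppDeployment

w = WorkspaceClient()

# create() returns a Wait[App]; the *_and_wait variant blocks until the
# app reaches the ACTIVE state (default timeout: 20 minutes).
app = w.apps.create_and_wait(app=App(name="my-demo-app"))

# Deploy source code for the app and block until the deployment succeeds.
deployment = w.apps.deploy_and_wait(
    app_name=app.name,
    app_deployment=AppDeployment(
        source_code_path="/Workspace/Users/me@example.com/my-demo-app"),
)
print(deployment.status)

# Stop the active deployment when finished.
w.apps.stop_and_wait(name=app.name)
```
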
 
diff --git a/docs/workspace/catalog/artifact_allowlists.rst b/docs/workspace/catalog/artifact_allowlists.rst
index 9f22ed335..349bbbd0f 100644
--- a/docs/workspace/catalog/artifact_allowlists.rst
+++ b/docs/workspace/catalog/artifact_allowlists.rst
@@ -5,32 +5,33 @@
 .. py:class:: ArtifactAllowlistsAPI
 
     In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so
-that users can leverage these artifacts on compute configured with shared access mode.
+    that users can leverage these artifacts on compute configured with shared access mode.
 
     .. py:method:: get(artifact_type: ArtifactType) -> ArtifactAllowlistInfo
 
         Get an artifact allowlist.
-
-Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
-the **MANAGE ALLOWLIST** privilege on the metastore.
-
-:param artifact_type: :class:`ArtifactType`
-  The artifact type of the allowlist.
-
-:returns: :class:`ArtifactAllowlistInfo`
-
+        
+        Get the artifact allowlist of a certain artifact type. The caller must be a metastore admin or have
+        the **MANAGE ALLOWLIST** privilege on the metastore.
+        
+        :param artifact_type: :class:`ArtifactType`
+          The artifact type of the allowlist.
+        
+        :returns: :class:`ArtifactAllowlistInfo`
+        
 
     .. py:method:: update(artifact_type: ArtifactType, artifact_matchers: List[ArtifactMatcher]) -> ArtifactAllowlistInfo
 
         Set an artifact allowlist.
-
-Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
-the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
-the metastore.
-
-:param artifact_type: :class:`ArtifactType`
-  The artifact type of the allowlist.
-:param artifact_matchers: List[:class:`ArtifactMatcher`]
-  A list of allowed artifact match patterns.
-
-:returns: :class:`ArtifactAllowlistInfo`
+        
+        Set the artifact allowlist of a certain artifact type. The whole artifact allowlist is replaced with
+        the new allowlist. The caller must be a metastore admin or have the **MANAGE ALLOWLIST** privilege on
+        the metastore.
+        
+        :param artifact_type: :class:`ArtifactType`
+          The artifact type of the allowlist.
+        :param artifact_matchers: List[:class:`ArtifactMatcher`]
+          A list of allowed artifact match patterns.
+        
+        :returns: :class:`ArtifactAllowlistInfo`
+        
\ No newline at end of file
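
A minimal sketch of a full-list replacement via `update`, assuming the `ArtifactType`/`MatchType`/`ArtifactMatcher` names in `databricks.sdk.service.catalog`:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (ArtifactMatcher, ArtifactType,
                                            MatchType)

w = WorkspaceClient()

# update() replaces the entire allowlist for the given artifact type,
# so include every matcher that should remain allowed.
info = w.artifact_allowlists.update(
    artifact_type=ArtifactType.LIBRARY_JAR,
    artifact_matchers=[
        ArtifactMatcher(artifact="/Volumes/main/default/libs/",
                        match_type=MatchType.PREFIX_MATCH),
    ],
)
print(info.artifact_matchers)
```
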
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 6cd07861f..1d6b6dc2a 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -5,11 +5,11 @@
 .. py:class:: CatalogsAPI
 
     A catalog is the first layer of Unity Catalog’s three-level namespace. It’s used to organize your data
-assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission.
-
-In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of
-the workspaces in a Databricks account. Users in different workspaces can share access to the same data,
-depending on privileges granted centrally in Unity Catalog.
+    assets. Users can see all catalogs on which they have been assigned the USE_CATALOG data permission.
+    
+    In Unity Catalog, admins and data stewards manage users and their access to data centrally across all of
+    the workspaces in a Databricks account. Users in different workspaces can share access to the same data,
+    depending on privileges granted centrally in Unity Catalog.
 
     .. py:method:: create(name: str [, comment: Optional[str], connection_name: Optional[str], options: Optional[Dict[str, str]], properties: Optional[Dict[str, str]], provider_name: Optional[str], share_name: Optional[str], storage_root: Optional[str]]) -> CatalogInfo
 
@@ -30,46 +30,46 @@ depending on privileges granted centrally in Unity Catalog.
             w.catalogs.delete(name=created.name, force=True)
 
         Create a catalog.
-
-Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
-**CREATE_CATALOG** privilege.
-
-:param name: str
-  Name of catalog.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param connection_name: str (optional)
-  The name of the connection to an external data source.
-:param options: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-:param properties: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-:param provider_name: str (optional)
-  The name of delta sharing provider.
-  
-  A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.
-:param share_name: str (optional)
-  The name of the share under the share provider.
-:param storage_root: str (optional)
-  Storage root URL for managed tables within catalog.
-
-:returns: :class:`CatalogInfo`
-
+        
+        Creates a new catalog instance in the parent metastore if the caller is a metastore admin or has the
+        **CREATE_CATALOG** privilege.
+        
+        :param name: str
+          Name of catalog.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param connection_name: str (optional)
+          The name of the connection to an external data source.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        :param properties: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        :param provider_name: str (optional)
+          The name of delta sharing provider.
+          
+          A Delta Sharing catalog is a catalog that is based on a Delta share on a remote sharing server.
+        :param share_name: str (optional)
+          The name of the share under the share provider.
+        :param storage_root: str (optional)
+          Storage root URL for managed tables within catalog.
+        
+        :returns: :class:`CatalogInfo`
+        
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a catalog.
-
-Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner
-of the catalog.
-
-:param name: str
-  The name of the catalog.
-:param force: bool (optional)
-  Force deletion even if the catalog is not empty.
-
-
-
+        
+        Deletes the catalog that matches the supplied name. The caller must be a metastore admin or the owner
+        of the catalog.
+        
+        :param name: str
+          The name of the catalog.
+        :param force: bool (optional)
+          Force deletion even if the catalog is not empty.
+        
+        
+        
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> CatalogInfo
 
@@ -92,18 +92,18 @@ of the catalog.
             w.catalogs.delete(name=created.name, force=True)
 
         Get a catalog.
-
-Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the
-catalog, or a user that has the **USE_CATALOG** privilege set for their account.
-
-:param name: str
-  The name of the catalog.
-:param include_browse: bool (optional)
-  Whether to include catalogs in the response for which the principal can only access selective
-  metadata for
-
-:returns: :class:`CatalogInfo`
-
+        
+        Gets the specified catalog in a metastore. The caller must be a metastore admin, the owner of the
+        catalog, or a user that has the **USE_CATALOG** privilege set for their account.
+        
+        :param name: str
+          The name of the catalog.
+        :param include_browse: bool (optional)
+          Whether to include catalogs in the response for which the principal can only access selective
+          metadata
+        
+        :returns: :class:`CatalogInfo`
+        
 
     .. py:method:: list( [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[CatalogInfo]
 
@@ -120,28 +120,28 @@ catalog, or a user that has the **USE_CATALOG** privilege set for their account.
             all = w.catalogs.list(catalog.ListCatalogsRequest())
 
         List catalogs.
-
-Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
-retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the
-**USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the
-elements in the array.
-
-:param include_browse: bool (optional)
-  Whether to include catalogs in the response for which the principal can only access selective
-  metadata for
-:param max_results: int (optional)
-  Maximum number of catalogs to return. - when set to 0, the page length is set to a server configured
-  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-  value and a server configured value; - when set to a value less than 0, an invalid parameter error
-  is returned; - If not set, all valid catalogs are returned (not recommended). - Note: The number of
-  returned catalogs might be less than the specified max_results size, even zero. The only definitive
-  indication that no further catalogs can be fetched is when the next_page_token is unset from the
-  response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`CatalogInfo`
-
+        
+        Gets an array of catalogs in the metastore. If the caller is the metastore admin, all catalogs will be
+        retrieved. Otherwise, only catalogs owned by the caller (or for which the caller has the
+        **USE_CATALOG** privilege) will be retrieved. There is no guarantee of a specific ordering of the
+        elements in the array.
+        
+        :param include_browse: bool (optional)
+          Whether to include catalogs in the response for which the principal can only access selective
+          metadata
+        :param max_results: int (optional)
+          Maximum number of catalogs to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid catalogs are returned (not recommended). - Note: The number of
+          returned catalogs might be less than the specified max_results size, even zero. The only definitive
+          indication that no further catalogs can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CatalogInfo`
+        
 
     .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
 
@@ -164,25 +164,26 @@ elements in the array.
             w.catalogs.delete(name=created.name, force=True)
 
         Update a catalog.
-
-Updates the catalog that matches the supplied name. The caller must be either the owner of the
-catalog, or a metastore admin (when changing the owner field of the catalog).
-
-:param name: str
-  The name of the catalog.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
-  Whether predictive optimization should be enabled for this object and objects under it.
-:param isolation_mode: :class:`CatalogIsolationMode` (optional)
-  Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-:param new_name: str (optional)
-  New name for the catalog.
-:param options: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-:param owner: str (optional)
-  Username of current owner of catalog.
-:param properties: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-
-:returns: :class:`CatalogInfo`
+        
+        Updates the catalog that matches the supplied name. The caller must be either the owner of the
+        catalog, or a metastore admin (when changing the owner field of the catalog).
+        
+        :param name: str
+          The name of the catalog.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
+          Whether predictive optimization should be enabled for this object and objects under it.
+        :param isolation_mode: :class:`CatalogIsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name for the catalog.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        :param owner: str (optional)
+          Username of current owner of catalog.
+        :param properties: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        
+        :returns: :class:`CatalogInfo`
+        
\ No newline at end of file
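
A short sketch of the `max_results` contract described above; the SDK iterator is assumed to follow `next_page_token` internally, so callers simply iterate:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# max_results=0 lets the server pick the page length (the recommended
# setting); pagination happens inside the returned iterator.
for c in w.catalogs.list(max_results=0):
    print(c.name)
```
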
diff --git a/docs/workspace/catalog/connections.rst b/docs/workspace/catalog/connections.rst
index 32105ff61..b2637c2d0 100644
--- a/docs/workspace/catalog/connections.rst
+++ b/docs/workspace/catalog/connections.rst
@@ -5,13 +5,13 @@
 .. py:class:: ConnectionsAPI
 
     Connections allow for creating a connection to an external data source.
-
-A connection is an abstraction of an external data source that can be connected from Databricks Compute.
-Creating a connection object is the first step to managing external data sources within Unity Catalog,
-with the second step being creating a data object (catalog, schema, or table) using the connection. Data
-objects derived from a connection can be written to or read from similar to other Unity Catalog data
-objects based on cloud storage. Users may create different types of connections with each connection
-having a unique set of configuration options to support credential management and other settings.
+    
+    A connection is an abstraction of an external data source that can be connected from Databricks Compute.
+    Creating a connection object is the first step to managing external data sources within Unity Catalog,
+    with the second step being creating a data object (catalog, schema, or table) using the connection. Data
+    objects derived from a connection can be written to or read from similar to other Unity Catalog data
+    objects based on cloud storage. Users may create different types of connections with each connection
+    having a unique set of configuration options to support credential management and other settings.
 
     .. py:method:: create(name: str, connection_type: ConnectionType, options: Dict[str, str] [, comment: Optional[str], properties: Optional[Dict[str, str]], read_only: Optional[bool]]) -> ConnectionInfo
 
@@ -43,39 +43,39 @@ having a unique set of configuration options to support credential management an
             w.connections.delete(name=conn_create.name)
 
         Create a connection.
-
-Creates a new connection
-
-Creates a new connection to an external data source. It allows users to specify connection details and
-configurations for interaction with the external server.
-
-:param name: str
-  Name of the connection.
-:param connection_type: :class:`ConnectionType`
-  The type of connection.
-:param options: Dict[str,str]
-  A map of key-value properties attached to the securable.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param properties: Dict[str,str] (optional)
-  An object containing map of key-value properties attached to the connection.
-:param read_only: bool (optional)
-  If the connection is read only.
-
-:returns: :class:`ConnectionInfo`
-
+        
+        Creates a new connection
+        
+        Creates a new connection to an external data source. It allows users to specify connection details and
+        configurations for interaction with the external server.
+        
+        :param name: str
+          Name of the connection.
+        :param connection_type: :class:`ConnectionType`
+          The type of connection.
+        :param options: Dict[str,str]
+          A map of key-value properties attached to the securable.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param properties: Dict[str,str] (optional)
+          An object containing a map of key-value properties attached to the connection.
+        :param read_only: bool (optional)
+          If the connection is read only.
+        
+        :returns: :class:`ConnectionInfo`
+        
 
     .. py:method:: delete(name: str)
 
         Delete a connection.
-
-Deletes the connection that matches the supplied name.
-
-:param name: str
-  The name of the connection to be deleted.
-
-
-
+        
+        Deletes the connection that matches the supplied name.
+        
+        :param name: str
+          The name of the connection to be deleted.
+        
+        
+        
 
     .. py:method:: get(name: str) -> ConnectionInfo
 
@@ -119,14 +119,14 @@ Deletes the connection that matches the supplied name.
             w.connections.delete(name=conn_create.name)
 
         Get a connection.
-
-Gets a connection from it's name.
-
-:param name: str
-  Name of the connection.
-
-:returns: :class:`ConnectionInfo`
-
+        
+        Gets a connection from its name.
+        
+        :param name: str
+          Name of the connection.
+        
+        :returns: :class:`ConnectionInfo`
+        
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ConnectionInfo]
 
@@ -143,19 +143,19 @@ Gets a connection from it's name.
             conn_list = w.connections.list(catalog.ListConnectionsRequest())
 
         List connections.
-
-List all connections.
-
-:param max_results: int (optional)
-  Maximum number of connections to return. - If not set, all connections are returned (not
-  recommended). - when set to a value greater than 0, the page length is the minimum of this value and
-  a server configured value; - when set to 0, the page length is set to a server configured value
-  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ConnectionInfo`
-
+        
+        List all connections.
+        
+        :param max_results: int (optional)
+          Maximum number of connections to return. - If not set, all connections are returned (not
+          recommended). - when set to a value greater than 0, the page length is the minimum of this value and
+          a server configured value; - when set to 0, the page length is set to a server configured value
+          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ConnectionInfo`
+        
 
     .. py:method:: update(name: str, options: Dict[str, str] [, new_name: Optional[str], owner: Optional[str]]) -> ConnectionInfo
 
@@ -197,16 +197,17 @@ List all connections.
             w.connections.delete(name=conn_create.name)
 
         Update a connection.
-
-Updates the connection that matches the supplied name.
-
-:param name: str
-  Name of the connection.
-:param options: Dict[str,str]
-  A map of key-value properties attached to the securable.
-:param new_name: str (optional)
-  New name for the connection.
-:param owner: str (optional)
-  Username of current owner of the connection.
-
-:returns: :class:`ConnectionInfo`
+        
+        Updates the connection that matches the supplied name.
+        
+        :param name: str
+          Name of the connection.
+        :param options: Dict[str,str]
+          A map of key-value properties attached to the securable.
+        :param new_name: str (optional)
+          New name for the connection.
+        :param owner: str (optional)
+          Username of current owner of the connection.
+        
+        :returns: :class:`ConnectionInfo`
+        
\ No newline at end of file
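
A hedged sketch of the two-step flow described in the class docstring: first the connection, then a catalog on top of it (the option keys shown are illustrative assumptions):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import ConnectionType

w = WorkspaceClient()

# Step one: the connection object holding endpoint and credential options.
conn = w.connections.create(
    name="my-postgres",
    connection_type=ConnectionType.POSTGRESQL,
    options={"host": "db.example.com", "port": "5432",
             "user": "reader", "password": "..."},
)

# Step two: a catalog created on top of the connection, so its data
# objects are governed like any other Unity Catalog asset.
cat = w.catalogs.create(name="my_pg_catalog",
                        connection_name=conn.name,
                        options={"database": "analytics"})
print(cat.full_name)
```
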
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
index f8f0f81f5..3927e6351 100644
--- a/docs/workspace/catalog/credentials.rst
+++ b/docs/workspace/catalog/credentials.rst
@@ -5,188 +5,189 @@
 .. py:class:: CredentialsAPI
 
     A credential represents an authentication and authorization mechanism for accessing services on your cloud
-tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
-groups can access the credential.
-
-To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
-privilege. The user who creates the credential can delegate ownership to another user or group to manage
-permissions on it.
+    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+    groups can access the credential.
+    
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+    privilege. The user who creates the credential can delegate ownership to another user or group to manage
+    permissions on it.
 
     .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
 
         Create a credential.
-
-Creates a new credential. The type of credential to be created is determined by the **purpose** field,
-which should be either **SERVICE** or **STORAGE**.
-
-The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
-storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
-
-:param name: str
-  The credential name. The name must be unique among storage and service credentials within the
-  metastore.
-:param aws_iam_role: :class:`AwsIamRole` (optional)
-  The AWS IAM role configuration
-:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-  The Azure managed identity configuration.
-:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-  The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
-:param comment: str (optional)
-  Comment associated with the credential.
-:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
-  GCP long-lived credential. Databricks-created Google Cloud Storage service account.
-:param purpose: :class:`CredentialPurpose` (optional)
-  Indicates the purpose of the credential.
-:param read_only: bool (optional)
-  Whether the credential is usable only for read operations. Only applicable when purpose is
-  **STORAGE**.
-:param skip_validation: bool (optional)
-  Optional. Supplying true to this argument skips validation of the created set of credentials.
-
-:returns: :class:`CredentialInfo`
-
+        
+        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+        which should be either **SERVICE** or **STORAGE**.
+        
+        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+        
+        :param name: str
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Optional. Supplying true to this argument skips validation of the created set of credentials.
+        
+        :returns: :class:`CredentialInfo`
+        
 
     .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]])
 
         Delete a credential.
-
-Deletes a service or storage credential from the metastore. The caller must be an owner of the
-credential.
-
-:param name_arg: str
-  Name of the credential.
-:param force: bool (optional)
-  Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
-  external locations and external tables (when purpose is **STORAGE**).
-
-
-
+        
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        
+        
+        
 
     .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials
 
         Generate a temporary service credential.
-
-Returns a set of temporary credentials generated using the specified service credential. The caller
-must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
-
-:param credential_name: str
-  The name of the service credential used to generate a temporary credential
-:param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
-  The Azure cloud options to customize the requested temporary credential
-:param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
-  The GCP cloud options to customize the requested temporary credential
-
-:returns: :class:`TemporaryCredentials`
-
+        
+        Returns a set of temporary credentials generated using the specified service credential. The caller
+        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
+        
+        :param credential_name: str
+          The name of the service credential used to generate a temporary credential
+        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+          The Azure cloud options to customize the requested temporary credential
+        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+          The GCP cloud options to customize the requested temporary credential
+        
+        :returns: :class:`TemporaryCredentials`
+        
 
     .. py:method:: get_credential(name_arg: str) -> CredentialInfo
 
         Get a credential.
-
-Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
-owner of the credential, or have any permission on the credential.
-
-:param name_arg: str
-  Name of the credential.
-
-:returns: :class:`CredentialInfo`
-
+        
+        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+        owner of the credential, or have any permission on the credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        
+        :returns: :class:`CredentialInfo`
+        
 
     .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo]
 
         List credentials.
-
-Gets an array of credentials (as __CredentialInfo__ objects).
-
-The array is limited to only the credentials that the caller has permission to access. If the caller
-is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
-ordering of the elements in the array.
-
-:param max_results: int (optional)
-  Maximum number of credentials to return. - If not set, the default max page size is used. - When set
-  to a value greater than 0, the page length is the minimum of this value and a server-configured
-  value. - When set to 0, the page length is set to a server-configured value (recommended). - When
-  set to a value less than 0, an invalid parameter error is returned.
-:param page_token: str (optional)
-  Opaque token to retrieve the next page of results.
-:param purpose: :class:`CredentialPurpose` (optional)
-  Return only credentials for the specified purpose.
-
-:returns: Iterator over :class:`CredentialInfo`
-
+        
+        Gets an array of credentials (as __CredentialInfo__ objects).
+        
+        The array is limited to only the credentials that the caller has permission to access. If the caller
+        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+          to a value greater than 0, the page length is the minimum of this value and a server-configured
+          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+          set to a value less than 0, an invalid parameter error is returned.
+        :param page_token: str (optional)
+          Opaque token to retrieve the next page of results.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Return only credentials for the specified purpose.
+        
+        :returns: Iterator over :class:`CredentialInfo`
+        
 
     .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
 
         Update a credential.
-
-Updates a service or storage credential on the metastore.
-
-The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
-If the caller is a metastore admin, only the __owner__ field can be changed.
-
-:param name_arg: str
-  Name of the credential.
-:param aws_iam_role: :class:`AwsIamRole` (optional)
-  The AWS IAM role configuration
-:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-  The Azure managed identity configuration.
-:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-  The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
-:param comment: str (optional)
-  Comment associated with the credential.
-:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
-  GCP long-lived credential. Databricks-created Google Cloud Storage service account.
-:param force: bool (optional)
-  Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
-  external locations and external tables (when purpose is **STORAGE**).
-:param isolation_mode: :class:`IsolationMode` (optional)
-  Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-:param new_name: str (optional)
-  New name of credential.
-:param owner: str (optional)
-  Username of current owner of credential.
-:param read_only: bool (optional)
-  Whether the credential is usable only for read operations. Only applicable when purpose is
-  **STORAGE**.
-:param skip_validation: bool (optional)
-  Supply true to this argument to skip validation of the updated credential.
-
-:returns: :class:`CredentialInfo`
-
+        
+        Updates a service or storage credential on the metastore.
+        
+        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+        If the caller is a metastore admin, only the __owner__ field can be changed.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name of credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Supply true to this argument to skip validation of the updated credential.
+        
+        :returns: :class:`CredentialInfo`
+        
 
     .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse
 
         Validate a credential.
-
-Validates a credential.
-
-For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
-credential must be provided.
-
-For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
-__url__ need to be provided. If only one of them is provided, it will be used for validation. And if
-both are provided, the __url__ will be used for validation, and __external_location_name__ will be
-ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
-credential must be provided.
-
-The caller must be a metastore admin or the credential owner or have the required permission on the
-metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
-
-:param aws_iam_role: :class:`AwsIamRole` (optional)
-  The AWS IAM role configuration
-:param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
-  The Azure managed identity configuration.
-:param credential_name: str (optional)
-  Required. The name of an existing credential or long-lived cloud credential to validate.
-:param external_location_name: str (optional)
-  The name of an existing external location to validate. Only applicable for storage credentials
-  (purpose is **STORAGE**.)
-:param purpose: :class:`CredentialPurpose` (optional)
-  The purpose of the credential. This should only be used when the credential is specified.
-:param read_only: bool (optional)
-  Whether the credential is only usable for read operations. Only applicable for storage credentials
-  (purpose is **STORAGE**.)
-:param url: str (optional)
-  The external location url to validate. Only applicable when purpose is **STORAGE**.
-
-:returns: :class:`ValidateCredentialResponse`
+        
+        Validates a credential.
+        
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ needs to be provided. If only one of them is provided, it will be used for validation. If
+        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param credential_name: str (optional)
+          Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param purpose: :class:`CredentialPurpose` (optional)
+          The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.
+        
+        :returns: :class:`ValidateCredentialResponse`
+        
\ No newline at end of file
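
A minimal sketch of creating a service credential and minting temporary credentials from it; `AwsIamRole`, `CredentialPurpose`, and the `expiration_time` field are assumed from `databricks.sdk.service.catalog`, and the role ARN is a placeholder:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()

# A service credential wrapping a cloud IAM role.
cred = w.credentials.create_credential(
    name="my-service-cred",
    purpose=CredentialPurpose.SERVICE,
    aws_iam_role=AwsIamRole(role_arn="arn:aws:iam::123456789012:role/demo"),
)

# Exchange the long-lived credential for short-lived cloud credentials.
tmp = w.credentials.generate_temporary_service_credential(
    credential_name=cred.name)
print(tmp.expiration_time)  # field name assumed
```
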
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 13e4a90f0..fc60b18f6 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -5,15 +5,15 @@
 .. py:class:: ExternalLocationsAPI
 
     An external location is an object that combines a cloud storage path with a storage credential that
-authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
-access-control policies that control which users and groups can access the credential. If a user does not
-have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt
-to authenticate to your cloud tenant on the user’s behalf.
-
-Databricks recommends using external locations rather than using storage credentials directly.
-
-To create external locations, you must be a metastore admin or a user with the
-**CREATE_EXTERNAL_LOCATION** privilege.
+    authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
+    access-control policies that control which users and groups can access the credential. If a user does not
+    have access to an external location in Unity Catalog, the request fails and Unity Catalog does not attempt
+    to authenticate to your cloud tenant on the user’s behalf.
+    
+    Databricks recommends using external locations rather than using storage credentials directly.
+    
+    To create external locations, you must be a metastore admin or a user with the
+    **CREATE_EXTERNAL_LOCATION** privilege.
 
     .. py:method:: create(name: str, url: str, credential_name: str [, access_point: Optional[str], comment: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], read_only: Optional[bool], skip_validation: Optional[bool]]) -> ExternalLocationInfo
 
@@ -46,49 +46,49 @@ To create external locations, you must be a metastore admin or a user with the
             w.external_locations.delete(name=external_location.name)
 
         Create an external location.
-
-Creates a new external location entry in the metastore. The caller must be a metastore admin or have
-the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
-credential.
-
-:param name: str
-  Name of the external location.
-:param url: str
-  Path URL of the external location.
-:param credential_name: str
-  Name of the storage credential used with this location.
-:param access_point: str (optional)
-  The AWS access point to use when accesing s3 for this external location.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param encryption_details: :class:`EncryptionDetails` (optional)
-  Encryption options that apply to clients connecting to cloud storage.
-:param fallback: bool (optional)
-  Indicates whether fallback mode is enabled for this external location. When fallback mode is
-  enabled, the access to the location falls back to cluster credentials if UC credentials are not
-  sufficient.
-:param read_only: bool (optional)
-  Indicates whether the external location is read-only.
-:param skip_validation: bool (optional)
-  Skips validation of the storage credential associated with the external location.
-
-:returns: :class:`ExternalLocationInfo`
-
+        
+        Creates a new external location entry in the metastore. The caller must be a metastore admin or have
+        the **CREATE_EXTERNAL_LOCATION** privilege on both the metastore and the associated storage
+        credential.
+        
+        :param name: str
+          Name of the external location.
+        :param url: str
+          Path URL of the external location.
+        :param credential_name: str
+          Name of the storage credential used with this location.
+        :param access_point: str (optional)
+          The AWS access point to use when accessing S3 for this external location.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param encryption_details: :class:`EncryptionDetails` (optional)
+          Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
+        :param read_only: bool (optional)
+          Indicates whether the external location is read-only.
+        :param skip_validation: bool (optional)
+          Skips validation of the storage credential associated with the external location.
+        
+        :returns: :class:`ExternalLocationInfo`
+        
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete an external location.
-
-Deletes the specified external location from the metastore. The caller must be the owner of the
-external location.
-
-:param name: str
-  Name of the external location.
-:param force: bool (optional)
-  Force deletion even if there are dependent external tables or mounts.
-
-
-
+        
+        Deletes the specified external location from the metastore. The caller must be the owner of the
+        external location.
+        
+        :param name: str
+          Name of the external location.
+        :param force: bool (optional)
+          Force deletion even if there are dependent external tables or mounts.
+        
+        
+        
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> ExternalLocationInfo
 
@@ -120,18 +120,18 @@ external location.
             w.external_locations.delete(delete=created.name)
 
         Get an external location.
-
-Gets an external location from the metastore. The caller must be either a metastore admin, the owner
-of the external location, or a user that has some privilege on the external location.
-
-:param name: str
-  Name of the external location.
-:param include_browse: bool (optional)
-  Whether to include external locations in the response for which the principal can only access
-  selective metadata for
-
-:returns: :class:`ExternalLocationInfo`
-
+        
+        Gets an external location from the metastore. The caller must be either a metastore admin, the owner
+        of the external location, or a user that has some privilege on the external location.
+        
+        :param name: str
+          Name of the external location.
+        :param include_browse: bool (optional)
+          Whether to include external locations in the response for which the principal can only access
+          selective metadata.
+        
+        :returns: :class:`ExternalLocationInfo`
+        
 
     .. py:method:: list( [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ExternalLocationInfo]
 
@@ -148,24 +148,24 @@ of the external location, or a user that has some privilege on the external loca
             all = w.external_locations.list(catalog.ListExternalLocationsRequest())
 
         List external locations.
-
-Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
-must be a metastore admin, the owner of the external location, or a user that has some privilege on
-the external location. There is no guarantee of a specific ordering of the elements in the array.
-
-:param include_browse: bool (optional)
-  Whether to include external locations in the response for which the principal can only access
-  selective metadata for
-:param max_results: int (optional)
-  Maximum number of external locations to return. If not set, all the external locations are returned
-  (not recommended). - when set to a value greater than 0, the page length is the minimum of this
-  value and a server configured value; - when set to 0, the page length is set to a server configured
-  value (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ExternalLocationInfo`
-
+        
+        Gets an array of external locations (__ExternalLocationInfo__ objects) from the metastore. The caller
+        must be a metastore admin, the owner of the external location, or a user that has some privilege on
+        the external location. There is no guarantee of a specific ordering of the elements in the array.
+        
+        :param include_browse: bool (optional)
+          Whether to include external locations in the response for which the principal can only access
+          selective metadata.
+        :param max_results: int (optional)
+          Maximum number of external locations to return. If not set, all the external locations are returned
+          (not recommended). - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ExternalLocationInfo`
+        
 
     .. py:method:: update(name: str [, access_point: Optional[str], comment: Optional[str], credential_name: Optional[str], encryption_details: Optional[EncryptionDetails], fallback: Optional[bool], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool], url: Optional[str]]) -> ExternalLocationInfo
 
@@ -199,37 +199,38 @@ the external location. There is no guarantee of a specific ordering of the eleme
             w.external_locations.delete(name=created.name)
 
         Update an external location.
-
-Updates an external location in the metastore. The caller must be the owner of the external location,
-or be a metastore admin. In the second case, the admin can only update the name of the external
-location.
-
-:param name: str
-  Name of the external location.
-:param access_point: str (optional)
-  The AWS access point to use when accesing s3 for this external location.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param credential_name: str (optional)
-  Name of the storage credential used with this location.
-:param encryption_details: :class:`EncryptionDetails` (optional)
-  Encryption options that apply to clients connecting to cloud storage.
-:param fallback: bool (optional)
-  Indicates whether fallback mode is enabled for this external location. When fallback mode is
-  enabled, the access to the location falls back to cluster credentials if UC credentials are not
-  sufficient.
-:param force: bool (optional)
-  Force update even if changing url invalidates dependent external tables or mounts.
-:param isolation_mode: :class:`IsolationMode` (optional)
-:param new_name: str (optional)
-  New name for the external location.
-:param owner: str (optional)
-  The owner of the external location.
-:param read_only: bool (optional)
-  Indicates whether the external location is read-only.
-:param skip_validation: bool (optional)
-  Skips validation of the storage credential associated with the external location.
-:param url: str (optional)
-  Path URL of the external location.
-
-:returns: :class:`ExternalLocationInfo`
+        
+        Updates an external location in the metastore. The caller must be the owner of the external location,
+        or be a metastore admin. In the second case, the admin can only update the name of the external
+        location.
+        
+        :param name: str
+          Name of the external location.
+        :param access_point: str (optional)
+          The AWS access point to use when accessing S3 for this external location.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param credential_name: str (optional)
+          Name of the storage credential used with this location.
+        :param encryption_details: :class:`EncryptionDetails` (optional)
+          Encryption options that apply to clients connecting to cloud storage.
+        :param fallback: bool (optional)
+          Indicates whether fallback mode is enabled for this external location. When fallback mode is
+          enabled, the access to the location falls back to cluster credentials if UC credentials are not
+          sufficient.
+        :param force: bool (optional)
+          Force update even if changing url invalidates dependent external tables or mounts.
+        :param isolation_mode: :class:`IsolationMode` (optional)
+        :param new_name: str (optional)
+          New name for the external location.
+        :param owner: str (optional)
+          The owner of the external location.
+        :param read_only: bool (optional)
+          Indicates whether the external location is read-only.
+        :param skip_validation: bool (optional)
+          Skips validation of the storage credential associated with the external location.
+        :param url: str (optional)
+          Path URL of the external location.
+        
+        :returns: :class:`ExternalLocationInfo`
+        
\ No newline at end of file
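As a quick orientation for the re-indented `ExternalLocationsAPI` docstrings above, here is a minimal usage sketch; it assumes an already-configured `WorkspaceClient`, and the location name, credential name, and bucket URL are hypothetical placeholders.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Create an external location backed by an existing storage credential
# ("my_credential" and the bucket path are placeholders).
loc = w.external_locations.create(name='my_location',
                                  credential_name='my_credential',
                                  url='s3://my-bucket/path')

# Read it back, enumerate all locations visible to the caller, then clean up.
loc = w.external_locations.get(name=loc.name)
for ext in w.external_locations.list():
    print(ext.name, ext.url)
w.external_locations.delete(name=loc.name)
```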
diff --git a/docs/workspace/catalog/functions.rst b/docs/workspace/catalog/functions.rst
index 61537556b..646488074 100644
--- a/docs/workspace/catalog/functions.rst
+++ b/docs/workspace/catalog/functions.rst
@@ -5,112 +5,113 @@
 .. py:class:: FunctionsAPI
 
     Functions implement User-Defined Functions (UDFs) in Unity Catalog.
-
-The function implementation can be any SQL expression or Query, and it can be invoked wherever a table
-reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it
-can be referenced with the form __catalog_name__.__schema_name__.__function_name__.
+    
+    The function implementation can be any SQL expression or Query, and it can be invoked wherever a table
+    reference is allowed in a query. In Unity Catalog, a function resides at the same level as a table, so it
+    can be referenced with the form __catalog_name__.__schema_name__.__function_name__.
 
     .. py:method:: create(function_info: CreateFunction) -> FunctionInfo
 
         Create a function.
-
-**WARNING: This API is experimental and will change in future versions**
-
-Creates a new function
-
-The user must have the following permissions in order for the function to be created: -
-**USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
-function's parent schema
-
-:param function_info: :class:`CreateFunction`
-  Partial __FunctionInfo__ specifying the function to be created.
-
-:returns: :class:`FunctionInfo`
-
+        
+        **WARNING: This API is experimental and will change in future versions**
+        
+        Creates a new function.
+        
+        The user must have the following permissions in order for the function to be created: -
+        **USE_CATALOG** on the function's parent catalog - **USE_SCHEMA** and **CREATE_FUNCTION** on the
+        function's parent schema
+        
+        :param function_info: :class:`CreateFunction`
+          Partial __FunctionInfo__ specifying the function to be created.
+        
+        :returns: :class:`FunctionInfo`
+        
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a function.
-
-Deletes the function that matches the supplied name. For the deletion to succeed, the user must
-satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the
-owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog -
-Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog
-and the **USE_SCHEMA** privilege on its parent schema
-
-:param name: str
-  The fully-qualified name of the function (of the form
-  __catalog_name__.__schema_name__.__function__name__).
-:param force: bool (optional)
-  Force deletion even if the function is notempty.
-
-
-
+        
+        Deletes the function that matches the supplied name. For the deletion to succeed, the user must
+        satisfy one of the following conditions: - Is the owner of the function's parent catalog - Is the
+        owner of the function's parent schema and have the **USE_CATALOG** privilege on its parent catalog -
+        Is the owner of the function itself and have both the **USE_CATALOG** privilege on its parent catalog
+        and the **USE_SCHEMA** privilege on its parent schema
+        
+        :param name: str
+          The fully-qualified name of the function (of the form
+          __catalog_name__.__schema_name__.__function__name__).
+        :param force: bool (optional)
+          Force deletion even if the function is not empty.
+        
+        
+        
 
     .. py:method:: get(name: str [, include_browse: Optional[bool]]) -> FunctionInfo
 
         Get a function.
-
-Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must
-satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's
-parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner
-of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the
-**USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the
-function itself
-
-:param name: str
-  The fully-qualified name of the function (of the form
-  __catalog_name__.__schema_name__.__function__name__).
-:param include_browse: bool (optional)
-  Whether to include functions in the response for which the principal can only access selective
-  metadata for
-
-:returns: :class:`FunctionInfo`
-
+        
+        Gets a function from within a parent catalog and schema. For the fetch to succeed, the user must
+        satisfy one of the following requirements: - Is a metastore admin - Is an owner of the function's
+        parent catalog - Have the **USE_CATALOG** privilege on the function's parent catalog and be the owner
+        of the function - Have the **USE_CATALOG** privilege on the function's parent catalog, the
+        **USE_SCHEMA** privilege on the function's parent schema, and the **EXECUTE** privilege on the
+        function itself
+        
+        :param name: str
+          The fully-qualified name of the function (of the form
+          __catalog_name__.__schema_name__.__function__name__).
+        :param include_browse: bool (optional)
+          Whether to include functions in the response for which the principal can only access selective
+          metadata.
+        
+        :returns: :class:`FunctionInfo`
+        
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[FunctionInfo]
 
         List functions.
-
-List functions within the specified parent catalog and schema. If the user is a metastore admin, all
-functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege
-on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only
-functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is
-no guarantee of a specific ordering of the elements in the array.
-
-:param catalog_name: str
-  Name of parent catalog for functions of interest.
-:param schema_name: str
-  Parent schema of functions.
-:param include_browse: bool (optional)
-  Whether to include functions in the response for which the principal can only access selective
-  metadata for
-:param max_results: int (optional)
-  Maximum number of functions to return. If not set, all the functions are returned (not recommended).
-  - when set to a value greater than 0, the page length is the minimum of this value and a server
-  configured value; - when set to 0, the page length is set to a server configured value
-  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`FunctionInfo`
-
+        
+        List functions within the specified parent catalog and schema. If the user is a metastore admin, all
+        functions are returned in the output list. Otherwise, the user must have the **USE_CATALOG** privilege
+        on the catalog and the **USE_SCHEMA** privilege on the schema, and the output list contains only
+        functions for which either the user has the **EXECUTE** privilege or the user is the owner. There is
+        no guarantee of a specific ordering of the elements in the array.
+        
+        :param catalog_name: str
+          Name of parent catalog for functions of interest.
+        :param schema_name: str
+          Parent schema of functions.
+        :param include_browse: bool (optional)
+          Whether to include functions in the response for which the principal can only access selective
+          metadata.
+        :param max_results: int (optional)
+          Maximum number of functions to return. If not set, all the functions are returned (not recommended).
+          - when set to a value greater than 0, the page length is the minimum of this value and a server
+          configured value; - when set to 0, the page length is set to a server configured value
+          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`FunctionInfo`
+        
 
     .. py:method:: update(name: str [, owner: Optional[str]]) -> FunctionInfo
 
         Update a function.
-
-Updates the function that matches the supplied name. Only the owner of the function can be updated. If
-the user is not a metastore admin, the user must be a member of the group that is the new function
-owner. - Is a metastore admin - Is the owner of the function's parent catalog - Is the owner of the
-function's parent schema and has the **USE_CATALOG** privilege on its parent catalog - Is the owner of
-the function itself and has the **USE_CATALOG** privilege on its parent catalog as well as the
-**USE_SCHEMA** privilege on the function's parent schema.
-
-:param name: str
-  The fully-qualified name of the function (of the form
-  __catalog_name__.__schema_name__.__function__name__).
-:param owner: str (optional)
-  Username of current owner of function.
-
-:returns: :class:`FunctionInfo`
+        
+        Updates the function that matches the supplied name. Only the owner of the function can be updated. If
+        the user is not a metastore admin, the user must be a member of the group that is the new function
+        owner. The caller must satisfy one of the following conditions: - Is a metastore admin - Is the owner
+        of the function's parent catalog - Is the owner of the function's parent schema and has the
+        **USE_CATALOG** privilege on its parent catalog - Is the owner of the function itself and has the
+        **USE_CATALOG** privilege on its parent catalog as well as the **USE_SCHEMA** privilege on the function's parent schema.
+        
+        :param name: str
+          The fully-qualified name of the function (of the form
+          __catalog_name__.__schema_name__.__function__name__).
+        :param owner: str (optional)
+          Username of current owner of function.
+        
+        :returns: :class:`FunctionInfo`
+        
\ No newline at end of file
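A minimal sketch of the `FunctionsAPI` surface documented above, assuming a configured `WorkspaceClient`; the catalog, schema, and function names are hypothetical.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# List UDFs under a parent catalog and schema, then fetch one by its
# fully-qualified three-level name (all names are placeholders).
for fn in w.functions.list(catalog_name='main', schema_name='default'):
    print(fn.full_name)

fn = w.functions.get(name='main.default.my_udf')
```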
diff --git a/docs/workspace/catalog/grants.rst b/docs/workspace/catalog/grants.rst
index 20c63fc27..8def7ff83 100644
--- a/docs/workspace/catalog/grants.rst
+++ b/docs/workspace/catalog/grants.rst
@@ -5,14 +5,14 @@
 .. py:class:: GrantsAPI
 
     In Unity Catalog, data is secure by default. Initially, users have no access to data in a metastore.
-Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or
-schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are
-inherited downward.
-
-Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that
-granting a privilege on the catalog automatically grants the privilege to all current and future objects
-within the catalog. Similarly, privileges granted on a schema are inherited by all current and future
-objects within that schema.
+    Access can be granted by either a metastore admin, the owner of an object, or the owner of the catalog or
+    schema that contains the object. Securable objects in Unity Catalog are hierarchical and privileges are
+    inherited downward.
+    
+    Securable objects in Unity Catalog are hierarchical and privileges are inherited downward. This means that
+    granting a privilege on the catalog automatically grants the privilege to all current and future objects
+    within the catalog. Similarly, privileges granted on a schema are inherited by all current and future
+    objects within that schema.
 
     .. py:method:: get(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> PermissionsList
 
@@ -52,18 +52,18 @@ objects within that schema.
             w.tables.delete(full_name=table_full_name)
 
         Get permissions.
-
-Gets the permissions for a securable.
-
-:param securable_type: :class:`SecurableType`
-  Type of securable.
-:param full_name: str
-  Full name of securable.
-:param principal: str (optional)
-  If provided, only the permissions for the specified principal (user or group) are returned.
-
-:returns: :class:`PermissionsList`
-
+        
+        Gets the permissions for a securable.
+        
+        :param securable_type: :class:`SecurableType`
+          Type of securable.
+        :param full_name: str
+          Full name of securable.
+        :param principal: str (optional)
+          If provided, only the permissions for the specified principal (user or group) are returned.
+        
+        :returns: :class:`PermissionsList`
+        
 
     .. py:method:: get_effective(securable_type: SecurableType, full_name: str [, principal: Optional[str]]) -> EffectivePermissionsList
 
@@ -103,19 +103,19 @@ Gets the permissions for a securable.
             w.tables.delete(full_name=table_full_name)
 
         Get effective permissions.
-
-Gets the effective permissions for a securable.
-
-:param securable_type: :class:`SecurableType`
-  Type of securable.
-:param full_name: str
-  Full name of securable.
-:param principal: str (optional)
-  If provided, only the effective permissions for the specified principal (user or group) are
-  returned.
-
-:returns: :class:`EffectivePermissionsList`
-
+        
+        Gets the effective permissions for a securable.
+        
+        :param securable_type: :class:`SecurableType`
+          Type of securable.
+        :param full_name: str
+          Full name of securable.
+        :param principal: str (optional)
+          If provided, only the effective permissions for the specified principal (user or group) are
+          returned.
+        
+        :returns: :class:`EffectivePermissionsList`
+        
 
     .. py:method:: update(securable_type: SecurableType, full_name: str [, changes: Optional[List[PermissionsChange]]]) -> PermissionsList
 
@@ -162,14 +162,15 @@ Gets the effective permissions for a securable.
             w.tables.delete(full_name=table_full_name)
 
         Update permissions.
-
-Updates the permissions for a securable.
-
-:param securable_type: :class:`SecurableType`
-  Type of securable.
-:param full_name: str
-  Full name of securable.
-:param changes: List[:class:`PermissionsChange`] (optional)
-  Array of permissions change objects.
-
-:returns: :class:`PermissionsList`
+        
+        Updates the permissions for a securable.
+        
+        :param securable_type: :class:`SecurableType`
+          Type of securable.
+        :param full_name: str
+          Full name of securable.
+        :param changes: List[:class:`PermissionsChange`] (optional)
+          Array of permissions change objects.
+        
+        :returns: :class:`PermissionsList`
+        
\ No newline at end of file
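The grant-then-read-back flow documented above, as a minimal sketch; the table name and principal are hypothetical placeholders and a configured `WorkspaceClient` is assumed.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import catalog

w = WorkspaceClient()

table_full_name = 'main.default.my_table'  # placeholder

# Grant SELECT on the table to a group, then read the permissions back.
w.grants.update(securable_type=catalog.SecurableType.TABLE,
                full_name=table_full_name,
                changes=[
                    catalog.PermissionsChange(add=[catalog.Privilege.SELECT],
                                              principal='data-consumers')
                ])
grants = w.grants.get(securable_type=catalog.SecurableType.TABLE,
                      full_name=table_full_name)
```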
diff --git a/docs/workspace/catalog/metastores.rst b/docs/workspace/catalog/metastores.rst
index f1ab5ff61..01a936e0b 100644
--- a/docs/workspace/catalog/metastores.rst
+++ b/docs/workspace/catalog/metastores.rst
@@ -5,16 +5,16 @@
 .. py:class:: MetastoresAPI
 
     A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and
-views) and the permissions that govern access to them. Databricks account admins can create metastores and
-assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use
-Unity Catalog, it must have a Unity Catalog metastore attached.
-
-Each metastore is configured with a root storage location in a cloud storage account. This storage
-location is used for metadata and managed tables data.
-
-NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity
-Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is
-available in a catalog named hive_metastore.
+    views) and the permissions that govern access to them. Databricks account admins can create metastores and
+    assign them to Databricks workspaces to control which workloads use each metastore. For a workspace to use
+    Unity Catalog, it must have a Unity Catalog metastore attached.
+    
+    Each metastore is configured with a root storage location in a cloud storage account. This storage
+    location is used for metadata and managed tables data.
+    
+    NOTE: This metastore is distinct from the metastore included in Databricks workspaces created before Unity
+    Catalog was released. If your workspace includes a legacy Hive metastore, the data in that metastore is
+    available in a catalog named hive_metastore.
 
     .. py:method:: assign(workspace_id: int, metastore_id: str, default_catalog_name: str)
 
@@ -42,21 +42,21 @@ available in a catalog named hive_metastore.
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Create an assignment.
-
-Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
-overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
-admin.
-
-:param workspace_id: int
-  A workspace ID.
-:param metastore_id: str
-  The unique ID of the metastore.
-:param default_catalog_name: str
-  The name of the default catalog in the metastore. This field is depracted. Please use "Default
-  Namespace API" to configure the default catalog for a Databricks workspace.
-
-
-
+        
+        Creates a new metastore assignment. If an assignment for the same __workspace_id__ exists, it will be
+        overwritten by the new __metastore_id__ and __default_catalog_name__. The caller must be an account
+        admin.
+        
+        :param workspace_id: int
+          A workspace ID.
+        :param metastore_id: str
+          The unique ID of the metastore.
+        :param default_catalog_name: str
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
+        
+        
+        
 
     .. py:method:: create(name: str [, region: Optional[str], storage_root: Optional[str]]) -> MetastoreInfo
 
@@ -80,23 +80,23 @@ admin.
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Create a metastore.
-
-Creates a new metastore based on a provided name and optional storage root path. By default (if the
-__owner__ field is not set), the owner of the new metastore is the user calling the
-__createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is
-assigned to the System User instead.
-
-:param name: str
-  The user-specified name of the metastore.
-:param region: str (optional)
-  Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in
-  the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted,
-  the region of the workspace receiving the request will be used.
-:param storage_root: str (optional)
-  The storage root URL for metastore
-
-:returns: :class:`MetastoreInfo`
-
+        
+        Creates a new metastore based on a provided name and optional storage root path. By default (if the
+        __owner__ field is not set), the owner of the new metastore is the user calling the
+        __createMetastore__ API. If the __owner__ field is set to the empty string (**""**), the ownership is
+        assigned to the System User instead.
+        
+        :param name: str
+          The user-specified name of the metastore.
+        :param region: str (optional)
+          Cloud region which the metastore serves (e.g., `us-west-2`, `westus`). The field can be omitted in
+          the __workspace-level__ __API__ but not in the __account-level__ __API__. If this field is omitted,
+          the region of the workspace receiving the request will be used.
+        :param storage_root: str (optional)
+          The storage root URL for the metastore.
+        
+        :returns: :class:`MetastoreInfo`
+        
 
     .. py:method:: current() -> MetastoreAssignment
 
@@ -112,25 +112,25 @@ assigned to the System User instead.
             current_metastore = w.metastores.current()
 
         Get metastore assignment for workspace.
-
-Gets the metastore assignment for the workspace being accessed.
-
-:returns: :class:`MetastoreAssignment`
-
+        
+        Gets the metastore assignment for the workspace being accessed.
+        
+        :returns: :class:`MetastoreAssignment`
+        
 
     .. py:method:: delete(id: str [, force: Optional[bool]])
 
         Delete a metastore.
-
-Deletes a metastore. The caller must be a metastore admin.
-
-:param id: str
-  Unique ID of the metastore.
-:param force: bool (optional)
-  Force deletion even if the metastore is not empty. Default is false.
-
-
-
+        
+        Deletes a metastore. The caller must be a metastore admin.
+        
+        :param id: str
+          Unique ID of the metastore.
+        :param force: bool (optional)
+          Force deletion even if the metastore is not empty. Default is false.
+        
+        
+        
 
     .. py:method:: get(id: str) -> MetastoreInfo
 
@@ -156,15 +156,15 @@ Deletes a metastore. The caller must be a metastore admin.
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Get a metastore.
-
-Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this
-info.
-
-:param id: str
-  Unique ID of the metastore.
-
-:returns: :class:`MetastoreInfo`
-
+        
+        Gets a metastore that matches the supplied ID. The caller must be a metastore admin to retrieve this
+        info.
+        
+        :param id: str
+          Unique ID of the metastore.
+        
+        :returns: :class:`MetastoreInfo`
+        
 
     .. py:method:: list() -> Iterator[MetastoreInfo]
 
@@ -180,12 +180,12 @@ info.
             all = w.metastores.list()
 
         List metastores.
-
-Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin
-to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.
-
-:returns: Iterator over :class:`MetastoreInfo`
-
+        
+        Gets an array of the available metastores (as __MetastoreInfo__ objects). The caller must be an admin
+        to retrieve this info. There is no guarantee of a specific ordering of the elements in the array.
+        
+        :returns: Iterator over :class:`MetastoreInfo`
+        
 
     .. py:method:: summary() -> GetMetastoreSummaryResponse
 
@@ -201,12 +201,12 @@ to retrieve this info. There is no guarantee of a specific ordering of the eleme
             summary = w.metastores.summary()
 
         Get a metastore summary.
-
-Gets information about a metastore. This summary includes the storage credential, the cloud vendor,
-the cloud region, and the global metastore ID.
-
-:returns: :class:`GetMetastoreSummaryResponse`
-
+        
+        Gets information about a metastore. This summary includes the storage credential, the cloud vendor,
+        the cloud region, and the global metastore ID.
+        
+        :returns: :class:`GetMetastoreSummaryResponse`
+        
 
     .. py:method:: unassign(workspace_id: int, metastore_id: str)
 
@@ -234,16 +234,16 @@ the cloud region, and the global metastore ID.
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Delete an assignment.
-
-Deletes a metastore assignment. The caller must be an account administrator.
-
-:param workspace_id: int
-  A workspace ID.
-:param metastore_id: str
-  Query for the ID of the metastore to delete.
-
-
-
+        
+        Deletes a metastore assignment. The caller must be an account administrator.
+        
+        :param workspace_id: int
+          A workspace ID.
+        :param metastore_id: str
+          Query for the ID of the metastore to delete.
+        
+        
+        
 
     .. py:method:: update(id: str [, delta_sharing_organization_name: Optional[str], delta_sharing_recipient_token_lifetime_in_seconds: Optional[int], delta_sharing_scope: Optional[UpdateMetastoreDeltaSharingScope], new_name: Optional[str], owner: Optional[str], privilege_model_version: Optional[str], storage_root_credential_id: Optional[str]]) -> MetastoreInfo
 
@@ -269,46 +269,47 @@ Deletes a metastore assignment. The caller must be an account administrator.
             w.metastores.delete(id=created.metastore_id, force=True)
 
         Update a metastore.
-
-Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__
-field is set to the empty string (**""**), the ownership is updated to the System User.
-
-:param id: str
-  Unique ID of the metastore.
-:param delta_sharing_organization_name: str (optional)
-  The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta
-  Sharing as the official name.
-:param delta_sharing_recipient_token_lifetime_in_seconds: int (optional)
-  The lifetime of delta sharing recipient token in seconds.
-:param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional)
-  The scope of Delta Sharing enabled for the metastore.
-:param new_name: str (optional)
-  New name for the metastore.
-:param owner: str (optional)
-  The owner of the metastore.
-:param privilege_model_version: str (optional)
-  Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).
-:param storage_root_credential_id: str (optional)
-  UUID of storage credential to access the metastore storage_root.
-
-:returns: :class:`MetastoreInfo`
-
+        
+        Updates information for a specific metastore. The caller must be a metastore admin. If the __owner__
+        field is set to the empty string (**""**), the ownership is updated to the System User.
+        
+        :param id: str
+          Unique ID of the metastore.
+        :param delta_sharing_organization_name: str (optional)
+          The organization name of a Delta Sharing entity, to be used in Databricks-to-Databricks Delta
+          Sharing as the official name.
+        :param delta_sharing_recipient_token_lifetime_in_seconds: int (optional)
+          The lifetime of the delta sharing recipient token in seconds.
+        :param delta_sharing_scope: :class:`UpdateMetastoreDeltaSharingScope` (optional)
+          The scope of Delta Sharing enabled for the metastore.
+        :param new_name: str (optional)
+          New name for the metastore.
+        :param owner: str (optional)
+          The owner of the metastore.
+        :param privilege_model_version: str (optional)
+          Privilege model version of the metastore, of the form `major.minor` (e.g., `1.0`).
+        :param storage_root_credential_id: str (optional)
+          UUID of storage credential to access the metastore storage_root.
+        
+        :returns: :class:`MetastoreInfo`
+        
 
     .. py:method:: update_assignment(workspace_id: int [, default_catalog_name: Optional[str], metastore_id: Optional[str]])
 
         Update an assignment.
-
-Updates a metastore assignment. This operation can be used to update __metastore_id__ or
-__default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore.
-The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a
-Workspace admin.
-
-:param workspace_id: int
-  A workspace ID.
-:param default_catalog_name: str (optional)
-  The name of the default catalog in the metastore. This field is depracted. Please use "Default
-  Namespace API" to configure the default catalog for a Databricks workspace.
-:param metastore_id: str (optional)
-  The unique ID of the metastore.
-
-
+        
+        Updates a metastore assignment. This operation can be used to update __metastore_id__ or
+        __default_catalog_name__ for a specified Workspace, if the Workspace is already assigned a metastore.
+        The caller must be an account admin to update __metastore_id__; otherwise, the caller can be a
+        Workspace admin.
+        
+        :param workspace_id: int
+          A workspace ID.
+        :param default_catalog_name: str (optional)
+          The name of the default catalog in the metastore. This field is deprecated. Please use "Default
+          Namespace API" to configure the default catalog for a Databricks workspace.
+        :param metastore_id: str (optional)
+          The unique ID of the metastore.
+        
+        
+        
\ No newline at end of file
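A read-only sketch of the `MetastoresAPI` calls documented above, assuming a configured `WorkspaceClient` in a workspace that already has a metastore attached.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Inspect the metastore assignment and summary for the current workspace.
assignment = w.metastores.current()
summary = w.metastores.summary()
print(assignment.metastore_id, summary.cloud, summary.region)

# Enumerate all available metastores (caller must be an admin).
for m in w.metastores.list():
    print(m.metastore_id, m.name)
```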
diff --git a/docs/workspace/catalog/model_versions.rst b/docs/workspace/catalog/model_versions.rst
index 018379273..bae6f25f8 100644
--- a/docs/workspace/catalog/model_versions.rst
+++ b/docs/workspace/catalog/model_versions.rst
@@ -5,124 +5,125 @@
 .. py:class:: ModelVersionsAPI
 
     Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
-provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
-workspaces.
-
-This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more
-details, see the [registered models API docs](/api/workspace/registeredmodels).
+    provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
+    workspaces.
+    
+    This API reference documents the REST endpoints for managing model versions in Unity Catalog. For more
+    details, see the [registered models API docs](/api/workspace/registeredmodels).
 
     .. py:method:: delete(full_name: str, version: int)
 
         Delete a Model Version.
-
-Deletes a model version from the specified registered model. Any aliases assigned to the model version
-will also be deleted.
-
-The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
-the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the model version
-:param version: int
-  The integer version number of the model version
-
-
-
+        
+        Deletes a model version from the specified registered model. Any aliases assigned to the model version
+        will also be deleted.
+        
+        The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
+        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the model version
+        :param version: int
+          The integer version number of the model version
+        
+        
+        
 
     .. py:method:: get(full_name: str, version: int [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> ModelVersionInfo
 
         Get a Model Version.
-
-Get a model version.
-
-The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent
-registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the model version
-:param version: int
-  The integer version number of the model version
-:param include_aliases: bool (optional)
-  Whether to include aliases associated with the model version in the response
-:param include_browse: bool (optional)
-  Whether to include model versions in the response for which the principal can only access selective
-  metadata for
-
-:returns: :class:`ModelVersionInfo`
-
+        
+        Get a model version.
+        
+        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the parent
+        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the model version
+        :param version: int
+          The integer version number of the model version
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
+        :param include_browse: bool (optional)
+          Whether to include model versions in the response for which the principal can only access selective
+          metadata.
+        
+        :returns: :class:`ModelVersionInfo`
+        
 
     .. py:method:: get_by_alias(full_name: str, alias: str [, include_aliases: Optional[bool]]) -> ModelVersionInfo
 
         Get Model Version By Alias.
-
-Get a model version by alias.
-
-The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
-registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the registered model
-:param alias: str
-  The name of the alias
-:param include_aliases: bool (optional)
-  Whether to include aliases associated with the model version in the response
-
-:returns: :class:`ModelVersionInfo`
-
+        
+        Get a model version by alias.
+        
+        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
+        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the registered model
+        :param alias: str
+          The name of the alias
+        :param include_aliases: bool (optional)
+          Whether to include aliases associated with the model version in the response
+        
+        :returns: :class:`ModelVersionInfo`
+        
 
     .. py:method:: list(full_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ModelVersionInfo]
 
         List Model Versions.
-
-List model versions. You can list model versions under a particular schema, or list all model versions
-in the current metastore.
-
-The returned models are filtered based on the privileges of the calling user. For example, the
-metastore admin is able to list all the model versions. A regular user needs to be the owner or have
-the **EXECUTE** privilege on the parent registered model to recieve the model versions in the
-response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-There is no guarantee of a specific ordering of the elements in the response. The elements in the
-response will not contain any aliases or tags.
-
-:param full_name: str
-  The full three-level name of the registered model under which to list model versions
-:param include_browse: bool (optional)
-  Whether to include model versions in the response for which the principal can only access selective
-  metadata for
-:param max_results: int (optional)
-  Maximum number of model versions to return. If not set, the page length is set to a server
-  configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length is the
-  minimum of this value and a server configured value(1000, as of 1/3/2024); - when set to 0, the page
-  length is set to a server configured value (100, as of 1/3/2024) (recommended); - when set to a
-  value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ModelVersionInfo`
-
+        
+        List model versions. You can list model versions under a particular schema, or list all model versions
+        in the current metastore.
+        
+        The returned models are filtered based on the privileges of the calling user. For example, the
+        metastore admin is able to list all the model versions. A regular user needs to be the owner or have
+        the **EXECUTE** privilege on the parent registered model to receive the model versions in the
+        response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        There is no guarantee of a specific ordering of the elements in the response. The elements in the
+        response will not contain any aliases or tags.
+        
+        :param full_name: str
+          The full three-level name of the registered model under which to list model versions
+        :param include_browse: bool (optional)
+          Whether to include model versions in the response for which the principal can only access selective
+          metadata.
+        :param max_results: int (optional)
+          Maximum number of model versions to return. If not set, the page length is set to a server
+          configured value (100, as of 1/3/2024). - when set to a value greater than 0, the page length is the
+          minimum of this value and a server configured value (1000, as of 1/3/2024); - when set to 0, the page
+          length is set to a server configured value (100, as of 1/3/2024) (recommended); - when set to a
+          value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ModelVersionInfo`
+        
 
     .. py:method:: update(full_name: str, version: int [, comment: Optional[str]]) -> ModelVersionInfo
 
         Update a Model Version.
-
-Updates the specified model version.
-
-The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
-the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-Currently only the comment of the model version can be updated.
-
-:param full_name: str
-  The three-level (fully qualified) name of the model version
-:param version: int
-  The integer version number of the model version
-:param comment: str (optional)
-  The comment attached to the model version
-
-:returns: :class:`ModelVersionInfo`
+        
+        Updates the specified model version.
+        
+        The caller must be a metastore admin or an owner of the parent registered model. For the latter case,
+        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        Currently only the comment of the model version can be updated.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the model version
+        :param version: int
+          The integer version number of the model version
+        :param comment: str (optional)
+          The comment attached to the model version
+        
+        :returns: :class:`ModelVersionInfo`
+        
\ No newline at end of file
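A minimal sketch of the `ModelVersionsAPI` read paths above; the three-level model name and the alias are hypothetical placeholders, and a configured `WorkspaceClient` is assumed.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

model_name = 'main.default.my_model'  # placeholder three-level name

# Enumerate versions, then fetch one directly and one via an alias.
for mv in w.model_versions.list(full_name=model_name):
    print(mv.version, mv.status)

mv = w.model_versions.get(full_name=model_name, version=1)
champion = w.model_versions.get_by_alias(full_name=model_name, alias='champion')
```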
diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst
index fe4cee905..d0119657f 100644
--- a/docs/workspace/catalog/online_tables.rst
+++ b/docs/workspace/catalog/online_tables.rst
@@ -9,16 +9,16 @@
     .. py:method:: create( [, table: Optional[OnlineTable]]) -> Wait[OnlineTable]
 
         Create an Online Table.
-
-Create a new Online Table.
-
-:param table: :class:`OnlineTable` (optional)
-  Online Table information.
-
-:returns:
-  Long-running operation waiter for :class:`OnlineTable`.
-  See :method:wait_get_online_table_active for more details.
-
+        
+        Create a new Online Table.
+        
+        :param table: :class:`OnlineTable` (optional)
+          Online Table information.
+        
+        :returns:
+          Long-running operation waiter for :class:`OnlineTable`.
+          See :method:wait_get_online_table_active for more details.
+        
 
     .. py:method:: create_and_wait( [, table: Optional[OnlineTable], timeout: datetime.timedelta = 0:20:00]) -> OnlineTable
 
@@ -26,27 +26,27 @@ Create a new Online Table.
     .. py:method:: delete(name: str)
 
         Delete an Online Table.
-
-Delete an online table. Warning: This will delete all the data in the online table. If the source
-Delta table was deleted or modified since this Online Table was created, this will lose the data
-forever!
-
-:param name: str
-  Full three-part (catalog, schema, table) name of the table.
-
-
-
+        
+        Delete an online table. Warning: This will delete all the data in the online table. If the source
+        Delta table was deleted or modified since this Online Table was created, this will lose the data
+        forever!
+        
+        :param name: str
+          Full three-part (catalog, schema, table) name of the table.
+        
+        
+        
 
     .. py:method:: get(name: str) -> OnlineTable
 
         Get an Online Table.
-
-Get information about an existing online table and its status.
-
-:param name: str
-  Full three-part (catalog, schema, table) name of the table.
-
-:returns: :class:`OnlineTable`
-
+        
+        Get information about an existing online table and its status.
+        
+        :param name: str
+          Full three-part (catalog, schema, table) name of the table.
+        
+        :returns: :class:`OnlineTable`
+        
 
     .. py:method:: wait_get_online_table_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[OnlineTable], None]]) -> OnlineTable
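A minimal sketch of the `OnlineTablesAPI` status and delete calls above; the three-part table name is a hypothetical placeholder and a configured `WorkspaceClient` is assumed. Note the warning in the delete docstring: the online table's data is not recoverable.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

name = 'main.default.my_table_online'  # placeholder three-part name

# Poll the current status of an existing online table, then drop it.
tbl = w.online_tables.get(name=name)
print(tbl.status)
w.online_tables.delete(name=name)  # irreversible: deletes the table's data
```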
diff --git a/docs/workspace/catalog/quality_monitors.rst b/docs/workspace/catalog/quality_monitors.rst
index 8e71050f2..93f05b69a 100644
--- a/docs/workspace/catalog/quality_monitors.rst
+++ b/docs/workspace/catalog/quality_monitors.rst
@@ -5,254 +5,255 @@
 .. py:class:: QualityMonitorsAPI
 
     A monitor computes and monitors data or model quality metrics for a table over time. It generates metrics
-tables and a dashboard that you can use to monitor table health and set alerts.
-
-Most write operations require the user to be the owner of the table (or its parent schema or parent
-catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have
-**SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).
+    tables and a dashboard that you can use to monitor table health and set alerts.
+    
+    Most write operations require the user to be the owner of the table (or its parent schema or parent
+    catalog). Viewing the dashboard, computed metrics, or monitor configuration only requires the user to have
+    **SELECT** privileges on the table (along with **USE_SCHEMA** and **USE_CATALOG**).
 
     .. py:method:: cancel_refresh(table_name: str, refresh_id: str)
 
         Cancel refresh.
-
-Cancel an active monitor refresh for the given refresh ID.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-owner of the table
-
-Additionally, the call must be made from the workspace where the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-:param refresh_id: str
-  ID of the refresh.
-
-
-
+        
+        Cancel an active monitor refresh for the given refresh ID.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+        
+        Additionally, the call must be made from the workspace where the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        :param refresh_id: str
+          ID of the refresh.
+        
+        
+        
 
     .. py:method:: create(table_name: str, assets_dir: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], skip_builtin_dashboard: Optional[bool], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries], warehouse_id: Optional[str]]) -> MonitorInfo
 
         Create a table monitor.
-
-Creates a new monitor for the specified table.
-
-The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the
-table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's
-parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3.
-have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
-the table's parent schema - be an owner of the table.
-
-Workspace assets, such as the dashboard, will be created in the workspace where this call was made.
-
-:param table_name: str
-  Full name of the table.
-:param assets_dir: str
-  The directory to store monitoring assets (e.g. dashboard, metric tables).
-:param output_schema_name: str
-  Schema where output metric tables are created.
-:param baseline_table_name: str (optional)
-  Name of the baseline table from which drift metrics are computed from. Columns in the monitored
-  table should also be present in the baseline table.
-:param custom_metrics: List[:class:`MonitorMetric`] (optional)
-  Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
-  (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
-:param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
-  The data classification config for the monitor.
-:param inference_log: :class:`MonitorInferenceLog` (optional)
-  Configuration for monitoring inference logs.
-:param notifications: :class:`MonitorNotifications` (optional)
-  The notification settings for the monitor.
-:param schedule: :class:`MonitorCronSchedule` (optional)
-  The schedule for automatically updating and refreshing metric tables.
-:param skip_builtin_dashboard: bool (optional)
-  Whether to skip creating a default dashboard summarizing data quality metrics.
-:param slicing_exprs: List[str] (optional)
-  List of column expressions to slice data with for targeted analysis. The data is grouped by each
-  expression independently, resulting in a separate slice for each predicate and its complements. For
-  high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-:param snapshot: :class:`MonitorSnapshot` (optional)
-  Configuration for monitoring snapshot tables.
-:param time_series: :class:`MonitorTimeSeries` (optional)
-  Configuration for monitoring time series tables.
-:param warehouse_id: str (optional)
-  Optional argument to specify the warehouse for dashboard creation. If not specified, the first
-  running warehouse will be used.
-
-:returns: :class:`MonitorInfo`
-
+        
+        Creates a new monitor for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog, have **USE_SCHEMA** on the
+        table's parent schema, and have **SELECT** access on the table 2. have **USE_CATALOG** on the table's
+        parent catalog, be an owner of the table's parent schema, and have **SELECT** access on the table. 3.
+        have the following permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on
+        the table's parent schema - be an owner of the table.
+        
+        Workspace assets, such as the dashboard, will be created in the workspace where this call was made.
+        
+        :param table_name: str
+          Full name of the table.
+        :param assets_dir: str
+          The directory to store monitoring assets (e.g. dashboard, metric tables).
+        :param output_schema_name: str
+          Schema where output metric tables are created.
+        :param baseline_table_name: str (optional)
+          Name of the baseline table from which drift metrics are computed. Columns in the monitored
+          table should also be present in the baseline table.
+        :param custom_metrics: List[:class:`MonitorMetric`] (optional)
+          Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
+          (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
+        :param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
+          The data classification config for the monitor.
+        :param inference_log: :class:`MonitorInferenceLog` (optional)
+          Configuration for monitoring inference logs.
+        :param notifications: :class:`MonitorNotifications` (optional)
+          The notification settings for the monitor.
+        :param schedule: :class:`MonitorCronSchedule` (optional)
+          The schedule for automatically updating and refreshing metric tables.
+        :param skip_builtin_dashboard: bool (optional)
+          Whether to skip creating a default dashboard summarizing data quality metrics.
+        :param slicing_exprs: List[str] (optional)
+          List of column expressions to slice data with for targeted analysis. The data is grouped by each
+          expression independently, resulting in a separate slice for each predicate and its complements. For
+          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
+        :param snapshot: :class:`MonitorSnapshot` (optional)
+          Configuration for monitoring snapshot tables.
+        :param time_series: :class:`MonitorTimeSeries` (optional)
+          Configuration for monitoring time series tables.
+        :param warehouse_id: str (optional)
+          Optional argument to specify the warehouse for dashboard creation. If not specified, the first
+          running warehouse will be used.
+        
+        :returns: :class:`MonitorInfo`
+        
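+        A minimal sketch of creating a snapshot-profile monitor (all names are placeholders, and
+        the ``w.quality_monitors`` accessor and ``MonitorSnapshot`` import are assumptions):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.catalog import MonitorSnapshot
+
+            w = WorkspaceClient()
+            # Creates the monitor, its metric tables, and (unless skipped) a dashboard.
+            info = w.quality_monitors.create(
+                table_name="main.sandbox.monitored",
+                assets_dir="/Workspace/Users/someone@example.com/monitoring",
+                output_schema_name="main.sandbox",
+                snapshot=MonitorSnapshot(),
+            )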
 
     .. py:method:: delete(table_name: str)
 
         Delete a table monitor.
-
-Deletes a monitor for the specified table.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-owner of the table.
-
-Additionally, the call must be made from the workspace where the monitor was created.
-
-Note that the metric tables and dashboard will not be deleted as part of this call; those assets must
-be manually cleaned up (if desired).
-
-:param table_name: str
-  Full name of the table.
-
-
-
+        
+        Deletes a monitor for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table.
+        
+        Additionally, the call must be made from the workspace where the monitor was created.
+        
+        Note that the metric tables and dashboard will not be deleted as part of this call; those assets must
+        be manually cleaned up (if desired).
+        
+        :param table_name: str
+          Full name of the table.
+        
+        
+        
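+        A minimal sketch (placeholder table name; as noted above, the metric tables and dashboard
+        are left behind and must be cleaned up separately):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            w.quality_monitors.delete(table_name="main.sandbox.monitored")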
 
     .. py:method:: get(table_name: str) -> MonitorInfo
 
         Get a table monitor.
-
-Gets a monitor for the specified table.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema. 3. have the following
-permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent
-schema - **SELECT** privilege on the table.
-
-The returned information includes configuration values, as well as information on assets created by
-the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different
-workspace than where the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-
-:returns: :class:`MonitorInfo`
-
+        
+        Gets a monitor for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema. 3. have the following
+        permissions: - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent
+        schema - **SELECT** privilege on the table.
+        
+        The returned information includes configuration values, as well as information on assets created by
+        the monitor. Some information (e.g., dashboard) may be filtered out if the caller is in a different
+        workspace than where the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        
+        :returns: :class:`MonitorInfo`
+        
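+        A minimal sketch (placeholder table name):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            info = w.quality_monitors.get(table_name="main.sandbox.monitored")
+            # Some fields (e.g. the dashboard) may be filtered out when called from
+            # a workspace other than the one that created the monitor.
+            print(info.status)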
 
     .. py:method:: get_refresh(table_name: str, refresh_id: str) -> MonitorRefreshInfo
 
         Get refresh.
-
-Gets info about a specific monitor refresh using the given refresh ID.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
-**SELECT** privilege on the table.
-
-Additionally, the call must be made from the workspace where the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-:param refresh_id: str
-  ID of the refresh.
-
-:returns: :class:`MonitorRefreshInfo`
-
+        
+        Gets info about a specific monitor refresh using the given refresh ID.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+        **SELECT** privilege on the table.
+        
+        Additionally, the call must be made from the workspace where the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        :param refresh_id: str
+          ID of the refresh.
+        
+        :returns: :class:`MonitorRefreshInfo`
+        
 
     .. py:method:: list_refreshes(table_name: str) -> MonitorRefreshListResponse
 
         List refreshes.
-
-Gets an array containing the history of the most recent refreshes (up to 25) for this table.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
-**SELECT** privilege on the table.
-
-Additionally, the call must be made from the workspace where the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-
-:returns: :class:`MonitorRefreshListResponse`
-
+        
+        Gets an array containing the history of the most recent refreshes (up to 25) for this table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema -
+        **SELECT** privilege on the table.
+        
+        Additionally, the call must be made from the workspace where the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        
+        :returns: :class:`MonitorRefreshListResponse`
+        
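+        A minimal sketch (placeholder table name; the ``refreshes`` field name is an assumption):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            history = w.quality_monitors.list_refreshes(table_name="main.sandbox.monitored")
+            # At most the 25 most recent refreshes are returned.
+            for refresh in history.refreshes or []:
+                print(refresh.refresh_id, refresh.state)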
 
     .. py:method:: regenerate_dashboard(table_name: str [, warehouse_id: Optional[str]]) -> RegenerateDashboardResponse
 
         Regenerate a monitoring dashboard.
-
-Regenerates the monitoring dashboard for the specified table.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-owner of the table
-
-The call must be made from the workspace where the monitor was created. The dashboard will be
-regenerated in the assets directory that was specified when the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-:param warehouse_id: str (optional)
-  Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
-  running warehouse will be used.
-
-:returns: :class:`RegenerateDashboardResponse`
-
+        
+        Regenerates the monitoring dashboard for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+        
+        The call must be made from the workspace where the monitor was created. The dashboard will be
+        regenerated in the assets directory that was specified when the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        :param warehouse_id: str (optional)
+          Optional argument to specify the warehouse for dashboard regeneration. If not specified, the first
+          running warehouse will be used.
+        
+        :returns: :class:`RegenerateDashboardResponse`
+        
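+        A minimal sketch (placeholder table name):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # Without warehouse_id, the first running warehouse is used.
+            resp = w.quality_monitors.regenerate_dashboard(table_name="main.sandbox.monitored")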
 
     .. py:method:: run_refresh(table_name: str) -> MonitorRefreshInfo
 
         Queue a metric refresh for a monitor.
-
-Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
-background.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-owner of the table
-
-Additionally, the call must be made from the workspace where the monitor was created.
-
-:param table_name: str
-  Full name of the table.
-
-:returns: :class:`MonitorRefreshInfo`
-
+        
+        Queues a metric refresh on the monitor for the specified table. The refresh will execute in the
+        background.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table
+        
+        Additionally, the call must be made from the workspace where the monitor was created.
+        
+        :param table_name: str
+          Full name of the table.
+        
+        :returns: :class:`MonitorRefreshInfo`
+        
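+        A minimal sketch that queues a refresh and polls it with ``get_refresh`` (the table name
+        is a placeholder, and the state-name check is an assumption about the enum's values):
+
+        .. code-block:: python
+
+            import time
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            refresh = w.quality_monitors.run_refresh(table_name="main.sandbox.monitored")
+            # The refresh executes in the background; poll until it leaves the active states.
+            while refresh.state and refresh.state.value.endswith(("PENDING", "RUNNING")):
+                time.sleep(30)
+                refresh = w.quality_monitors.get_refresh(
+                    table_name="main.sandbox.monitored",
+                    refresh_id=str(refresh.refresh_id),
+                )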
 
     .. py:method:: update(table_name: str, output_schema_name: str [, baseline_table_name: Optional[str], custom_metrics: Optional[List[MonitorMetric]], dashboard_id: Optional[str], data_classification_config: Optional[MonitorDataClassificationConfig], inference_log: Optional[MonitorInferenceLog], notifications: Optional[MonitorNotifications], schedule: Optional[MonitorCronSchedule], slicing_exprs: Optional[List[str]], snapshot: Optional[MonitorSnapshot], time_series: Optional[MonitorTimeSeries]]) -> MonitorInfo
 
         Update a table monitor.
-
-Updates a monitor for the specified table.
-
-The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
-table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
-- **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
-owner of the table.
-
-Additionally, the call must be made from the workspace where the monitor was created, and the caller
-must be the original creator of the monitor.
-
-Certain configuration fields, such as output asset identifiers, cannot be updated.
-
-:param table_name: str
-  Full name of the table.
-:param output_schema_name: str
-  Schema where output metric tables are created.
-:param baseline_table_name: str (optional)
-  Name of the baseline table from which drift metrics are computed from. Columns in the monitored
-  table should also be present in the baseline table.
-:param custom_metrics: List[:class:`MonitorMetric`] (optional)
-  Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
-  (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
-:param dashboard_id: str (optional)
-  Id of dashboard that visualizes the computed metrics. This can be empty if the monitor is in PENDING
-  state.
-:param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
-  The data classification config for the monitor.
-:param inference_log: :class:`MonitorInferenceLog` (optional)
-  Configuration for monitoring inference logs.
-:param notifications: :class:`MonitorNotifications` (optional)
-  The notification settings for the monitor.
-:param schedule: :class:`MonitorCronSchedule` (optional)
-  The schedule for automatically updating and refreshing metric tables.
-:param slicing_exprs: List[str] (optional)
-  List of column expressions to slice data with for targeted analysis. The data is grouped by each
-  expression independently, resulting in a separate slice for each predicate and its complements. For
-  high-cardinality columns, only the top 100 unique values by frequency will generate slices.
-:param snapshot: :class:`MonitorSnapshot` (optional)
-  Configuration for monitoring snapshot tables.
-:param time_series: :class:`MonitorTimeSeries` (optional)
-  Configuration for monitoring time series tables.
-
-:returns: :class:`MonitorInfo`
+        
+        Updates a monitor for the specified table.
+        
+        The caller must either: 1. be an owner of the table's parent catalog 2. have **USE_CATALOG** on the
+        table's parent catalog and be an owner of the table's parent schema 3. have the following permissions:
+        - **USE_CATALOG** on the table's parent catalog - **USE_SCHEMA** on the table's parent schema - be an
+        owner of the table.
+        
+        Additionally, the call must be made from the workspace where the monitor was created, and the caller
+        must be the original creator of the monitor.
+        
+        Certain configuration fields, such as output asset identifiers, cannot be updated.
+        
+        :param table_name: str
+          Full name of the table.
+        :param output_schema_name: str
+          Schema where output metric tables are created.
+        :param baseline_table_name: str (optional)
+          Name of the baseline table from which drift metrics are computed. Columns in the monitored
+          table should also be present in the baseline table.
+        :param custom_metrics: List[:class:`MonitorMetric`] (optional)
+          Custom metrics to compute on the monitored table. These can be aggregate metrics, derived metrics
+          (from already computed aggregate metrics), or drift metrics (comparing metrics across time windows).
+        :param dashboard_id: str (optional)
+          ID of the dashboard that visualizes the computed metrics. This can be empty if the monitor is in
+          PENDING state.
+        :param data_classification_config: :class:`MonitorDataClassificationConfig` (optional)
+          The data classification config for the monitor.
+        :param inference_log: :class:`MonitorInferenceLog` (optional)
+          Configuration for monitoring inference logs.
+        :param notifications: :class:`MonitorNotifications` (optional)
+          The notification settings for the monitor.
+        :param schedule: :class:`MonitorCronSchedule` (optional)
+          The schedule for automatically updating and refreshing metric tables.
+        :param slicing_exprs: List[str] (optional)
+          List of column expressions to slice data with for targeted analysis. The data is grouped by each
+          expression independently, resulting in a separate slice for each predicate and its complements. For
+          high-cardinality columns, only the top 100 unique values by frequency will generate slices.
+        :param snapshot: :class:`MonitorSnapshot` (optional)
+          Configuration for monitoring snapshot tables.
+        :param time_series: :class:`MonitorTimeSeries` (optional)
+          Configuration for monitoring time series tables.
+        
+        :returns: :class:`MonitorInfo`
+        
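+        A minimal sketch that changes only the refresh schedule (placeholder names; the
+        ``MonitorCronSchedule`` field names are assumptions):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.catalog import MonitorCronSchedule
+
+            w = WorkspaceClient()
+            info = w.quality_monitors.update(
+                table_name="main.sandbox.monitored",
+                output_schema_name="main.sandbox",
+                # Quartz cron syntax: refresh daily at noon UTC.
+                schedule=MonitorCronSchedule(
+                    quartz_cron_expression="0 0 12 * * ?",
+                    timezone_id="UTC",
+                ),
+            )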
\ No newline at end of file
diff --git a/docs/workspace/catalog/registered_models.rst b/docs/workspace/catalog/registered_models.rst
index cba3cbc96..b05a702b5 100644
--- a/docs/workspace/catalog/registered_models.rst
+++ b/docs/workspace/catalog/registered_models.rst
@@ -5,196 +5,197 @@
 .. py:class:: RegisteredModelsAPI
 
     Databricks provides a hosted version of MLflow Model Registry in Unity Catalog. Models in Unity Catalog
-provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
-workspaces.
-
-An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace.
-Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating
-new model versions currently requires use of the MLflow Python client. Once model versions are created,
-you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time
-serving using Databricks Model Serving.
-
-All operations on registered models and model versions require USE_CATALOG permissions on the enclosing
-catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following additional
-privileges are required for various operations:
-
-* To create a registered model, users must additionally have the CREATE_MODEL permission on the target
-schema. * To view registered model or model version metadata, model version data files, or invoke a model
-version, users must additionally have the EXECUTE permission on the registered model * To update
-registered model or model version tags, users must additionally have APPLY TAG permissions on the
-registered model * To update other registered model or model version metadata (comments, aliases) create a
-new model version, or update permissions on the registered model, users must be owners of the registered
-model.
-
-Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that
-specify a securable type, use "FUNCTION" as the securable type.
+    provide centralized access control, auditing, lineage, and discovery of ML models across Databricks
+    workspaces.
+    
+    An MLflow registered model resides in the third layer of Unity Catalog’s three-level namespace.
+    Registered models contain model versions, which correspond to actual ML models (MLflow models). Creating
+    new model versions currently requires use of the MLflow Python client. Once model versions are created,
+    you can load them for batch inference using MLflow Python client APIs, or deploy them for real-time
+    serving using Databricks Model Serving.
+    
+    All operations on registered models and model versions require USE_CATALOG permissions on the enclosing
+    catalog and USE_SCHEMA permissions on the enclosing schema. In addition, the following
+    privileges are required for various operations:
+    
+    * To create a registered model, users must additionally have the CREATE_MODEL permission on the target
+      schema.
+    * To view registered model or model version metadata, model version data files, or invoke a model
+      version, users must additionally have the EXECUTE permission on the registered model.
+    * To update registered model or model version tags, users must additionally have APPLY TAG permissions
+      on the registered model.
+    * To update other registered model or model version metadata (comments, aliases), create a new model
+      version, or update permissions on the registered model, users must be owners of the registered model.
+    
+    Note: The securable type for models is "FUNCTION". When using REST APIs (e.g. tagging, grants) that
+    specify a securable type, use "FUNCTION" as the securable type.
 
     .. py:method:: create(catalog_name: str, schema_name: str, name: str [, comment: Optional[str], storage_location: Optional[str]]) -> RegisteredModelInfo
 
         Create a Registered Model.
-
-Creates a new registered model in Unity Catalog.
-
-File storage for model versions in the registered model will be located in the default location which
-is specified by the parent schema, or the parent catalog, or the Metastore.
-
-For registered model creation to succeed, the user must satisfy the following conditions: - The caller
-must be a metastore admin, or be the owner of the parent catalog and schema, or have the
-**USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-- The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema.
-
-:param catalog_name: str
-  The name of the catalog where the schema and the registered model reside
-:param schema_name: str
-  The name of the schema where the registered model resides
-:param name: str
-  The name of the registered model
-:param comment: str (optional)
-  The comment attached to the registered model
-:param storage_location: str (optional)
-  The storage location on the cloud under which model version data files are stored
-
-:returns: :class:`RegisteredModelInfo`
-
+        
+        Creates a new registered model in Unity Catalog.
+        
+        File storage for model versions in the registered model will be located in the default location
+        specified by the parent schema, the parent catalog, or the metastore.
+        
+        For registered model creation to succeed, the user must satisfy the following conditions: - The caller
+        must be a metastore admin, or be the owner of the parent catalog and schema, or have the
+        **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        - The caller must have the **CREATE MODEL** or **CREATE FUNCTION** privilege on the parent schema.
+        
+        :param catalog_name: str
+          The name of the catalog where the schema and the registered model reside
+        :param schema_name: str
+          The name of the schema where the registered model resides
+        :param name: str
+          The name of the registered model
+        :param comment: str (optional)
+          The comment attached to the registered model
+        :param storage_location: str (optional)
+          The storage location on the cloud under which model version data files are stored
+        
+        :returns: :class:`RegisteredModelInfo`
+        
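+        A minimal sketch (catalog, schema and model names are placeholders; the
+        ``w.registered_models`` accessor is assumed):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            model = w.registered_models.create(
+                catalog_name="main",
+                schema_name="sandbox",
+                name="my_model",
+                comment="registered from the SDK",
+            )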
 
     .. py:method:: delete(full_name: str)
 
         Delete a Registered Model.
-
-Deletes a registered model and all its model versions from the specified parent catalog and schema.
-
-The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the registered model
-
-
-
+        
+        Deletes a registered model and all its model versions from the specified parent catalog and schema.
+        
+        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the registered model
+        
+        
+        
 
     .. py:method:: delete_alias(full_name: str, alias: str)
 
         Delete a Registered Model Alias.
-
-Deletes a registered model alias.
-
-The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the registered model
-:param alias: str
-  The name of the alias
-
-
-
+        
+        Deletes a registered model alias.
+        
+        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the registered model
+        :param alias: str
+          The name of the alias
+        
+        
+        
 
     .. py:method:: get(full_name: str [, include_aliases: Optional[bool], include_browse: Optional[bool]]) -> RegisteredModelInfo
 
         Get a Registered Model.
-
-Get a registered model.
-
-The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
-registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
-privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  The three-level (fully qualified) name of the registered model
-:param include_aliases: bool (optional)
-  Whether to include registered model aliases in the response
-:param include_browse: bool (optional)
-  Whether to include registered models in the response for which the principal can only access
-  selective metadata for
-
-:returns: :class:`RegisteredModelInfo`
-
+        
+        Get a registered model.
+        
+        The caller must be a metastore admin or an owner of (or have the **EXECUTE** privilege on) the
+        registered model. For the latter case, the caller must also be the owner or have the **USE_CATALOG**
+        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the registered model
+        :param include_aliases: bool (optional)
+          Whether to include registered model aliases in the response
+        :param include_browse: bool (optional)
+          Whether to include registered models in the response for which the principal can only access
+          selective metadata
+        
+        :returns: :class:`RegisteredModelInfo`
+        
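+        A minimal sketch (placeholder model name):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            model = w.registered_models.get(full_name="main.sandbox.my_model", include_aliases=True)
+            print(model.full_name, model.owner)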
 
     .. py:method:: list( [, catalog_name: Optional[str], include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name: Optional[str]]) -> Iterator[RegisteredModelInfo]
 
         List Registered Models.
-
-List registered models. You can list registered models under a particular schema, or list all
-registered models in the current metastore.
-
-The returned models are filtered based on the privileges of the calling user. For example, the
-metastore admin is able to list all the registered models. A regular user needs to be the owner or
-have the **EXECUTE** privilege on the registered model to recieve the registered models in the
-response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-There is no guarantee of a specific ordering of the elements in the response.
-
-:param catalog_name: str (optional)
-  The identifier of the catalog under which to list registered models. If specified, schema_name must
-  be specified.
-:param include_browse: bool (optional)
-  Whether to include registered models in the response for which the principal can only access
-  selective metadata for
-:param max_results: int (optional)
-  Max number of registered models to return.
-  
-  If both catalog and schema are specified: - when max_results is not specified, the page length is
-  set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the
-  page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); -
-  when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when
-  set to a value less than 0, an invalid parameter error is returned;
-  
-  If neither schema nor catalog is specified: - when max_results is not specified, the page length is
-  set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the
-  page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); -
-  when set to 0, the page length is set to a server configured value (100, as of 4/2/2024); - when set
-  to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque token to send for the next page of results (pagination).
-:param schema_name: str (optional)
-  The identifier of the schema under which to list registered models. If specified, catalog_name must
-  be specified.
-
-:returns: Iterator over :class:`RegisteredModelInfo`
-
+        
+        List registered models. You can list registered models under a particular schema, or list all
+        registered models in the current metastore.
+        
+        The returned models are filtered based on the privileges of the calling user. For example, the
+        metastore admin is able to list all the registered models. A regular user needs to be the owner or
+        have the **EXECUTE** privilege on the registered model to receive the registered models in the
+        response. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        There is no guarantee of a specific ordering of the elements in the response.
+        
+        :param catalog_name: str (optional)
+          The identifier of the catalog under which to list registered models. If specified, schema_name must
+          be specified.
+        :param include_browse: bool (optional)
+          Whether to include registered models in the response for which the principal can only access
+          selective metadata
+        :param max_results: int (optional)
+          Max number of registered models to return.
+          
+          If both catalog and schema are specified: - when max_results is not specified, the page length is
+          set to a server configured value (10000, as of 4/2/2024). - when set to a value greater than 0, the
+          page length is the minimum of this value and a server configured value (10000, as of 4/2/2024); -
+          when set to 0, the page length is set to a server configured value (10000, as of 4/2/2024); - when
+          set to a value less than 0, an invalid parameter error is returned;
+          
+          If neither schema nor catalog is specified: - when max_results is not specified, the page length is
+          set to a server configured value (100, as of 4/2/2024). - when set to a value greater than 0, the
+          page length is the minimum of this value and a server configured value (1000, as of 4/2/2024); -
+          when set to 0, the page length is set to a server configured value (100, as of 4/2/2024); - when set
+          to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque token to send for the next page of results (pagination).
+        :param schema_name: str (optional)
+          The identifier of the schema under which to list registered models. If specified, catalog_name must
+          be specified.
+        
+        :returns: Iterator over :class:`RegisteredModelInfo`
+        
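+        A minimal sketch of listing the models under one schema (placeholder names; the returned
+        iterator pages through results transparently):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # catalog_name and schema_name must be given together.
+            for model in w.registered_models.list(catalog_name="main", schema_name="sandbox"):
+                print(model.full_name)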
 
     .. py:method:: set_alias(full_name: str, alias: str, version_num: int) -> RegisteredModelAlias
 
         Set a Registered Model Alias.
-
-Set an alias on the specified registered model.
-
-The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  Full name of the registered model
-:param alias: str
-  The name of the alias
-:param version_num: int
-  The version number of the model version to which the alias points
-
-:returns: :class:`RegisteredModelAlias`
-
+        
+        Set an alias on the specified registered model.
+        
+        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          Full name of the registered model
+        :param alias: str
+          The name of the alias
+        :param version_num: int
+          The version number of the model version to which the alias points
+        
+        :returns: :class:`RegisteredModelAlias`
+        
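+        A minimal sketch of pointing an alias at a model version (all values are placeholders):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            alias = w.registered_models.set_alias(
+                full_name="main.sandbox.my_model",
+                alias="champion",
+                version_num=1,
+            )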
 
     .. py:method:: update(full_name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> RegisteredModelInfo
 
         Update a Registered Model.
-
-Updates the specified registered model.
-
-The caller must be a metastore admin or an owner of the registered model. For the latter case, the
-caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-Currently only the name, the owner or the comment of the registered model can be updated.
-
-:param full_name: str
-  The three-level (fully qualified) name of the registered model
-:param comment: str (optional)
-  The comment attached to the registered model
-:param new_name: str (optional)
-  New name for the registered model.
-:param owner: str (optional)
-  The identifier of the user who owns the registered model
-
-:returns: :class:`RegisteredModelInfo`
+        
+        Updates the specified registered model.
+        
+        The caller must be a metastore admin or an owner of the registered model. For the latter case, the
+        caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        Currently, only the name, the owner, or the comment of the registered model can be updated.
+        
+        :param full_name: str
+          The three-level (fully qualified) name of the registered model
+        :param comment: str (optional)
+          The comment attached to the registered model
+        :param new_name: str (optional)
+          New name for the registered model.
+        :param owner: str (optional)
+          The identifier of the user who owns the registered model
+        
+        :returns: :class:`RegisteredModelInfo`
+        
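+        A minimal sketch that updates only the comment (placeholder model name):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            model = w.registered_models.update(
+                full_name="main.sandbox.my_model",
+                comment="promoted to production",
+            )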
\ No newline at end of file
diff --git a/docs/workspace/catalog/resource_quotas.rst b/docs/workspace/catalog/resource_quotas.rst
index 22f50f0f1..3396011f0 100644
--- a/docs/workspace/catalog/resource_quotas.rst
+++ b/docs/workspace/catalog/resource_quotas.rst
@@ -5,40 +5,41 @@
 .. py:class:: ResourceQuotasAPI
 
     Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that
-can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
-metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
-limits. For more information on resource quotas see the [Unity Catalog documentation].
-
-[Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
+    can be created. Quotas are expressed in terms of a resource type and a parent (for example, tables per
+    metastore or schemas per catalog). The resource quota APIs enable you to monitor your current usage and
+    limits. For more information on resource quotas, see the [Unity Catalog documentation].
+    
+    [Unity Catalog documentation]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#resource-quotas
 
     .. py:method:: get_quota(parent_securable_type: str, parent_full_name: str, quota_name: str) -> GetQuotaResponse
 
         Get information for a single resource quota.
-
-The GetQuota API returns usage information for a single resource quota, defined as a child-parent
-pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
-asynchronously. The updated count might not be returned in the first call.
-
-:param parent_securable_type: str
-  Securable type of the quota parent.
-:param parent_full_name: str
-  Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
-:param quota_name: str
-  Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
-
-:returns: :class:`GetQuotaResponse`
-
+        
+        The GetQuota API returns usage information for a single resource quota, defined as a child-parent
+        pair. This API also refreshes the quota count if it is out of date. Refreshes are triggered
+        asynchronously. The updated count might not be returned in the first call.
+        
+        :param parent_securable_type: str
+          Securable type of the quota parent.
+        :param parent_full_name: str
+          Full name of the parent resource. Provide the metastore ID if the parent is a metastore.
+        :param quota_name: str
+          Name of the quota. Follows the pattern of the quota type, with "-quota" added as a suffix.
+        
+        :returns: :class:`GetQuotaResponse`
+        
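+        A minimal sketch (the parent and quota names are placeholders following the documented
+        naming pattern, and the ``w.resource_quotas`` accessor is assumed):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            quota = w.resource_quotas.get_quota(
+                parent_securable_type="catalog",
+                parent_full_name="main",
+                quota_name="schema-quota",
+            )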
 
     .. py:method:: list_quotas( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[QuotaInfo]
 
         List all resource quotas under a metastore.
-
-ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
-counts returned. This API does not trigger a refresh of quota counts.
-
-:param max_results: int (optional)
-  The number of quotas to return.
-:param page_token: str (optional)
-  Opaque token for the next page of results.
-
-:returns: Iterator over :class:`QuotaInfo`
+        
+        ListQuotas returns all quota values under the metastore. There are no SLAs on the freshness of the
+        counts returned. This API does not trigger a refresh of quota counts.
+        
+        :param max_results: int (optional)
+          The number of quotas to return.
+        :param page_token: str (optional)
+          Opaque token for the next page of results.
+        
+        :returns: Iterator over :class:`QuotaInfo`
+        
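+        A minimal sketch (the ``QuotaInfo`` field names are assumptions; the iterator pages
+        through results transparently):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            for quota in w.resource_quotas.list_quotas():
+                print(quota.quota_name, quota.quota_count, quota.quota_limit)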
\ No newline at end of file
diff --git a/docs/workspace/catalog/schemas.rst b/docs/workspace/catalog/schemas.rst
index a3c9d2096..feaf7c7a0 100644
--- a/docs/workspace/catalog/schemas.rst
+++ b/docs/workspace/catalog/schemas.rst
@@ -5,9 +5,9 @@
 .. py:class:: SchemasAPI
 
     A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace. A schema
-organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
-the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT
-permission on the table or view.
+    organizes tables, views and functions. To access (or list) a table or view in a schema, users must have
+    the USE_SCHEMA data permission on the schema and its parent catalog, and they must have the SELECT
+    permission on the table or view.
 
     .. py:method:: create(name: str, catalog_name: str [, comment: Optional[str], properties: Optional[Dict[str, str]], storage_root: Optional[str]]) -> SchemaInfo
 
@@ -31,38 +31,38 @@ permission on the table or view.
             w.schemas.delete(full_name=created_schema.full_name)
 
         Create a schema.
-
-Creates a new schema for catalog in the Metatastore. The caller must be a metastore admin, or have the
-**CREATE_SCHEMA** privilege in the parent catalog.
-
-:param name: str
-  Name of schema, relative to parent catalog.
-:param catalog_name: str
-  Name of parent catalog.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param properties: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-:param storage_root: str (optional)
-  Storage root URL for managed tables within schema.
-
-:returns: :class:`SchemaInfo`
-
+        
+        Creates a new schema for a catalog in the metastore. The caller must be a metastore admin, or have the
+        **CREATE_SCHEMA** privilege in the parent catalog.
+        
+        :param name: str
+          Name of schema, relative to parent catalog.
+        :param catalog_name: str
+          Name of parent catalog.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param properties: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        :param storage_root: str (optional)
+          Storage root URL for managed tables within schema.
+        
+        :returns: :class:`SchemaInfo`
+        
 
     .. py:method:: delete(full_name: str [, force: Optional[bool]])
 
         Delete a schema.
-
-Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
-owner of the parent catalog.
-
-:param full_name: str
-  Full name of the schema.
-:param force: bool (optional)
-  Force deletion even if the schema is not empty.
-
-
-
+        
+        Deletes the specified schema from the parent catalog. The caller must be the owner of the schema or an
+        owner of the parent catalog.
+        
+        :param full_name: str
+          Full name of the schema.
+        :param force: bool (optional)
+          Force deletion even if the schema is not empty.
+        
+        
+        
 
     .. py:method:: get(full_name: str [, include_browse: Optional[bool]]) -> SchemaInfo
 
@@ -88,18 +88,18 @@ owner of the parent catalog.
             w.schemas.delete(full_name=created.full_name)
 
         Get a schema.
-
-Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the
-schema, or a user that has the **USE_SCHEMA** privilege on the schema.
-
-:param full_name: str
-  Full name of the schema.
-:param include_browse: bool (optional)
-  Whether to include schemas in the response for which the principal can only access selective
-  metadata for
-
-:returns: :class:`SchemaInfo`
-
+        
+        Gets the specified schema within the metastore. The caller must be a metastore admin, the owner of the
+        schema, or a user that has the **USE_SCHEMA** privilege on the schema.
+        
+        :param full_name: str
+          Full name of the schema.
+        :param include_browse: bool (optional)
+          Whether to include schemas in the response for which the principal can only access selective
+          metadata
+        
+        :returns: :class:`SchemaInfo`
+        
 
     .. py:method:: list(catalog_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SchemaInfo]
 
@@ -122,27 +122,27 @@ schema, or a user that has the **USE_SCHEMA** privilege on the schema.
             w.catalogs.delete(name=new_catalog.name, force=True)
 
         List schemas.
-
-Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the
-owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas
-owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved.
-There is no guarantee of a specific ordering of the elements in the array.
-
-:param catalog_name: str
-  Parent catalog for schemas of interest.
-:param include_browse: bool (optional)
-  Whether to include schemas in the response for which the principal can only access selective
-  metadata for
-:param max_results: int (optional)
-  Maximum number of schemas to return. If not set, all the schemas are returned (not recommended). -
-  when set to a value greater than 0, the page length is the minimum of this value and a server
-  configured value; - when set to 0, the page length is set to a server configured value
-  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`SchemaInfo`
-
+        
+        Gets an array of schemas for a catalog in the metastore. If the caller is the metastore admin or the
+        owner of the parent catalog, all schemas for the catalog will be retrieved. Otherwise, only schemas
+        owned by the caller (or for which the caller has the **USE_SCHEMA** privilege) will be retrieved.
+        There is no guarantee of a specific ordering of the elements in the array.
+        
+        :param catalog_name: str
+          Parent catalog for schemas of interest.
+        :param include_browse: bool (optional)
+          Whether to include schemas in the response for which the principal can only access selective
+          metadata
+        :param max_results: int (optional)
+          Maximum number of schemas to return. If not set, all the schemas are returned (not recommended). -
+          when set to a value greater than 0, the page length is the minimum of this value and a server
+          configured value; - when set to 0, the page length is set to a server configured value
+          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`SchemaInfo`
+        
 
     .. py:method:: update(full_name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], new_name: Optional[str], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> SchemaInfo
 
@@ -168,23 +168,24 @@ There is no guarantee of a specific ordering of the elements in the array.
             w.schemas.delete(full_name=created.full_name)
 
         Update a schema.
-
-Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If
-the caller is a metastore admin, only the __owner__ field can be changed in the update. If the
-__name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA**
-privilege on the parent catalog.
-
-:param full_name: str
-  Full name of the schema.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
-  Whether predictive optimization should be enabled for this object and objects under it.
-:param new_name: str (optional)
-  New name for the schema.
-:param owner: str (optional)
-  Username of current owner of schema.
-:param properties: Dict[str,str] (optional)
-  A map of key-value properties attached to the securable.
-
-:returns: :class:`SchemaInfo`
+        
+        Updates a schema for a catalog. The caller must be the owner of the schema or a metastore admin. If
+        the caller is a metastore admin, only the __owner__ field can be changed in the update. If the
+        __name__ field must be updated, the caller must be a metastore admin or have the **CREATE_SCHEMA**
+        privilege on the parent catalog.
+        
+        :param full_name: str
+          Full name of the schema.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
+          Whether predictive optimization should be enabled for this object and objects under it.
+        :param new_name: str (optional)
+          New name for the schema.
+        :param owner: str (optional)
+          Username of current owner of schema.
+        :param properties: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
+        
+        :returns: :class:`SchemaInfo`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index 80cee7900..cac70a944 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -5,15 +5,15 @@
 .. py:class:: StorageCredentialsAPI
 
     A storage credential represents an authentication and authorization mechanism for accessing data stored on
-your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that
-control which users and groups can access the credential. If a user does not have access to a storage
-credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your
-cloud tenant on the user’s behalf.
-
-Databricks recommends using external locations rather than using storage credentials directly.
-
-To create storage credentials, you must be a Databricks account admin. The account admin who creates the
-storage credential can delegate ownership to another user or group to manage permissions on it.
+    your cloud tenant. Each storage credential is subject to Unity Catalog access-control policies that
+    control which users and groups can access the credential. If a user does not have access to a storage
+    credential in Unity Catalog, the request fails and Unity Catalog does not attempt to authenticate to your
+    cloud tenant on the user’s behalf.
+    
+    Databricks recommends using external locations rather than using storage credentials directly.
+    
+    To create storage credentials, you must be a Databricks account admin. The account admin who creates the
+    storage credential can delegate ownership to another user or group to manage permissions on it.
 
     .. py:method:: create(name: str [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], read_only: Optional[bool], skip_validation: Optional[bool]]) -> StorageCredentialInfo
 
@@ -38,45 +38,45 @@ storage credential can delegate ownership to another user or group to manage per
             w.storage_credentials.delete(delete=created.name)
 
         Create a storage credential.
-
-Creates a new storage credential.
-
-:param name: str
-  The credential name. The name must be unique within the metastore.
-:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-  The AWS IAM role configuration.
-:param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
-  The Azure managed identity configuration.
-:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-  The Azure service principal configuration.
-:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-  The Cloudflare API token configuration.
-:param comment: str (optional)
-  Comment associated with the credential.
-:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-  The Databricks managed GCP service account configuration.
-:param read_only: bool (optional)
-  Whether the storage credential is only usable for read operations.
-:param skip_validation: bool (optional)
-  Supplying true to this argument skips validation of the created credential.
-
-:returns: :class:`StorageCredentialInfo`
-
+        
+        Creates a new storage credential.
+        
+        :param name: str
+          The credential name. The name must be unique within the metastore.
+        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+          The AWS IAM role configuration.
+        :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
+        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+          The Cloudflare API token configuration.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+          The Databricks managed GCP service account configuration.
+        :param read_only: bool (optional)
+          Whether the storage credential is only usable for read operations.
+        :param skip_validation: bool (optional)
+          Supplying true to this argument skips validation of the created credential.
+        
+        :returns: :class:`StorageCredentialInfo`
+        
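A minimal end-to-end sketch of the create/delete round trip documented above, assuming a configured workspace client; the credential name and IAM role ARN below are illustrative placeholders, not values from this patch:

.. code-block:: python

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Create a credential backed by a placeholder AWS IAM role.
    created = w.storage_credentials.create(
        name=f'sdk-docs-{time.time_ns()}',
        aws_iam_role=catalog.AwsIamRoleRequest(
            role_arn='arn:aws:iam::123456789012:role/unity-catalog-access'),
        comment='created from the storage credentials docs',
    )

    # Clean up, per the delete method documented below.
    w.storage_credentials.delete(name=created.name)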
 
     .. py:method:: delete(name: str [, force: Optional[bool]])
 
         Delete a credential.
-
-Deletes a storage credential from the metastore. The caller must be an owner of the storage
-credential.
-
-:param name: str
-  Name of the storage credential.
-:param force: bool (optional)
-  Force deletion even if there are dependent external locations or external tables.
-
-
-
+        
+        Deletes a storage credential from the metastore. The caller must be an owner of the storage
+        credential.
+        
+        :param name: str
+          Name of the storage credential.
+        :param force: bool (optional)
+          Force deletion even if there are dependent external locations or external tables.
+        
+        
+        
 
     .. py:method:: get(name: str) -> StorageCredentialInfo
 
@@ -103,15 +103,15 @@ credential.
             w.storage_credentials.delete(name=created.name)
 
         Get a credential.
-
-Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
-storage credential, or have some permission on the storage credential.
-
-:param name: str
-  Name of the storage credential.
-
-:returns: :class:`StorageCredentialInfo`
-
+        
+        Gets a storage credential from the metastore. The caller must be a metastore admin, the owner of the
+        storage credential, or have some permission on the storage credential.
+        
+        :param name: str
+          Name of the storage credential.
+        
+        :returns: :class:`StorageCredentialInfo`
+        
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[StorageCredentialInfo]
 
@@ -127,23 +127,23 @@ storage credential, or have some permission on the storage credential.
             all = w.storage_credentials.list()
 
         List credentials.
-
-Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
-only those storage credentials the caller has permission to access. If the caller is a metastore
-admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the
-elements in the array.
-
-:param max_results: int (optional)
-  Maximum number of storage credentials to return. If not set, all the storage credentials are
-  returned (not recommended). - when set to a value greater than 0, the page length is the minimum of
-  this value and a server configured value; - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value less than 0, an invalid parameter error is
-  returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`StorageCredentialInfo`
-
+        
+        Gets an array of storage credentials (as __StorageCredentialInfo__ objects). The array is limited to
+        only those storage credentials the caller has permission to access. If the caller is a metastore
+        admin, retrieval of credentials is unrestricted. There is no guarantee of a specific ordering of the
+        elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of storage credentials to return. If not set, all the storage credentials are
+          returned (not recommended). - when set to a value greater than 0, the page length is the minimum of
+          this value and a server configured value; - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value less than 0, an invalid parameter error is
+          returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`StorageCredentialInfo`
+        
 
     .. py:method:: update(name: str [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityResponse], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> StorageCredentialInfo
 
@@ -173,69 +173,70 @@ elements in the array.
             w.storage_credentials.delete(delete=created.name)
 
         Update a credential.
-
-Updates a storage credential on the metastore.
-
-:param name: str
-  Name of the storage credential.
-:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-  The AWS IAM role configuration.
-:param azure_managed_identity: :class:`AzureManagedIdentityResponse` (optional)
-  The Azure managed identity configuration.
-:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-  The Azure service principal configuration.
-:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-  The Cloudflare API token configuration.
-:param comment: str (optional)
-  Comment associated with the credential.
-:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-  The Databricks managed GCP service account configuration.
-:param force: bool (optional)
-  Force update even if there are dependent external locations or external tables.
-:param isolation_mode: :class:`IsolationMode` (optional)
-:param new_name: str (optional)
-  New name for the storage credential.
-:param owner: str (optional)
-  Username of current owner of credential.
-:param read_only: bool (optional)
-  Whether the storage credential is only usable for read operations.
-:param skip_validation: bool (optional)
-  Supplying true to this argument skips validation of the updated credential.
-
-:returns: :class:`StorageCredentialInfo`
-
+        
+        Updates a storage credential on the metastore.
+        
+        :param name: str
+          Name of the storage credential.
+        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+          The AWS IAM role configuration.
+        :param azure_managed_identity: :class:`AzureManagedIdentityResponse` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
+        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+          The Cloudflare API token configuration.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+          The Databricks managed GCP service account configuration.
+        :param force: bool (optional)
+          Force update even if there are dependent external locations or external tables.
+        :param isolation_mode: :class:`IsolationMode` (optional)
+        :param new_name: str (optional)
+          New name for the storage credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the storage credential is only usable for read operations.
+        :param skip_validation: bool (optional)
+          Supplying true to this argument skips validation of the updated credential.
+        
+        :returns: :class:`StorageCredentialInfo`
+        
 
     .. py:method:: validate( [, aws_iam_role: Optional[AwsIamRoleRequest], azure_managed_identity: Optional[AzureManagedIdentityRequest], azure_service_principal: Optional[AzureServicePrincipal], cloudflare_api_token: Optional[CloudflareApiToken], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountRequest], external_location_name: Optional[str], read_only: Optional[bool], storage_credential_name: Optional[str], url: Optional[str]]) -> ValidateStorageCredentialResponse
 
         Validate a storage credential.
-
-Validates a storage credential. At least one of __external_location_name__ and __url__ need to be
-provided. If only one of them is provided, it will be used for validation. And if both are provided,
-the __url__ will be used for validation, and __external_location_name__ will be ignored when checking
-overlapping urls.
-
-Either the __storage_credential_name__ or the cloud-specific credential must be provided.
-
-The caller must be a metastore admin or the storage credential owner or have the
-**CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential.
-
-:param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
-  The AWS IAM role configuration.
-:param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
-  The Azure managed identity configuration.
-:param azure_service_principal: :class:`AzureServicePrincipal` (optional)
-  The Azure service principal configuration.
-:param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
-  The Cloudflare API token configuration.
-:param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
-  The Databricks created GCP service account configuration.
-:param external_location_name: str (optional)
-  The name of an existing external location to validate.
-:param read_only: bool (optional)
-  Whether the storage credential is only usable for read operations.
-:param storage_credential_name: str (optional)
-  The name of the storage credential to validate.
-:param url: str (optional)
-  The external location url to validate.
-
-:returns: :class:`ValidateStorageCredentialResponse`
+        
+        Validates a storage credential. At least one of __external_location_name__ and __url__ needs to be
+        provided. If only one of them is provided, it will be used for validation. If both are provided,
+        the __url__ will be used for validation, and __external_location_name__ will be ignored when checking
+        overlapping URLs.
+        
+        Either the __storage_credential_name__ or the cloud-specific credential must be provided.
+        
+        The caller must be a metastore admin or the storage credential owner or have the
+        **CREATE_EXTERNAL_LOCATION** privilege on the metastore and the storage credential.
+        
+        :param aws_iam_role: :class:`AwsIamRoleRequest` (optional)
+          The AWS IAM role configuration.
+        :param azure_managed_identity: :class:`AzureManagedIdentityRequest` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration.
+        :param cloudflare_api_token: :class:`CloudflareApiToken` (optional)
+          The Cloudflare API token configuration.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccountRequest` (optional)
+          The Databricks created GCP service account configuration.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate.
+        :param read_only: bool (optional)
+          Whether the storage credential is only usable for read operations.
+        :param storage_credential_name: str (optional)
+          The name of the storage credential to validate.
+        :param url: str (optional)
+          The external location url to validate.
+        
+        :returns: :class:`ValidateStorageCredentialResponse`
+        
\ No newline at end of file
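A hedged sketch of the validate call documented above; it assumes an existing credential named ``my_credential`` and an S3 URL its role can reach, both placeholders:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # When both a credential name and a URL are given, the URL drives validation.
    result = w.storage_credentials.validate(
        storage_credential_name='my_credential',
        url='s3://my-bucket/some/prefix',
    )
    # Each entry reports one validated operation and its outcome.
    for check in result.results or []:
        print(check.operation, check.result)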
diff --git a/docs/workspace/catalog/system_schemas.rst b/docs/workspace/catalog/system_schemas.rst
index 91e82ca57..2028a3623 100644
--- a/docs/workspace/catalog/system_schemas.rst
+++ b/docs/workspace/catalog/system_schemas.rst
@@ -5,53 +5,54 @@
 .. py:class:: SystemSchemasAPI
 
     A system schema is a schema that lives within the system catalog. A system schema may contain information
-about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.
+    about customer usage of Unity Catalog such as audit-logs, billing-logs, lineage information, etc.
 
     .. py:method:: disable(metastore_id: str, schema_name: str)
 
         Disable a system schema.
-
-Disables the system schema and removes it from the system catalog. The caller must be an account admin
-or a metastore admin.
-
-:param metastore_id: str
-  The metastore ID under which the system schema lives.
-:param schema_name: str
-  Full name of the system schema.
-
-
-
+        
+        Disables the system schema and removes it from the system catalog. The caller must be an account admin
+        or a metastore admin.
+        
+        :param metastore_id: str
+          The metastore ID under which the system schema lives.
+        :param schema_name: str
+          Full name of the system schema.
+        
+        
+        
 
     .. py:method:: enable(metastore_id: str, schema_name: str)
 
         Enable a system schema.
-
-Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
-metastore admin.
-
-:param metastore_id: str
-  The metastore ID under which the system schema lives.
-:param schema_name: str
-  Full name of the system schema.
-
-
-
+        
+        Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
+        metastore admin.
+        
+        :param metastore_id: str
+          The metastore ID under which the system schema lives.
+        :param schema_name: str
+          Full name of the system schema.
+        
+        
+        
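A short sketch of enabling a system schema and checking the result, assuming the caller is a metastore admin; the schema name ``access`` is illustrative:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Discover the metastore ID from the current workspace assignment.
    metastore_id = w.metastores.current().metastore_id

    # Enable one system schema, then list all schemas and their states.
    w.system_schemas.enable(metastore_id=metastore_id, schema_name='access')
    for schema in w.system_schemas.list(metastore_id=metastore_id):
        print(schema.schema, schema.state)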
 
     .. py:method:: list(metastore_id: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[SystemSchemaInfo]
 
         List system schemas.
-
-Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
-admin.
-
-:param metastore_id: str
-  The ID for the metastore in which the system schema resides.
-:param max_results: int (optional)
-  Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
-  value (recommended); - When set to a value greater than 0, the page length is the minimum of this
-  value and a server configured value; - When set to a value less than 0, an invalid parameter error
-  is returned; - If not set, all the schemas are returned (not recommended).
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`SystemSchemaInfo`
+        
+        Gets an array of system schemas for a metastore. The caller must be an account admin or a metastore
+        admin.
+        
+        :param metastore_id: str
+          The ID for the metastore in which the system schema resides.
+        :param max_results: int (optional)
+          Maximum number of schemas to return. - When set to 0, the page length is set to a server configured
+          value (recommended); - When set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - When set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all the schemas are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`SystemSchemaInfo`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/table_constraints.rst b/docs/workspace/catalog/table_constraints.rst
index 0b631408f..dd46c42f3 100644
--- a/docs/workspace/catalog/table_constraints.rst
+++ b/docs/workspace/catalog/table_constraints.rst
@@ -5,57 +5,58 @@
 .. py:class:: TableConstraintsAPI
 
     Primary key and foreign key constraints encode relationships between fields in tables.
-
-Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a
-primary key in another table. This primary key is the parent constraint of the foreign key and the table
-this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child
-constraint of its referenced primary key; the table of the foreign key is the child table of the primary
-key.
-
-You can declare primary keys and foreign keys as part of the table specification during table creation.
-You can also add or drop constraints on existing tables.
+    
+    Primary and foreign keys are informational only and are not enforced. Foreign keys must reference a
+    primary key in another table. This primary key is the parent constraint of the foreign key and the table
+    this primary key is on is the parent table of the foreign key. Similarly, the foreign key is the child
+    constraint of its referenced primary key; the table of the foreign key is the child table of the primary
+    key.
+    
+    You can declare primary keys and foreign keys as part of the table specification during table creation.
+    You can also add or drop constraints on existing tables.
 
     .. py:method:: create(full_name_arg: str, constraint: TableConstraint) -> TableConstraint
 
         Create a table constraint.
-
-Creates a new table constraint.
-
-For the table constraint creation to succeed, the user must satisfy both of these conditions: - the
-user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
-privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a
-__ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent
-table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the
-owner of the referenced parent table.
-
-:param full_name_arg: str
-  The full name of the table referenced by the constraint.
-:param constraint: :class:`TableConstraint`
-  A table constraint, as defined by *one* of the following fields being set:
-  __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
-
-:returns: :class:`TableConstraint`
-
+        
+        Creates a new table constraint.
+        
+        For the table constraint creation to succeed, the user must satisfy both of these conditions: - the
+        user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
+        privilege on the table's parent schema, and be the owner of the table. - if the new constraint is a
+        __ForeignKeyConstraint__, the user must have the **USE_CATALOG** privilege on the referenced parent
+        table's catalog, the **USE_SCHEMA** privilege on the referenced parent table's schema, and be the
+        owner of the referenced parent table.
+        
+        :param full_name_arg: str
+          The full name of the table referenced by the constraint.
+        :param constraint: :class:`TableConstraint`
+          A table constraint, as defined by *one* of the following fields being set:
+          __primary_key_constraint__, __foreign_key_constraint__, __named_table_constraint__.
+        
+        :returns: :class:`TableConstraint`
+        
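A minimal sketch of attaching a primary-key constraint, assuming an existing table ``main.default.orders`` with an ``order_id`` column (all names are placeholders):

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # Exactly one of the constraint variants may be set on TableConstraint.
    created = w.table_constraints.create(
        full_name_arg='main.default.orders',
        constraint=catalog.TableConstraint(
            primary_key_constraint=catalog.PrimaryKeyConstraint(
                name='orders_pk',
                child_columns=['order_id'],
            )),
    )
    print(created)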
 
     .. py:method:: delete(full_name: str, constraint_name: str, cascade: bool)
 
         Delete a table constraint.
-
-Deletes a table constraint.
-
-For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the
-user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
-privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is
-**true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG**
-privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner
-of the table.
-
-:param full_name: str
-  Full name of the table referenced by the constraint.
-:param constraint_name: str
-  The name of the constraint to delete.
-:param cascade: bool
-  If true, try deleting all child constraints of the current constraint. If false, reject this
-  operation if the current constraint has any child constraints.
-
-
+        
+        Deletes a table constraint.
+        
+        For the table constraint deletion to succeed, the user must satisfy both of these conditions: - the
+        user must have the **USE_CATALOG** privilege on the table's parent catalog, the **USE_SCHEMA**
+        privilege on the table's parent schema, and be the owner of the table. - if __cascade__ argument is
+        **true**, the user must have the following permissions on all of the child tables: the **USE_CATALOG**
+        privilege on the table's catalog, the **USE_SCHEMA** privilege on the table's schema, and be the owner
+        of the table.
+        
+        :param full_name: str
+          Full name of the table referenced by the constraint.
+        :param constraint_name: str
+          The name of the constraint to delete.
+        :param cascade: bool
+          If true, try deleting all child constraints of the current constraint. If false, reject this
+          operation if the current constraint has any child constraints.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 3fa411272..15cfb1cac 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -5,45 +5,45 @@
 .. py:class:: TablesAPI
 
     A table resides in the third layer of Unity Catalog’s three-level namespace. It contains rows of data.
-To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must
-have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT
-permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the
-USE_SCHEMA permission on its parent schema.
-
-A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table
-(rather than a managed or external table).
+    To create a table, users must have CREATE_TABLE and USE_SCHEMA permissions on the schema, and they must
+    have the USE_CATALOG permission on its parent catalog. To query a table, users must have the SELECT
+    permission on the table, and they must have the USE_CATALOG permission on its parent catalog and the
+    USE_SCHEMA permission on its parent schema.
+    
+    A table can be managed or external. From an API perspective, a __VIEW__ is a particular kind of table
+    (rather than a managed or external table).
 
     .. py:method:: delete(full_name: str)
 
         Delete a table.
-
-Deletes a table from the specified parent catalog and schema. The caller must be the owner of the
-parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the
-parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent
-catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-:param full_name: str
-  Full name of the table.
-
-
-
+        
+        Deletes a table from the specified parent catalog and schema. The caller must be the owner of the
+        parent catalog, have the **USE_CATALOG** privilege on the parent catalog and be the owner of the
+        parent schema, or be the owner of the table and have the **USE_CATALOG** privilege on the parent
+        catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        :param full_name: str
+          Full name of the table.
+        
+        
+        
 
     .. py:method:: exists(full_name: str) -> TableExistsResponse
 
         Get boolean reflecting if table exists.
-
-Gets if a table exists in the metastore for a specific catalog and schema. The caller must satisfy one
-of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the
-owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the
-**USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema,
-and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on
-the parent catalog * Have BROWSE privilege on the parent schema.
-
-:param full_name: str
-  Full name of the table.
-
-:returns: :class:`TableExistsResponse`
-
+        
+        Checks whether a table exists in the metastore for a specific catalog and schema. The caller must satisfy one
+        of the following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the
+        owner of the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the
+        **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema,
+        and either be the table owner or have the SELECT privilege on the table. * Have BROWSE privilege on
+        the parent catalog * Have BROWSE privilege on the parent schema.
+        
+        :param full_name: str
+          Full name of the table.
+        
+        :returns: :class:`TableExistsResponse`
+        
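A sketch of the existence check with a placeholder table name; the response carries a boolean flag rather than raising when the table is absent:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    resp = w.tables.exists(full_name='main.default.orders')
    if resp.table_exists:
        print('table is present')
    else:
        print('table is missing')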
 
     .. py:method:: get(full_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool]]) -> TableInfo
 
@@ -80,25 +80,25 @@ the parent catalog * Have BROWSE privilege on the parent schema.
             w.tables.delete(full_name=table_full_name)
 
         Get a table.
-
-Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
-following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of
-the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG**
-privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be
-the table owner or have the SELECT privilege on the table.
-
-:param full_name: str
-  Full name of the table.
-:param include_browse: bool (optional)
-  Whether to include tables in the response for which the principal can only access selective metadata
-  for
-:param include_delta_metadata: bool (optional)
-  Whether delta metadata should be included in the response.
-:param include_manifest_capabilities: bool (optional)
-  Whether to include a manifest containing capabilities the table has.
-
-:returns: :class:`TableInfo`
-
+        
+        Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
+        following requirements: * Be a metastore admin * Be the owner of the parent catalog * Be the owner of
+        the parent schema and have the USE_CATALOG privilege on the parent catalog * Have the **USE_CATALOG**
+        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema, and either be
+        the table owner or have the SELECT privilege on the table.
+        
+        :param full_name: str
+          Full name of the table.
+        :param include_browse: bool (optional)
+          Whether to include tables in the response for which the principal can only access selective metadata
+        :param include_delta_metadata: bool (optional)
+          Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
+        
+        :returns: :class:`TableInfo`
+        
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
@@ -124,41 +124,41 @@ the table owner or have the SELECT privilege on the table.
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List tables.
-
-Gets an array of all tables for the current metastore under the parent catalog and schema. The caller
-must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the
-latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
-catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific
-ordering of the elements in the array.
-
-:param catalog_name: str
-  Name of parent catalog for tables of interest.
-:param schema_name: str
-  Parent schema of tables.
-:param include_browse: bool (optional)
-  Whether to include tables in the response for which the principal can only access selective metadata
-  for
-:param include_delta_metadata: bool (optional)
-  Whether delta metadata should be included in the response.
-:param include_manifest_capabilities: bool (optional)
-  Whether to include a manifest containing capabilities the table has.
-:param max_results: int (optional)
-  Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
-  when set to a value greater than 0, the page length is the minimum of this value and a server
-  configured value; - when set to 0, the page length is set to a server configured value
-  (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-:param omit_columns: bool (optional)
-  Whether to omit the columns of the table from the response or not.
-:param omit_properties: bool (optional)
-  Whether to omit the properties of the table from the response or not.
-:param omit_username: bool (optional)
-  Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
-  not.
-:param page_token: str (optional)
-  Opaque token to send for the next page of results (pagination).
-
-:returns: Iterator over :class:`TableInfo`
-
+        
+        Gets an array of all tables for the current metastore under the parent catalog and schema. The caller
+        must be a metastore admin or an owner of (or have the **SELECT** privilege on) the table. For the
+        latter case, the caller must also be the owner or have the **USE_CATALOG** privilege on the parent
+        catalog and the **USE_SCHEMA** privilege on the parent schema. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param catalog_name: str
+          Name of parent catalog for tables of interest.
+        :param schema_name: str
+          Parent schema of tables.
+        :param include_browse: bool (optional)
+          Whether to include tables in the response for which the principal can only access selective metadata
+        :param include_delta_metadata: bool (optional)
+          Whether delta metadata should be included in the response.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
+        :param max_results: int (optional)
+          Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
+          when set to a value greater than 0, the page length is the minimum of this value and a server
+          configured value; - when set to 0, the page length is set to a server configured value
+          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
+        :param omit_columns: bool (optional)
+          Whether to omit the columns of the table from the response or not.
+        :param omit_properties: bool (optional)
+          Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+          not.
+        :param page_token: str (optional)
+          Opaque token to send for the next page of results (pagination).
+        
+        :returns: Iterator over :class:`TableInfo`
+        
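A sketch of iterating the listing with placeholder catalog and schema names; the SDK's iterator follows ``page_token`` internally, so ``max_results`` only tunes the page size:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # max_results=0 defers to the server-configured page length (recommended above).
    for table in w.tables.list(catalog_name='main', schema_name='default', max_results=0):
        print(table.full_name, table.table_type)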
 
     .. py:method:: list_summaries(catalog_name: str [, include_manifest_capabilities: Optional[bool], max_results: Optional[int], page_token: Optional[str], schema_name_pattern: Optional[str], table_name_pattern: Optional[str]]) -> Iterator[TableSummary]
 
@@ -185,49 +185,50 @@ ordering of the elements in the array.
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List table summaries.
-
-Gets an array of summaries for tables for a schema and catalog within the metastore. The table
-summaries returned are either:
-
-* summaries for tables (within the current metastore and parent catalog and schema), when the user is
-a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent
-catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or
-**USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the
-**USE_CATALOG** privilege on the parent catalog.
-
-There is no guarantee of a specific ordering of the elements in the array.
-
-:param catalog_name: str
-  Name of parent catalog for tables of interest.
-:param include_manifest_capabilities: bool (optional)
-  Whether to include a manifest containing capabilities the table has.
-:param max_results: int (optional)
-  Maximum number of summaries for tables to return. If not set, the page length is set to a server
-  configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
-  the minimum of this value and a server configured value (10000, as of 1/5/2024); - when set to 0,
-  the page length is set to a server configured value (10000, as of 1/5/2024) (recommended); - when
-  set to a value less than 0, an invalid parameter error is returned;
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-:param schema_name_pattern: str (optional)
-  A sql LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty.
-:param table_name_pattern: str (optional)
-  A sql LIKE pattern (% and _) for table names. All tables will be returned if not set or empty.
-
-:returns: Iterator over :class:`TableSummary`
-
+        
+        Gets an array of summaries for tables for a schema and catalog within the metastore. The table
+        summaries returned are either:
+        
+        * summaries for tables (within the current metastore and parent catalog and schema), when the user is
+        a metastore admin, or: * summaries for tables and schemas (within the current metastore and parent
+        catalog) for which the user has ownership or the **SELECT** privilege on the table and ownership or
+        **USE_SCHEMA** privilege on the schema, provided that the user also has ownership or the
+        **USE_CATALOG** privilege on the parent catalog.
+        
+        There is no guarantee of a specific ordering of the elements in the array.
+        
+        :param catalog_name: str
+          Name of parent catalog for tables of interest.
+        :param include_manifest_capabilities: bool (optional)
+          Whether to include a manifest containing capabilities the table has.
+        :param max_results: int (optional)
+          Maximum number of summaries for tables to return. If not set, the page length is set to a server
+          configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
+          the minimum of this value and a server configured value (10000, as of 1/5/2024); - when set to 0,
+          the page length is set to a server configured value (10000, as of 1/5/2024) (recommended); - when
+          set to a value less than 0, an invalid parameter error is returned;
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        :param schema_name_pattern: str (optional)
+          A SQL LIKE pattern (% and _) for schema names. All schemas will be returned if not set or empty.
+        :param table_name_pattern: str (optional)
+          A SQL LIKE pattern (% and _) for table names. All tables will be returned if not set or empty.
+        
+        :returns: Iterator over :class:`TableSummary`
+        
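A sketch of a filtered summary listing, with placeholder LIKE patterns:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Both patterns use SQL LIKE syntax: % matches any run, _ a single character.
    for summary in w.tables.list_summaries(catalog_name='main',
                                           schema_name_pattern='dev_%',
                                           table_name_pattern='%_raw'):
        print(summary.full_name, summary.table_type)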
 
     .. py:method:: update(full_name: str [, owner: Optional[str]])
 
         Update a table owner.
-
-Change the owner of the table. The caller must be the owner of the parent catalog, have the
-**USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner
-of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-privilege on the parent schema.
-
-:param full_name: str
-  Full name of the table.
-:param owner: str (optional)
-
-
+        
+        Change the owner of the table. The caller must be the owner of the parent catalog, have the
+        **USE_CATALOG** privilege on the parent catalog and be the owner of the parent schema, or be the owner
+        of the table and have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+        privilege on the parent schema.
+        
+        :param full_name: str
+          Full name of the table.
+        :param owner: str (optional)
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/temporary_table_credentials.rst b/docs/workspace/catalog/temporary_table_credentials.rst
index 9898af1ba..1acd462b7 100644
--- a/docs/workspace/catalog/temporary_table_credentials.rst
+++ b/docs/workspace/catalog/temporary_table_credentials.rst
@@ -5,31 +5,32 @@
 .. py:class:: TemporaryTableCredentialsAPI
 
     Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
-locationswhere table data is stored in Databricks. These credentials are employed to provide secure and
-time-limitedaccess to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
-has its own typeof credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
-Azure utilizesShared Access Signatures (SAS) for its data storage services, and Google Cloud supports
-temporary credentialsthrough OAuth 2.0.Temporary table credentials ensure that data access is limited in
-scope and duration, reducing the risk ofunauthorized access or misuse. To use the temporary table
-credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
-the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
-by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
-catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
-security reason.
+    locations where table data is stored in Databricks. These credentials are employed to provide secure and
+    time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
+    has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
+    Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud supports
+    temporary credentials through OAuth 2.0. Temporary table credentials ensure that data access is limited in
+    scope and duration, reducing the risk of unauthorized access or misuse. To use the temporary table
+    credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
+    the metastore level, and the user needs to be granted the EXTERNAL USE SCHEMA permission at the schema
+    level by the catalog admin. Note that EXTERNAL USE SCHEMA is a schema-level permission that can only be
+    granted by the catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the
+    schema, for security reasons.
 
     .. py:method:: generate_temporary_table_credentials( [, operation: Optional[TableOperation], table_id: Optional[str]]) -> GenerateTemporaryTableCredentialResponse
 
         Generate a temporary table credential.
-
-Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
-must have external_access_enabled flag set to true (default false). The caller must have
-EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
-owners.
-
-:param operation: :class:`TableOperation` (optional)
-  The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
-  specified, the credentials returned will have write permissions, otherwise, it will be read only.
-:param table_id: str (optional)
-  UUID of the table to read or write.
-
-:returns: :class:`GenerateTemporaryTableCredentialResponse`
+        
+        Gets a short-lived credential for directly accessing the table data on cloud storage. The metastore
+        must have the external_access_enabled flag set to true (default false). The caller must have the
+        EXTERNAL_USE_SCHEMA privilege on the parent schema; this privilege can only be granted by catalog
+        owners.
+        
+        :param operation: :class:`TableOperation` (optional)
+          The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
+          specified, the credentials returned will have write permissions, otherwise, it will be read only.
+        :param table_id: str (optional)
+          UUID of the table to read or write.
+        
+        :returns: :class:`GenerateTemporaryTableCredentialResponse`
+        
\ No newline at end of file
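A hedged sketch of requesting a read-only credential; it assumes the metastore flag and schema grant described above are already in place, and uses a placeholder table name to look up the UUID the API expects:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    # The credentials API is keyed by table UUID, which TablesAPI.get exposes.
    table = w.tables.get(full_name='main.default.orders')

    creds = w.temporary_table_credentials.generate_temporary_table_credentials(
        operation=catalog.TableOperation.READ,
        table_id=table.table_id,
    )
    # Alongside the URL and expiry, the response carries the cloud-specific
    # token (AWS STS, Azure SAS, or GCP OAuth, depending on the metastore).
    print(creds.url, creds.expiration_time)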
diff --git a/docs/workspace/catalog/volumes.rst b/docs/workspace/catalog/volumes.rst
index 62d23b88b..76e7c6c33 100644
--- a/docs/workspace/catalog/volumes.rst
+++ b/docs/workspace/catalog/volumes.rst
@@ -5,11 +5,11 @@
 .. py:class:: VolumesAPI
 
     Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing
-files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
-files, organizing data sets during the data exploration stages in data science, working with libraries
-that require access to the local file system on cluster machines, storing library and config files of
-arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or
-transforming and querying non-tabular data files in ETL.
+    files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
+    files, organizing data sets during the data exploration stages in data science, working with libraries
+    that require access to the local file system on cluster machines, storing library and config files of
+    arbitrary formats such as .whl or .txt centrally and providing secure access across workspaces to it, or
+    transforming and querying non-tabular data files in ETL.
 
     .. py:method:: create(catalog_name: str, schema_name: str, name: str, volume_type: VolumeType [, comment: Optional[str], storage_location: Optional[str]]) -> VolumeInfo
 
@@ -55,53 +55,53 @@ transforming and querying non-tabular data files in ETL.
             w.volumes.delete(name=created_volume.full_name)
 
         Create a Volume.
-
-Creates a new volume.
-
-The user could create either an external volume or a managed volume. An external volume will be
-created in the specified external location, while a managed volume will be located in the default
-location which is specified by the parent schema, or the parent catalog, or the Metastore.
-
-For the volume creation to succeed, the user must satisfy following conditions: - The caller must be a
-metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG**
-privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller
-must have **CREATE VOLUME** privilege on the parent schema.
-
-For an external volume, following conditions also need to satisfy - The caller must have **CREATE
-EXTERNAL VOLUME** privilege on the external location. - There are no other tables, nor volumes
-existing in the specified storage location. - The specified storage location is not under the location
-of other tables, nor volumes, or catalogs or schemas.
-
-:param catalog_name: str
-  The name of the catalog where the schema and the volume are
-:param schema_name: str
-  The name of the schema where the volume is
-:param name: str
-  The name of the volume
-:param volume_type: :class:`VolumeType`
-:param comment: str (optional)
-  The comment attached to the volume
-:param storage_location: str (optional)
-  The storage location on the cloud
-
-:returns: :class:`VolumeInfo`
-
+        
+        Creates a new volume.
+        
+        The user could create either an external volume or a managed volume. An external volume will be
+        created in the specified external location, while a managed volume will be located in the default
+        location which is specified by the parent schema, or the parent catalog, or the Metastore.
+        
+        For the volume creation to succeed, the user must satisfy the following conditions: - The caller must
+        be a metastore admin, or be the owner of the parent catalog and schema, or have the **USE_CATALOG**
+        privilege on the parent catalog and the **USE_SCHEMA** privilege on the parent schema. - The caller
+        must have the **CREATE VOLUME** privilege on the parent schema.
+        
+        For an external volume, the following conditions must also be satisfied: - The caller must have the
+        **CREATE EXTERNAL VOLUME** privilege on the external location. - There are no other tables or volumes
+        in the specified storage location. - The specified storage location is not under the location of
+        other tables, volumes, catalogs, or schemas.
+        
+        :param catalog_name: str
+          The name of the catalog where the schema and the volume are
+        :param schema_name: str
+          The name of the schema where the volume is
+        :param name: str
+          The name of the volume
+        :param volume_type: :class:`VolumeType`
+        :param comment: str (optional)
+          The comment attached to the volume
+        :param storage_location: str (optional)
+          The storage location on the cloud
+        
+        :returns: :class:`VolumeInfo`
+        
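A minimal sketch of creating a managed volume (catalog, schema, and volume names are placeholders); no ``storage_location`` is passed, so it lands in the default location described above:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import catalog

    w = WorkspaceClient()

    created = w.volumes.create(
        catalog_name='main',
        schema_name='default',
        name='raw_files',
        volume_type=catalog.VolumeType.MANAGED,
        comment='scratch space for ingestion jobs',
    )
    print(created.full_name)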
 
     .. py:method:: delete(name: str)
 
         Delete a Volume.
-
-Deletes a volume from the specified parent catalog and schema.
-
-The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
-also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-privilege on the parent schema.
-
-:param name: str
-  The three-level (fully qualified) name of the volume
-
-
-
+        
+        Deletes a volume from the specified parent catalog and schema.
+        
+        The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
+        also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+        privilege on the parent schema.
+        
+        :param name: str
+          The three-level (fully qualified) name of the volume
+        
+        
+        
 
     .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[VolumeInfo]
 
@@ -127,42 +127,42 @@ privilege on the parent schema.
             w.catalogs.delete(name=created_catalog.name, force=True)
 
         List Volumes.
-
-Gets an array of volumes for the current metastore under the parent catalog and schema.
-
-The returned volumes are filtered based on the privileges of the calling user. For example, the
-metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
-**READ VOLUME** privilege on the volume to recieve the volumes in the response. For the latter case,
-the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
-**USE_SCHEMA** privilege on the parent schema.
-
-There is no guarantee of a specific ordering of the elements in the array.
-
-:param catalog_name: str
-  The identifier of the catalog
-:param schema_name: str
-  The identifier of the schema
-:param include_browse: bool (optional)
-  Whether to include volumes in the response for which the principal can only access selective
-  metadata for
-:param max_results: int (optional)
-  Maximum number of volumes to return (page length).
-  
-  If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
-  to a value greater than 0, the page length is the minimum of this value and a server configured
-  value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
-  (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
-  error is returned;
-  
-  Note: this parameter controls only the maximum number of volumes to return. The actual number of
-  volumes returned in a page may be smaller than this value, including 0, even if there are more
-  pages.
-:param page_token: str (optional)
-  Opaque token returned by a previous request. It must be included in the request to retrieve the next
-  page of results (pagination).
-
-:returns: Iterator over :class:`VolumeInfo`
-
+        
+        Gets an array of volumes for the current metastore under the parent catalog and schema.
+        
+        The returned volumes are filtered based on the privileges of the calling user. For example, the
+        metastore admin is able to list all the volumes. A regular user needs to be the owner or have the
+        **READ VOLUME** privilege on the volume to receive the volumes in the response. For the latter case,
+        the caller must also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the
+        **USE_SCHEMA** privilege on the parent schema.
+        
+        There is no guarantee of a specific ordering of the elements in the array.
+        
+        :param catalog_name: str
+          The identifier of the catalog
+        :param schema_name: str
+          The identifier of the schema
+        :param include_browse: bool (optional)
+          Whether to include volumes in the response for which the principal can only access selective metadata
+        :param max_results: int (optional)
+          Maximum number of volumes to return (page length).
+          
+          If not set, the page length is set to a server configured value (10000, as of 1/29/2024). - when set
+          to a value greater than 0, the page length is the minimum of this value and a server configured
+          value (10000, as of 1/29/2024); - when set to 0, the page length is set to a server configured value
+          (10000, as of 1/29/2024) (recommended); - when set to a value less than 0, an invalid parameter
+          error is returned;
+          
+          Note: this parameter controls only the maximum number of volumes to return. The actual number of
+          volumes returned in a page may be smaller than this value, including 0, even if there are more
+          pages.
+        :param page_token: str (optional)
+          Opaque token returned by a previous request. It must be included in the request to retrieve the next
+          page of results (pagination).
+        
+        :returns: Iterator over :class:`VolumeInfo`
+        
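A sketch of the listing call with placeholder names; as with the other list methods, the returned iterator requests further pages on demand:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # max_results=0 takes the server-configured page length (recommended above).
    for volume in w.volumes.list(catalog_name='main', schema_name='default', max_results=0):
        print(volume.full_name, volume.volume_type)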
 
     .. py:method:: read(name: str [, include_browse: Optional[bool]]) -> VolumeInfo
 
@@ -210,21 +210,21 @@ There is no guarantee of a specific ordering of the elements in the array.
             w.volumes.delete(name=created_volume.full_name)
 
         Get a Volume.
-
-Gets a volume from the metastore for a specific catalog and schema.
-
-The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the
-volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
-on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
-
-:param name: str
-  The three-level (fully qualified) name of the volume
-:param include_browse: bool (optional)
-  Whether to include volumes in the response for which the principal can only access selective
-  metadata for
-
-:returns: :class:`VolumeInfo`
-
+        
+        Gets a volume from the metastore for a specific catalog and schema.
+        
+        The caller must be a metastore admin or an owner of (or have the **READ VOLUME** privilege on) the
+        volume. For the latter case, the caller must also be the owner or have the **USE_CATALOG** privilege
+        on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
+        
+        :param name: str
+          The three-level (fully qualified) name of the volume
+        :param include_browse: bool (optional)
+          Whether to include volumes in the response for which the principal can only access selective metadata
+        
+        :returns: :class:`VolumeInfo`
+        
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str]]) -> VolumeInfo
 
@@ -274,22 +274,23 @@ on the parent catalog and the **USE_SCHEMA** privilege on the parent schema.
             w.volumes.delete(name=created_volume.full_name)
 
         Update a Volume.
-
-Updates the specified volume under the specified parent catalog and schema.
-
-The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
-also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
-privilege on the parent schema.
-
-Currently only the name, the owner or the comment of the volume could be updated.
-
-:param name: str
-  The three-level (fully qualified) name of the volume
-:param comment: str (optional)
-  The comment attached to the volume
-:param new_name: str (optional)
-  New name for the volume.
-:param owner: str (optional)
-  The identifier of the user who owns the volume
-
-:returns: :class:`VolumeInfo`
+        
+        Updates the specified volume under the specified parent catalog and schema.
+        
+        The caller must be a metastore admin or an owner of the volume. For the latter case, the caller must
+        also be the owner or have the **USE_CATALOG** privilege on the parent catalog and the **USE_SCHEMA**
+        privilege on the parent schema.
+        
+        Currently only the name, the owner, or the comment of the volume can be updated.
+        
+        :param name: str
+          The three-level (fully qualified) name of the volume
+        :param comment: str (optional)
+          The comment attached to the volume
+        :param new_name: str (optional)
+          New name for the volume.
+        :param owner: str (optional)
+          The identifier of the user who owns the volume
+        
+        :returns: :class:`VolumeInfo`
+        
\ No newline at end of file
diff --git a/docs/workspace/catalog/workspace_bindings.rst b/docs/workspace/catalog/workspace_bindings.rst
index 32f218d00..08a74b29e 100644
--- a/docs/workspace/catalog/workspace_bindings.rst
+++ b/docs/workspace/catalog/workspace_bindings.rst
@@ -5,19 +5,19 @@
 .. py:class:: WorkspaceBindingsAPI
 
     A securable in Databricks can be configured as __OPEN__ or __ISOLATED__. An __OPEN__ securable can be
-accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list
-of workspaces. This API allows you to configure (bind) securables to workspaces.
-
-NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
-workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
-
-A securable's workspace bindings can be configured by a metastore admin or the owner of the securable.
-
-The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use
-the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
-ability to bind a securable in READ_ONLY mode (catalogs only).
-
-Securable types that support binding: - catalog - storage_credential - external_location
+    accessed from any workspace, while an __ISOLATED__ securable can only be accessed from a configured list
+    of workspaces. This API allows you to configure (bind) securables to workspaces.
+    
+    NOTE: The __isolation_mode__ is configured for the securable itself (using its Update method) and the
+    workspace bindings are only consulted when the securable's __isolation_mode__ is set to __ISOLATED__.
+    
+    A securable's workspace bindings can be configured by a metastore admin or the owner of the securable.
+    
+    The original path (/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}) is deprecated. Please use
+    the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
+    ability to bind a securable in READ_ONLY mode (catalogs only).
+    
+    Securable types that support binding: - catalog - storage_credential - external_location
 
     .. py:method:: get(name: str) -> CurrentWorkspaceBindings
 
@@ -40,37 +40,37 @@ Securable types that support binding: - catalog - storage_credential - external_
             w.catalogs.delete(name=created.name, force=True)
 
         Get catalog workspace bindings.
-
-Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
-catalog.
-
-:param name: str
-  The name of the catalog.
-
-:returns: :class:`CurrentWorkspaceBindings`
-
+        
+        Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+        catalog.
+        
+        :param name: str
+          The name of the catalog.
+        
+        :returns: :class:`CurrentWorkspaceBindings`
+        
 
     .. py:method:: get_bindings(securable_type: GetBindingsSecurableType, securable_name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[WorkspaceBinding]
 
         Get securable workspace bindings.
-
-Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
-securable.
-
-:param securable_type: :class:`GetBindingsSecurableType`
-  The type of the securable to bind to a workspace.
-:param securable_name: str
-  The name of the securable.
-:param max_results: int (optional)
-  Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
-  configured value (recommended); - When set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - When set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all the workspace bindings are returned (not recommended).
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`WorkspaceBinding`
-
+        
+        Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
+        securable.
+        
+        :param securable_type: :class:`GetBindingsSecurableType`
+          The type of the securable to bind to a workspace.
+        :param securable_name: str
+          The name of the securable.
+        :param max_results: int (optional)
+          Maximum number of workspace bindings to return. - When set to 0, the page length is set to a server
+          configured value (recommended); - When set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - When set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all the workspace bindings are returned (not recommended).
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`WorkspaceBinding`
+        
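A minimal sketch of paging through securable bindings; the iterator follows page_token internally, and the catalog name plus the exact enum import path are assumptions:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import GetBindingsSecurableType

    w = WorkspaceClient()
    # max_results is omitted; per the docs, 0 selects a server-configured
    # page length (recommended).
    for binding in w.workspace_bindings.get_bindings(
            securable_type=GetBindingsSecurableType.CATALOG,
            securable_name="main"):
        print(binding.workspace_id, binding.binding_type)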
 
     .. py:method:: update(name: str [, assign_workspaces: Optional[List[int]], unassign_workspaces: Optional[List[int]]]) -> CurrentWorkspaceBindings
 
@@ -96,34 +96,35 @@ securable.
             w.catalogs.delete(name=created.name, force=True)
 
         Update catalog workspace bindings.
-
-Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
-catalog.
-
-:param name: str
-  The name of the catalog.
-:param assign_workspaces: List[int] (optional)
-  A list of workspace IDs.
-:param unassign_workspaces: List[int] (optional)
-  A list of workspace IDs.
-
-:returns: :class:`CurrentWorkspaceBindings`
-
+        
+        Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
+        catalog.
+        
+        :param name: str
+          The name of the catalog.
+        :param assign_workspaces: List[int] (optional)
+          A list of workspace IDs.
+        :param unassign_workspaces: List[int] (optional)
+          A list of workspace IDs.
+        
+        :returns: :class:`CurrentWorkspaceBindings`
+        
 
     .. py:method:: update_bindings(securable_type: UpdateBindingsSecurableType, securable_name: str [, add: Optional[List[WorkspaceBinding]], remove: Optional[List[WorkspaceBinding]]]) -> WorkspaceBindingsResponse
 
         Update securable workspace bindings.
-
-Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
-securable.
-
-:param securable_type: :class:`UpdateBindingsSecurableType`
-  The type of the securable to bind to a workspace.
-:param securable_name: str
-  The name of the securable.
-:param add: List[:class:`WorkspaceBinding`] (optional)
-  List of workspace bindings
-:param remove: List[:class:`WorkspaceBinding`] (optional)
-  List of workspace bindings
-
-:returns: :class:`WorkspaceBindingsResponse`
+        
+        Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
+        securable.
+        
+        :param securable_type: :class:`UpdateBindingsSecurableType`
+          The type of the securable to bind to a workspace.
+        :param securable_name: str
+          The name of the securable.
+        :param add: List[:class:`WorkspaceBinding`] (optional)
+          List of workspace bindings
+        :param remove: List[:class:`WorkspaceBinding`] (optional)
+          List of workspace bindings
+        
+        :returns: :class:`WorkspaceBindingsResponse`
+        
\ No newline at end of file
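A minimal sketch of binding a catalog to a single workspace in READ_ONLY mode; the workspace ID, catalog name, and the class and enum import paths are assumptions:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.catalog import (UpdateBindingsSecurableType,
                                                WorkspaceBinding,
                                                WorkspaceBindingBindingType)

    w = WorkspaceClient()
    resp = w.workspace_bindings.update_bindings(
        securable_type=UpdateBindingsSecurableType.CATALOG,
        securable_name="main",
        add=[WorkspaceBinding(
            workspace_id=1234567890,  # hypothetical workspace ID
            binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY)])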
diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst
index e58981eb9..fe282543a 100644
--- a/docs/workspace/cleanrooms/clean_room_assets.rst
+++ b/docs/workspace/cleanrooms/clean_room_assets.rst
@@ -5,89 +5,90 @@
 .. py:class:: CleanRoomAssetsAPI
 
     Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the
-clean room.
+    clean room.
 
     .. py:method:: create(clean_room_name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
 
         Create an asset.
-
-Create a clean room asset —share an asset like a notebook or table into the clean room. For each UC
-asset that is added through this method, the clean room owner must also have enough privilege on the
-asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
-access the asset. Typically, you should use a group as the clean room owner.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param asset: :class:`CleanRoomAsset` (optional)
-  Metadata of the clean room asset
-
-:returns: :class:`CleanRoomAsset`
-
+        
+        Create a clean room asset: share an asset like a notebook or table into the clean room. For each UC
+        asset that is added through this method, the clean room owner must also have enough privilege on the
+        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+        access the asset. Typically, you should use a group as the clean room owner.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
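A minimal sketch of sharing a UC table into a clean room; the clean room name, the table's three-level name, and the asset_type enum member are assumptions for illustration:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                                   CleanRoomAssetAssetType)

    w = WorkspaceClient()
    # The clean room owner must keep sufficient privilege on the shared asset.
    asset = w.clean_room_assets.create(
        clean_room_name="demo-clean-room",
        asset=CleanRoomAsset(name="shared_catalog.shared_schema.my_table",
                             asset_type=CleanRoomAssetAssetType.TABLE))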
 
     .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str)
 
         Delete an asset.
-
-Delete a clean room asset - unshare/remove the asset from the clean room
-
-:param clean_room_name: str
-  Name of the clean room.
-:param asset_type: :class:`CleanRoomAssetAssetType`
-  The type of the asset.
-:param asset_full_name: str
-  The fully qualified name of the asset, it is same as the name field in CleanRoomAsset.
-
-
-
+        
+        Delete a clean room asset: unshare/remove the asset from the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        
+        
 
     .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset
 
         Get an asset.
-
-Get the details of a clean room asset by its type and full name.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param asset_type: :class:`CleanRoomAssetAssetType`
-  The type of the asset.
-:param asset_full_name: str
-  The fully qualified name of the asset, it is same as the name field in CleanRoomAsset.
-
-:returns: :class:`CleanRoomAsset`
-
+        
+        Get the details of a clean room asset by its type and full name.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        :returns: :class:`CleanRoomAsset`
+        
 
     .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset]
 
         List assets.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`CleanRoomAsset`
-
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomAsset`
+        
 
     .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
 
         Update an asset.
-
-Update a clean room asset. For example, updating the content of a notebook; changing the shared
-partitions of a table; etc.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param asset_type: :class:`CleanRoomAssetAssetType`
-  The type of the asset.
-:param name: str
-  A fully qualified name that uniquely identifies the asset within the clean room. This is also the
-  name displayed in the clean room UI.
-  
-  For UC securable assets (tables, volumes, etc.), the format is
-  *shared_catalog*.*shared_schema*.*asset_name*
-  
-  For notebooks, the name is the notebook file name.
-:param asset: :class:`CleanRoomAsset` (optional)
-  Metadata of the clean room asset
-
-:returns: :class:`CleanRoomAsset`
+        
+        Update a clean room asset. For example, update the content of a notebook or change the shared
+        partitions of a table.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param name: str
+          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+          name displayed in the clean room UI.
+          
+          For UC securable assets (tables, volumes, etc.), the format is
+          *shared_catalog*.*shared_schema*.*asset_name*
+          
+          For notebooks, the name is the notebook file name.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
\ No newline at end of file
diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst
index f8c421231..dcf59037c 100644
--- a/docs/workspace/cleanrooms/clean_room_task_runs.rst
+++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst
@@ -9,16 +9,17 @@
     .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun]
 
         List notebook task runs.
-
-List all the historical notebook task runs in a clean room.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param notebook_name: str (optional)
-  Notebook name
-:param page_size: int (optional)
-  The maximum number of task runs to return
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        
+        List all the historical notebook task runs in a clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param notebook_name: str (optional)
+          Notebook name
+        :param page_size: int (optional)
+          The maximum number of task runs to return
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        
\ No newline at end of file
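A minimal sketch of listing historical notebook task runs; the clean room and notebook names are hypothetical:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for run in w.clean_room_task_runs.list(clean_room_name="demo-clean-room",
                                           notebook_name="my_notebook",
                                           page_size=50):
        print(run)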
diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst
index 4a56740d9..8ef5d8827 100644
--- a/docs/workspace/cleanrooms/clean_rooms.rst
+++ b/docs/workspace/cleanrooms/clean_rooms.rst
@@ -5,90 +5,91 @@
 .. py:class:: CleanRoomsAPI
 
     A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
-environment where multiple parties can work together on sensitive enterprise data without direct access to
-each other’s data.
+    environment where multiple parties can work together on sensitive enterprise data without direct access to
+    each other’s data.
 
     .. py:method:: create( [, clean_room: Optional[CleanRoom]]) -> CleanRoom
 
         Create a clean room.
-
-Create a new clean room with the specified collaborators. This method is asynchronous; the returned
-name field inside the clean_room field can be used to poll the clean room status, using the
-:method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
-state, with only name, owner, comment, created_at and status populated. The clean room will be usable
-once it enters an ACTIVE state.
-
-The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
-
-:param clean_room: :class:`CleanRoom` (optional)
-
-:returns: :class:`CleanRoom`
-
+        
+        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+        name field inside the clean_room field can be used to poll the clean room status, using the
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+        once it enters an ACTIVE state.
+        
+        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+        
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
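Because creation is asynchronous, a caller typically polls :method:cleanrooms/get until the room leaves the PROVISIONING state. A minimal sketch, assuming the `status` field reads as below; a real clean room would also need collaborators configured on the CleanRoom, which are omitted here:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.cleanrooms import CleanRoom

    w = WorkspaceClient()
    created = w.clean_rooms.create(clean_room=CleanRoom(name="demo-clean-room"))
    # Poll until the clean room becomes usable (leaves PROVISIONING).
    while "PROVISIONING" in str(w.clean_rooms.get(name=created.name).status):
        time.sleep(10)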
 
     .. py:method:: create_output_catalog(clean_room_name: str [, output_catalog: Optional[CleanRoomOutputCatalog]]) -> CreateCleanRoomOutputCatalogResponse
 
         Create an output catalog.
-
-Create the output catalog of the clean room.
-
-:param clean_room_name: str
-  Name of the clean room.
-:param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
-
-:returns: :class:`CreateCleanRoomOutputCatalogResponse`
-
+        
+        Create the output catalog of the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+        
+        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
+        
 
     .. py:method:: delete(name: str)
 
         Delete a clean room.
-
-Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
-collaborators have not deleted the clean room, they will still have the clean room in their metastore,
-but it will be in a DELETED state and no operations other than deletion can be performed on it.
-
-:param name: str
-  Name of the clean room.
-
-
-
+        
+        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+        but it will be in a DELETED state and no operations other than deletion can be performed on it.
+        
+        :param name: str
+          Name of the clean room.
+        
+        
+        
 
     .. py:method:: get(name: str) -> CleanRoom
 
         Get a clean room.
-
-Get the details of a clean room given its name.
-
-:param name: str
-
-:returns: :class:`CleanRoom`
-
+        
+        Get the details of a clean room given its name.
+        
+        :param name: str
+        
+        :returns: :class:`CleanRoom`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom]
 
         List clean rooms.
-
-Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
-returned.
-
-:param page_size: int (optional)
-  Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`CleanRoom`
-
+        
+        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+        returned.
+        
+        :param page_size: int (optional)
+          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoom`
+        
 
     .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom
 
         Update a clean room.
-
-Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
-privilege, or be metastore admin.
-
-When the caller is a metastore admin, only the __owner__ field can be updated.
-
-:param name: str
-  Name of the clean room.
-:param clean_room: :class:`CleanRoom` (optional)
-
-:returns: :class:`CleanRoom`
+        
+        Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+        privilege, or be metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        :param name: str
+          Name of the clean room.
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst
index 0ea0dc1fa..65066964c 100644
--- a/docs/workspace/compute/cluster_policies.rst
+++ b/docs/workspace/compute/cluster_policies.rst
@@ -5,22 +5,22 @@
 .. py:class:: ClusterPoliciesAPI
 
     You can use cluster policies to control users' ability to configure clusters based on a set of rules.
-These rules specify which attributes or attribute values can be used during cluster creation. Cluster
-policies have ACLs that limit their use to specific users and groups.
-
-With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
-the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
-settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some
-fields. - Manage costs by setting limits on attributes that impact the hourly rate.
-
-Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
-creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
-policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
-permission and access to cluster policies can select the Unrestricted policy and policies they have access
-to. - A user that has access to only cluster policies, can select the policies they have access to.
-
-If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
-edit, and delete policies. Admin users also have access to all policies.
+    These rules specify which attributes or attribute values can be used during cluster creation. Cluster
+    policies have ACLs that limit their use to specific users and groups.
+    
+    With cluster policies, you can: - Auto-install cluster libraries on the next restart by listing them in
+    the policy's "libraries" field (Public Preview). - Limit users to creating clusters with the prescribed
+    settings. - Simplify the user interface, enabling more users to create clusters, by fixing and hiding some
+    fields. - Manage costs by setting limits on attributes that impact the hourly rate.
+    
+    Cluster policy permissions limit which policies a user can select in the Policy drop-down when the user
+    creates a cluster: - A user who has unrestricted cluster create permission can select the Unrestricted
+    policy and create fully-configurable clusters. - A user who has both unrestricted cluster create
+    permission and access to cluster policies can select the Unrestricted policy and policies they have access
+    to. - A user who has access only to cluster policies can select the policies they have access to.
+    
+    If no policies exist in the workspace, the Policy drop-down doesn't appear. Only admin users can create,
+    edit, and delete policies. Admin users also have access to all policies.
 
     .. py:method:: create( [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]]) -> CreatePolicyResponse
 
@@ -48,53 +48,53 @@ edit, and delete policies. Admin users also have access to all policies.
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Create a new policy.
-
-Creates a new policy with prescribed settings.
-
-:param definition: str (optional)
-  Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-  
-  [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-:param description: str (optional)
-  Additional human-readable description of the cluster policy.
-:param libraries: List[:class:`Library`] (optional)
-  A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
-  number of libraries is 500.
-:param max_clusters_per_user: int (optional)
-  Max number of clusters per user that can be active using this policy. If not present, there is no
-  max limit.
-:param name: str (optional)
-  Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-  characters.
-:param policy_family_definition_overrides: str (optional)
-  Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
-  document must be passed as a string and cannot be embedded in the requests.
-  
-  You can use this to customize the policy definition inherited from the policy family. Policy rules
-  specified here are merged into the inherited policy definition.
-  
-  [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-:param policy_family_id: str (optional)
-  ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
-  definition.
-  
-  Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
-  policy definition.
-
-:returns: :class:`CreatePolicyResponse`
-
+        
+        Creates a new policy with prescribed settings.
+        
+        :param definition: str (optional)
+          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+          
+          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+        :param description: str (optional)
+          Additional human-readable description of the cluster policy.
+        :param libraries: List[:class:`Library`] (optional)
+          A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
+          number of libraries is 500.
+        :param max_clusters_per_user: int (optional)
+          Max number of clusters per user that can be active using this policy. If not present, there is no
+          max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
+        :param policy_family_definition_overrides: str (optional)
+          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
+          document must be passed as a string and cannot be embedded in the requests.
+          
+          You can use this to customize the policy definition inherited from the policy family. Policy rules
+          specified here are merged into the inherited policy definition.
+          
+          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+        :param policy_family_id: str (optional)
+          ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
+          definition.
+          
+          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
+          policy definition.
+        
+        :returns: :class:`CreatePolicyResponse`
+        
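A minimal sketch of creating a policy from a definition document; the specific rule shown (fixing and hiding a single-node Spark profile) is an illustrative assumption about the policy definition language:

    import json

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    definition = json.dumps({
        "spark_conf.spark.databricks.cluster.profile": {
            "type": "fixed",
            "value": "singleNode",
            "hidden": True,
        }
    })
    created = w.cluster_policies.create(name="single-node-only",
                                        definition=definition)
    print(created.policy_id)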
 
     .. py:method:: delete(policy_id: str)
 
         Delete a cluster policy.
-
-Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
-
-:param policy_id: str
-  The ID of the policy to delete.
-
-
-
+        
+        Delete a policy for a cluster. Clusters governed by this policy can still run, but cannot be edited.
+        
+        :param policy_id: str
+          The ID of the policy to delete.
+        
+        
+        
 
     .. py:method:: edit(policy_id: str [, definition: Optional[str], description: Optional[str], libraries: Optional[List[Library]], max_clusters_per_user: Optional[int], name: Optional[str], policy_family_definition_overrides: Optional[str], policy_family_id: Optional[str]])
 
@@ -134,44 +134,44 @@ Delete a policy for a cluster. Clusters governed by this policy can still run, b
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Update a cluster policy.
-
-Update an existing policy for cluster. This operation may make some clusters governed by the previous
-policy invalid.
-
-:param policy_id: str
-  The ID of the policy to update.
-:param definition: str (optional)
-  Policy definition document expressed in [Databricks Cluster Policy Definition Language].
-  
-  [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-:param description: str (optional)
-  Additional human-readable description of the cluster policy.
-:param libraries: List[:class:`Library`] (optional)
-  A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
-  number of libraries is 500.
-:param max_clusters_per_user: int (optional)
-  Max number of clusters per user that can be active using this policy. If not present, there is no
-  max limit.
-:param name: str (optional)
-  Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
-  characters.
-:param policy_family_definition_overrides: str (optional)
-  Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
-  document must be passed as a string and cannot be embedded in the requests.
-  
-  You can use this to customize the policy definition inherited from the policy family. Policy rules
-  specified here are merged into the inherited policy definition.
-  
-  [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
-:param policy_family_id: str (optional)
-  ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
-  definition.
-  
-  Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
-  policy definition.
-
-
-
+        
+        Update an existing policy for cluster. This operation may make some clusters governed by the previous
+        policy invalid.
+        
+        :param policy_id: str
+          The ID of the policy to update.
+        :param definition: str (optional)
+          Policy definition document expressed in [Databricks Cluster Policy Definition Language].
+          
+          [Databricks Cluster Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+        :param description: str (optional)
+          Additional human-readable description of the cluster policy.
+        :param libraries: List[:class:`Library`] (optional)
+          A list of libraries to be installed on the next cluster restart that uses this policy. The maximum
+          number of libraries is 500.
+        :param max_clusters_per_user: int (optional)
+          Max number of clusters per user that can be active using this policy. If not present, there is no
+          max limit.
+        :param name: str (optional)
+          Cluster Policy name requested by the user. This has to be unique. Length must be between 1 and 100
+          characters.
+        :param policy_family_definition_overrides: str (optional)
+          Policy definition JSON document expressed in [Databricks Policy Definition Language]. The JSON
+          document must be passed as a string and cannot be embedded in the requests.
+          
+          You can use this to customize the policy definition inherited from the policy family. Policy rules
+          specified here are merged into the inherited policy definition.
+          
+          [Databricks Policy Definition Language]: https://docs.databricks.com/administration-guide/clusters/policy-definition.html
+        :param policy_family_id: str (optional)
+          ID of the policy family. The cluster policy's policy definition inherits the policy family's policy
+          definition.
+          
+          Cannot be used with `definition`. Use `policy_family_definition_overrides` instead to customize the
+          policy definition.
+        
+        
+        
 
     .. py:method:: get(policy_id: str) -> Policy
 
@@ -201,39 +201,39 @@ policy invalid.
             w.cluster_policies.delete(policy_id=created.policy_id)
 
         Get a cluster policy.
-
-Get a cluster policy entity. Creation and editing is available to admins only.
-
-:param policy_id: str
-  Canonical unique identifier for the Cluster Policy.
-
-:returns: :class:`Policy`
-
+        
+        Get a cluster policy entity. Creation and editing is available to admins only.
+        
+        :param policy_id: str
+          Canonical unique identifier for the Cluster Policy.
+        
+        :returns: :class:`Policy`
+        
 
     .. py:method:: get_permission_levels(cluster_policy_id: str) -> GetClusterPolicyPermissionLevelsResponse
 
         Get cluster policy permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param cluster_policy_id: str
-  The cluster policy for which to get or manage permissions.
-
-:returns: :class:`GetClusterPolicyPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param cluster_policy_id: str
+          The cluster policy for which to get or manage permissions.
+        
+        :returns: :class:`GetClusterPolicyPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(cluster_policy_id: str) -> ClusterPolicyPermissions
 
         Get cluster policy permissions.
-
-Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
-object.
-
-:param cluster_policy_id: str
-  The cluster policy for which to get or manage permissions.
-
-:returns: :class:`ClusterPolicyPermissions`
-
+        
+        Gets the permissions of a cluster policy. Cluster policies can inherit permissions from their root
+        object.
+        
+        :param cluster_policy_id: str
+          The cluster policy for which to get or manage permissions.
+        
+        :returns: :class:`ClusterPolicyPermissions`
+        
 
     .. py:method:: list( [, sort_column: Optional[ListSortColumn], sort_order: Optional[ListSortOrder]]) -> Iterator[Policy]
 
@@ -250,42 +250,43 @@ object.
             all = w.cluster_policies.list(compute.ListClusterPoliciesRequest())
 
         List cluster policies.
-
-Returns a list of policies accessible by the requesting user.
-
-:param sort_column: :class:`ListSortColumn` (optional)
-  The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
-  creation time. * `POLICY_NAME` - Sort result list by policy name.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC`
-  - Sort result list in ascending order.
-
-:returns: Iterator over :class:`Policy`
-
+        
+        Returns a list of policies accessible by the requesting user.
+        
+        :param sort_column: :class:`ListSortColumn` (optional)
+          The cluster policy attribute to sort by. * `POLICY_CREATION_TIME` - Sort result list by policy
+          creation time. * `POLICY_NAME` - Sort result list by policy name.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order in which the policies get listed. * `DESC` - Sort result list in descending order. * `ASC`
+          - Sort result list in ascending order.
+        
+        :returns: Iterator over :class:`Policy`
+        
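A minimal sketch of listing policies sorted by name in ascending order; the enum import path follows the `compute` module used in the example above:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import compute

    w = WorkspaceClient()
    for policy in w.cluster_policies.list(
            sort_column=compute.ListSortColumn.POLICY_NAME,
            sort_order=compute.ListSortOrder.ASC):
        print(policy.name)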
 
     .. py:method:: set_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions
 
         Set cluster policy permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param cluster_policy_id: str
-  The cluster policy for which to get or manage permissions.
-:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-
-:returns: :class:`ClusterPolicyPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param cluster_policy_id: str
+          The cluster policy for which to get or manage permissions.
+        :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
+        
+        :returns: :class:`ClusterPolicyPermissions`
+        
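A minimal sketch of replacing a policy's direct permissions with a single grant; the policy ID, group name, and permission-level enum member are assumptions:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.compute import (
        ClusterPolicyAccessControlRequest, ClusterPolicyPermissionLevel)

    w = WorkspaceClient()
    # Replaces existing direct permissions; an empty list would delete them all.
    w.cluster_policies.set_permissions(
        cluster_policy_id="ABC123DEF456",  # hypothetical policy ID
        access_control_list=[ClusterPolicyAccessControlRequest(
            group_name="data-engineers",
            permission_level=ClusterPolicyPermissionLevel.CAN_USE)])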
 
     .. py:method:: update_permissions(cluster_policy_id: str [, access_control_list: Optional[List[ClusterPolicyAccessControlRequest]]]) -> ClusterPolicyPermissions
 
         Update cluster policy permissions.
-
-Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
-object.
-
-:param cluster_policy_id: str
-  The cluster policy for which to get or manage permissions.
-:param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
-
-:returns: :class:`ClusterPolicyPermissions`
+        
+        Updates the permissions on a cluster policy. Cluster policies can inherit permissions from their root
+        object.
+        
+        :param cluster_policy_id: str
+          The cluster policy for which to get or manage permissions.
+        :param access_control_list: List[:class:`ClusterPolicyAccessControlRequest`] (optional)
+        
+        :returns: :class:`ClusterPolicyPermissions`
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index aef271caa..4e97857eb 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -5,25 +5,25 @@
 .. py:class:: ClustersExt
 
     The Clusters API allows you to create, start, edit, list, terminate, and delete clusters.
-
-Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing
-page for a list of the supported instance types and their corresponding DBUs.
-
-A Databricks cluster is a set of computation resources and configurations on which you run data
-engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming
-analytics, ad-hoc analytics, and machine learning.
-
-You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a
-distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data
-collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs.
-
-You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and
-restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
-analysis.
-
-IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
-keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
-administrator can pin a cluster to the cluster list.
+    
+    Databricks maps cluster node instance types to compute units known as DBUs. See the instance type pricing
+    page for a list of the supported instance types and their corresponding DBUs.
+    
+    A Databricks cluster is a set of computation resources and configurations on which you run data
+    engineering, data science, and data analytics workloads, such as production ETL pipelines, streaming
+    analytics, ad-hoc analytics, and machine learning.
+    
+    You run these workloads as a set of commands in a notebook or as an automated job. Databricks makes a
+    distinction between all-purpose clusters and job clusters. You use all-purpose clusters to analyze data
+    collaboratively using interactive notebooks. You use job clusters to run fast and robust automated jobs.
+    
+    You can create an all-purpose cluster using the UI, CLI, or REST API. You can manually terminate and
+    restart an all-purpose cluster. Multiple users can share such clusters to do collaborative interactive
+    analysis.
+    
+    IMPORTANT: Databricks retains cluster configuration information for terminated clusters for 30 days. To
+    keep an all-purpose cluster configuration even after it has been terminated for more than 30 days, an
+    administrator can pin a cluster to the cluster list.
 
     .. py:method:: change_owner(cluster_id: str, owner_username: str)
 
@@ -58,18 +58,18 @@ administrator can pin a cluster to the cluster list.
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Change cluster owner.
-
-Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform
-this operation. The service principal application ID can be supplied as an argument to
-`owner_username`.
-
-:param cluster_id: str
-  
-:param owner_username: str
-  New owner of the cluster_id after this RPC.
-
-
-
+        
+        Change the owner of the cluster. You must be an admin and the cluster must be terminated to perform
+        this operation. The service principal application ID can be supplied as an argument to
+        `owner_username`.
+        
+        :param cluster_id: str
+          
+        :param owner_username: str
+          New owner of the cluster_id after this RPC.
+        
+        
+        
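A minimal sketch of the ownership change; the cluster ID and user name are hypothetical, and the cluster must already be terminated:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Requires admin rights; a service principal application ID also works
    # as owner_username.
    w.clusters.change_owner(cluster_id="0123-456789-abcdefgh",
                            owner_username="new.owner@example.com")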
 
     .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
@@ -99,171 +99,171 @@ this operation. The service principal application ID can be supplied as an argum
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Create new cluster.
-
-Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
-necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud
-provider limitations (account limits, spot price, etc.) or transient network issues.
-
-If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
-Otherwise the cluster will terminate with an informative error message.
-
-Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
-the [create compute UI] and then copying the generated JSON definition from the UI.
-
-[create compute UI]: https://docs.databricks.com/compute/configure.html
-
-:param spark_version: str
-  The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
-  retrieved by using the :method:clusters/sparkVersions API call.
-:param apply_policy_default_values: bool (optional)
-  When set to true, fixed and default values from the policy will be used for fields that are omitted.
-  When set to false, only fixed values from the policy will be applied.
-:param autoscale: :class:`AutoScale` (optional)
-  Parameters needed in order to automatically scale clusters up and down based on load. Note:
-  autoscaling works best with DB runtime versions 3.0 or later.
-:param autotermination_minutes: int (optional)
-  Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
-  cluster will not be automatically terminated. If specified, the threshold must be between 10 and
-  10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
-:param aws_attributes: :class:`AwsAttributes` (optional)
-  Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
-  a set of default values will be used.
-:param azure_attributes: :class:`AzureAttributes` (optional)
-  Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
-  set of default values will be used.
-:param clone_from: :class:`CloneCluster` (optional)
-  When specified, this clones libraries from a source cluster during the creation of a new cluster.
-:param cluster_log_conf: :class:`ClusterLogConf` (optional)
-  The configuration for delivering spark logs to a long-term storage destination. Three kinds of
-  destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
-  specified for one cluster. If the conf is given, the logs will be delivered to the destination every
-  `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
-  of executor logs is `$destination/$clusterId/executor`.
-:param cluster_name: str (optional)
-  Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
-  the cluster name will be an empty string.
-:param custom_tags: Dict[str,str] (optional)
-  Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
-  instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-  
-  - Currently, Databricks allows at most 45 custom tags
-  
-  - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
-:param data_security_mode: :class:`DataSecurityMode` (optional)
-  Data security mode decides what data governance model to use when accessing data from a cluster.
-  
-  The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
-  choose the most appropriate access mode depending on your compute configuration. *
-  `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
-  for `SINGLE_USER`.
-  
-  The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
-  users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
-  A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
-  Most programming languages, cluster features and data governance features are available in this
-  mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
-  fully isolated so that they cannot see each other's data and credentials. Most data governance
-  features are supported in this mode. But programming languages and cluster features might be
-  limited.
-  
-  The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
-  future Databricks Runtime versions:
-  
-  * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
-  `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
-  clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-  standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
-  nor passthrough enabled.
-:param docker_image: :class:`DockerImage` (optional)
-:param driver_instance_pool_id: str (optional)
-  The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses
-  the instance pool with id (instance_pool_id) if the driver pool is not assigned.
-:param driver_node_type_id: str (optional)
-  The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
-  will be set as the same value as `node_type_id` defined above.
-:param enable_elastic_disk: bool (optional)
-  Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-  when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-  to function correctly - refer to the User Guide for more details.
-:param enable_local_disk_encryption: bool (optional)
-  Whether to enable LUKS on cluster VMs' local disks
-:param gcp_attributes: :class:`GcpAttributes` (optional)
-  Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
-  creation, a set of default values will be used.
-:param init_scripts: List[:class:`InitScriptInfo`] (optional)
-  The configuration for storing init scripts. Any number of destinations can be specified. The scripts
-  are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
-  logs are sent to `//init_scripts`.
-:param instance_pool_id: str (optional)
-  The optional ID of the instance pool to which the cluster belongs.
-:param is_single_node: bool (optional)
-  This field can only be used with `kind`.
-  
-  When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
-  and `num_workers`
-:param kind: :class:`Kind` (optional)
-  The kind of compute described by this compute specification.
-  
-  Depending on `kind`, different validations and default values will be applied.
-  
-  The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
-:param node_type_id: str (optional)
-  This field encodes, through a single value, the resources available to each of the Spark nodes in
-  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-  intensive workloads. A list of available node types can be retrieved by using the
-  :method:clusters/listNodeTypes API call.
-:param num_workers: int (optional)
-  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-  
-  Note: When reading the properties of a cluster, this field reflects the desired number of workers
-  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-  provisioned.
-:param policy_id: str (optional)
-  The ID of the cluster policy used to create the cluster if applicable.
-:param runtime_engine: :class:`RuntimeEngine` (optional)
-  Determines the cluster's runtime engine, either standard or Photon.
-  
-  This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
-  `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-  
-  If left unspecified, the runtime engine defaults to standard unless the spark_version contains
-  -photon-, in which case Photon will be used.
-:param single_user_name: str (optional)
-  Single user name if data_security_mode is `SINGLE_USER`
-:param spark_conf: Dict[str,str] (optional)
-  An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
-  can also pass in a string of extra JVM options to the driver and the executors via
-  `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
-:param spark_env_vars: Dict[str,str] (optional)
-  An object containing a set of optional, user-specified environment variable key-value pairs. Please
-  note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
-  launching the driver and workers.
-  
-  In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
-  `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks
-  managed environmental variables are included as well.
-  
-  Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
-  "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
-  -Dspark.shuffle.service.enabled=true"}`
-:param ssh_public_keys: List[str] (optional)
-  SSH public key contents that will be added to each Spark node in this cluster. The corresponding
-  private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
-  specified.
-:param use_ml_runtime: bool (optional)
-  This field can only be used with `kind`.
-  
-  `effective_spark_version` is determined by `spark_version` (DBR release), this field
-  `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
-:param workload_type: :class:`WorkloadType` (optional)
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Creates a new Spark cluster. This method will acquire new instances from the cloud provider if
+        necessary. Note: Databricks may not be able to acquire some of the requested nodes, due to cloud
+        provider limitations (account limits, spot price, etc.) or transient network issues.
+        
+        If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
+        Otherwise the cluster will terminate with an informative error message.
+        
+        Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
+        the [create compute UI] and then copying the generated JSON definition from the UI.
+        
+        [create compute UI]: https://docs.databricks.com/compute/configure.html
+        
+        :param spark_version: str
+          The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
+          retrieved by using the :method:clusters/sparkVersions API call.
+        :param apply_policy_default_values: bool (optional)
+          When set to true, fixed and default values from the policy will be used for fields that are omitted.
+          When set to false, only fixed values from the policy will be applied.
+        :param autoscale: :class:`AutoScale` (optional)
+          Parameters needed in order to automatically scale clusters up and down based on load. Note:
+          autoscaling works best with DB runtime versions 3.0 or later.
+        :param autotermination_minutes: int (optional)
+          Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
+          cluster will not be automatically terminated. If specified, the threshold must be between 10 and
+          10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
+        :param aws_attributes: :class:`AwsAttributes` (optional)
+          Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
+          a set of default values will be used.
+        :param azure_attributes: :class:`AzureAttributes` (optional)
+          Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
+          set of default values will be used.
+        :param clone_from: :class:`CloneCluster` (optional)
+          When specified, this clones libraries from a source cluster during the creation of a new cluster.
+        :param cluster_log_conf: :class:`ClusterLogConf` (optional)
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
+        :param cluster_name: str (optional)
+          Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
+          the cluster name will be an empty string.
+        :param custom_tags: Dict[str,str] (optional)
+          Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+          instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
+          
+          - Currently, Databricks allows at most 45 custom tags
+          
+          - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
+        :param data_security_mode: :class:`DataSecurityMode` (optional)
+          Data security mode decides what data governance model to use when accessing data from a cluster.
+          
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
+          
+          The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
+          future Databricks Runtime versions:
+          
+          * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
+          `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
+          clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
+          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a configuration that has
+          neither UC nor passthrough enabled.
+        :param docker_image: :class:`DockerImage` (optional)
+        :param driver_instance_pool_id: str (optional)
+          The optional ID of the instance pool to which the cluster's driver belongs. The cluster uses the
+          instance pool with id (instance_pool_id) if the driver pool is not assigned.
+        :param driver_node_type_id: str (optional)
+          The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
+          will be set as the same value as `node_type_id` defined above.
+        :param enable_elastic_disk: bool (optional)
+          Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
+          when its Spark workers are running low on disk space. This feature requires specific AWS permissions
+          to function correctly - refer to the User Guide for more details.
+        :param enable_local_disk_encryption: bool (optional)
+          Whether to enable LUKS on cluster VMs' local disks
+        :param gcp_attributes: :class:`GcpAttributes` (optional)
+          Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
+          creation, a set of default values will be used.
+        :param init_scripts: List[:class:`InitScriptInfo`] (optional)
+          The configuration for storing init scripts. Any number of destinations can be specified. The scripts
+          are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
+          logs are sent to `//init_scripts`.
+        :param instance_pool_id: str (optional)
+          The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+        :param node_type_id: str (optional)
+          This field encodes, through a single value, the resources available to each of the Spark nodes in
+          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+          intensive workloads. A list of available node types can be retrieved by using the
+          :method:clusters/listNodeTypes API call.
+        :param num_workers: int (optional)
+          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+          
+          Note: When reading the properties of a cluster, this field reflects the desired number of workers
+          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+          provisioned.
+        :param policy_id: str (optional)
+          The ID of the cluster policy used to create the cluster if applicable.
+        :param runtime_engine: :class:`RuntimeEngine` (optional)
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
+        :param single_user_name: str (optional)
+          Single user name if data_security_mode is `SINGLE_USER`
+        :param spark_conf: Dict[str,str] (optional)
+          An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
+          can also pass in a string of extra JVM options to the driver and the executors via
+          `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
+        :param spark_env_vars: Dict[str,str] (optional)
+          An object containing a set of optional, user-specified environment variable key-value pairs. Please
+          note that a key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
+          launching the driver and workers.
+          
+          In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
+          `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+          Databricks-managed environment variables are included as well.
+          
+          Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+          "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+          -Dspark.shuffle.service.enabled=true"}`
+        :param ssh_public_keys: List[str] (optional)
+          SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+          private keys can be used to log in with the user name `ubuntu` on port `2200`. Up to 10 keys can be
+          specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node or not.
+        :param workload_type: :class:`WorkloadType` (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
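+        A minimal sketch of a blocking create (assuming a configured :class:`WorkspaceClient` named `w`;
+        the cluster name is illustrative):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Resolve a runtime and node type instead of hard-coding version strings.
+            latest_lts = w.clusters.select_spark_version(latest=True, long_term_support=True)
+            node_type = w.clusters.select_node_type(local_disk=True)
+
+            # create() returns a Wait[ClusterDetails]; result() blocks until the cluster is RUNNING.
+            clstr = w.clusters.create(cluster_name='sdk-docs-example',
+                                      spark_version=latest_lts,
+                                      node_type_id=node_type,
+                                      autotermination_minutes=15,
+                                      num_workers=1).result()
+        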
 
     .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -298,18 +298,18 @@ the [create compute UI] and then copying the generated JSON definition from the
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Terminate cluster.
-
-Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the
-termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a
-`TERMINATING` or `TERMINATED` state, nothing will happen.
-
-:param cluster_id: str
-  The cluster to be terminated.
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_terminated for more details.
-
+        
+        Terminates the Spark cluster with the specified ID. The cluster is removed asynchronously. Once the
+        termination has completed, the cluster will be in a `TERMINATED` state. If the cluster is already in a
+        `TERMINATING` or `TERMINATED` state, nothing will happen.
+        
+        :param cluster_id: str
+          The cluster to be terminated.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_terminated for more details.
+        
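+        A minimal sketch (assuming a configured `w` and a `CLUSTER_ID` environment variable pointing at an
+        existing cluster; both are illustrative):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            cluster_id = os.environ['CLUSTER_ID']  # illustrative: any existing cluster's ID
+
+            # delete() returns a Wait[ClusterDetails]; result() blocks until TERMINATED.
+            w.clusters.delete(cluster_id=cluster_id).result()
+        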
 
     .. py:method:: delete_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
@@ -349,171 +349,171 @@ termination has completed, the cluster will be in a `TERMINATED` state. If the c
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Update cluster configuration.
-
-Updates the configuration of a cluster to match the provided attributes and size. A cluster can be
-updated if it is in a `RUNNING` or `TERMINATED` state.
-
-If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
-can take effect.
-
-If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it
-is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update
-a cluster in any other state will be rejected with an `INVALID_STATE` error code.
-
-Clusters created by the Databricks Jobs service cannot be edited.
-
-:param cluster_id: str
-  ID of the cluster
-:param spark_version: str
-  The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
-  retrieved by using the :method:clusters/sparkVersions API call.
-:param apply_policy_default_values: bool (optional)
-  When set to true, fixed and default values from the policy will be used for fields that are omitted.
-  When set to false, only fixed values from the policy will be applied.
-:param autoscale: :class:`AutoScale` (optional)
-  Parameters needed in order to automatically scale clusters up and down based on load. Note:
-  autoscaling works best with DB runtime versions 3.0 or later.
-:param autotermination_minutes: int (optional)
-  Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
-  cluster will not be automatically terminated. If specified, the threshold must be between 10 and
-  10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
-:param aws_attributes: :class:`AwsAttributes` (optional)
-  Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
-  a set of default values will be used.
-:param azure_attributes: :class:`AzureAttributes` (optional)
-  Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
-  set of default values will be used.
-:param cluster_log_conf: :class:`ClusterLogConf` (optional)
-  The configuration for delivering spark logs to a long-term storage destination. Three kinds of
-  destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
-  specified for one cluster. If the conf is given, the logs will be delivered to the destination every
-  `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
-  of executor logs is `$destination/$clusterId/executor`.
-:param cluster_name: str (optional)
-  Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
-  the cluster name will be an empty string.
-:param custom_tags: Dict[str,str] (optional)
-  Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
-  instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
-  
-  - Currently, Databricks allows at most 45 custom tags
-  
-  - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
-:param data_security_mode: :class:`DataSecurityMode` (optional)
-  Data security mode decides what data governance model to use when accessing data from a cluster.
-  
-  The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
-  choose the most appropriate access mode depending on your compute configuration. *
-  `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
-  for `SINGLE_USER`.
-  
-  The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
-  users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
-  A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
-  Most programming languages, cluster features and data governance features are available in this
-  mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
-  fully isolated so that they cannot see each other's data and credentials. Most data governance
-  features are supported in this mode. But programming languages and cluster features might be
-  limited.
-  
-  The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
-  future Databricks Runtime versions:
-  
-  * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
-  `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
-  clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
-  standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC
-  nor passthrough enabled.
-:param docker_image: :class:`DockerImage` (optional)
-:param driver_instance_pool_id: str (optional)
-  The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster uses
-  the instance pool with id (instance_pool_id) if the driver pool is not assigned.
-:param driver_node_type_id: str (optional)
-  The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
-  will be set as the same value as `node_type_id` defined above.
-:param enable_elastic_disk: bool (optional)
-  Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
-  when its Spark workers are running low on disk space. This feature requires specific AWS permissions
-  to function correctly - refer to the User Guide for more details.
-:param enable_local_disk_encryption: bool (optional)
-  Whether to enable LUKS on cluster VMs' local disks
-:param gcp_attributes: :class:`GcpAttributes` (optional)
-  Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
-  creation, a set of default values will be used.
-:param init_scripts: List[:class:`InitScriptInfo`] (optional)
-  The configuration for storing init scripts. Any number of destinations can be specified. The scripts
-  are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
-  logs are sent to `//init_scripts`.
-:param instance_pool_id: str (optional)
-  The optional ID of the instance pool to which the cluster belongs.
-:param is_single_node: bool (optional)
-  This field can only be used with `kind`.
-  
-  When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
-  and `num_workers`
-:param kind: :class:`Kind` (optional)
-  The kind of compute described by this compute specification.
-  
-  Depending on `kind`, different validations and default values will be applied.
-  
-  The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
-:param node_type_id: str (optional)
-  This field encodes, through a single value, the resources available to each of the Spark nodes in
-  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-  intensive workloads. A list of available node types can be retrieved by using the
-  :method:clusters/listNodeTypes API call.
-:param num_workers: int (optional)
-  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-  
-  Note: When reading the properties of a cluster, this field reflects the desired number of workers
-  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-  provisioned.
-:param policy_id: str (optional)
-  The ID of the cluster policy used to create the cluster if applicable.
-:param runtime_engine: :class:`RuntimeEngine` (optional)
-  Determines the cluster's runtime engine, either standard or Photon.
-  
-  This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
-  `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
-  
-  If left unspecified, the runtime engine defaults to standard unless the spark_version contains
-  -photon-, in which case Photon will be used.
-:param single_user_name: str (optional)
-  Single user name if data_security_mode is `SINGLE_USER`
-:param spark_conf: Dict[str,str] (optional)
-  An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
-  can also pass in a string of extra JVM options to the driver and the executors via
-  `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
-:param spark_env_vars: Dict[str,str] (optional)
-  An object containing a set of optional, user-specified environment variable key-value pairs. Please
-  note that key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
-  launching the driver and workers.
-  
-  In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
-  `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default databricks
-  managed environmental variables are included as well.
-  
-  Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
-  "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
-  -Dspark.shuffle.service.enabled=true"}`
-:param ssh_public_keys: List[str] (optional)
-  SSH public key contents that will be added to each Spark node in this cluster. The corresponding
-  private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
-  specified.
-:param use_ml_runtime: bool (optional)
-  This field can only be used with `kind`.
-  
-  `effective_spark_version` is determined by `spark_version` (DBR release), this field
-  `use_ml_runtime`, and whether `node_type_id` is gpu node or not.
-:param workload_type: :class:`WorkloadType` (optional)
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Updates the configuration of a cluster to match the provided attributes and size. A cluster can be
+        updated if it is in a `RUNNING` or `TERMINATED` state.
+        
+        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+        can take effect.
+        
+        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time it
+        is started using the `clusters/start` API, the new attributes will take effect. Any attempt to update
+        a cluster in any other state will be rejected with an `INVALID_STATE` error code.
+        
+        Clusters created by the Databricks Jobs service cannot be edited.
+        
+        :param cluster_id: str
+          ID of the cluster
+        :param spark_version: str
+          The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
+          retrieved by using the :method:clusters/sparkVersions API call.
+        :param apply_policy_default_values: bool (optional)
+          When set to true, fixed and default values from the policy will be used for fields that are omitted.
+          When set to false, only fixed values from the policy will be applied.
+        :param autoscale: :class:`AutoScale` (optional)
+          Parameters needed in order to automatically scale clusters up and down based on load. Note:
+          autoscaling works best with DB runtime versions 3.0 or later.
+        :param autotermination_minutes: int (optional)
+          Automatically terminates the cluster after it is inactive for this time in minutes. If not set, this
+          cluster will not be automatically terminated. If specified, the threshold must be between 10 and
+          10000 minutes. Users can also set this value to 0 to explicitly disable automatic termination.
+        :param aws_attributes: :class:`AwsAttributes` (optional)
+          Attributes related to clusters running on Amazon Web Services. If not specified at cluster creation,
+          a set of default values will be used.
+        :param azure_attributes: :class:`AzureAttributes` (optional)
+          Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
+          set of default values will be used.
+        :param cluster_log_conf: :class:`ClusterLogConf` (optional)
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
+        :param cluster_name: str (optional)
+          Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
+          the cluster name will be an empty string.
+        :param custom_tags: Dict[str,str] (optional)
+          Additional tags for cluster resources. Databricks will tag all cluster resources (e.g., AWS
+          instances and EBS volumes) with these tags in addition to `default_tags`. Notes:
+          
+          - Currently, Databricks allows at most 45 custom tags
+          
+          - Clusters can only reuse cloud resources if the resources' tags are a subset of the cluster tags
+        :param data_security_mode: :class:`DataSecurityMode` (optional)
+          Data security mode decides what data governance model to use when accessing data from a cluster.
+          
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
+          
+          The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
+          future Databricks Runtime versions:
+          
+          * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. *
+          `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency
+          clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on
+          standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a configuration that has
+          neither UC nor passthrough enabled.
+        :param docker_image: :class:`DockerImage` (optional)
+        :param driver_instance_pool_id: str (optional)
+          The optional ID of the instance pool to which the cluster's driver belongs. The cluster uses the
+          instance pool with id (instance_pool_id) if the driver pool is not assigned.
+        :param driver_node_type_id: str (optional)
+          The node type of the Spark driver. Note that this field is optional; if unset, the driver node type
+          will be set as the same value as `node_type_id` defined above.
+        :param enable_elastic_disk: bool (optional)
+          Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk space
+          when its Spark workers are running low on disk space. This feature requires specific AWS permissions
+          to function correctly - refer to the User Guide for more details.
+        :param enable_local_disk_encryption: bool (optional)
+          Whether to enable LUKS on cluster VMs' local disks
+        :param gcp_attributes: :class:`GcpAttributes` (optional)
+          Attributes related to clusters running on Google Cloud Platform. If not specified at cluster
+          creation, a set of default values will be used.
+        :param init_scripts: List[:class:`InitScriptInfo`] (optional)
+          The configuration for storing init scripts. Any number of destinations can be specified. The scripts
+          are executed sequentially in the order provided. If `cluster_log_conf` is specified, init script
+          logs are sent to `//init_scripts`.
+        :param instance_pool_id: str (optional)
+          The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+        :param node_type_id: str (optional)
+          This field encodes, through a single value, the resources available to each of the Spark nodes in
+          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+          intensive workloads. A list of available node types can be retrieved by using the
+          :method:clusters/listNodeTypes API call.
+        :param num_workers: int (optional)
+          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+          
+          Note: When reading the properties of a cluster, this field reflects the desired number of workers
+          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+          provisioned.
+        :param policy_id: str (optional)
+          The ID of the cluster policy used to create the cluster if applicable.
+        :param runtime_engine: :class:`RuntimeEngine` (optional)
+          Determines the cluster's runtime engine, either standard or Photon.
+          
+          This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
+          `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
+          
+          If left unspecified, the runtime engine defaults to standard unless the `spark_version` contains
+          `-photon-`, in which case Photon will be used.
+        :param single_user_name: str (optional)
+          Single user name if data_security_mode is `SINGLE_USER`
+        :param spark_conf: Dict[str,str] (optional)
+          An object containing a set of optional, user-specified Spark configuration key-value pairs. Users
+          can also pass in a string of extra JVM options to the driver and the executors via
+          `spark.driver.extraJavaOptions` and `spark.executor.extraJavaOptions` respectively.
+        :param spark_env_vars: Dict[str,str] (optional)
+          An object containing a set of optional, user-specified environment variable key-value pairs. Please
+          note that a key-value pair of the form (X,Y) will be exported as is (i.e., `export X='Y'`) while
+          launching the driver and workers.
+          
+          In order to specify an additional set of `SPARK_DAEMON_JAVA_OPTS`, we recommend appending them to
+          `$SPARK_DAEMON_JAVA_OPTS` as shown in the example below. This ensures that all default
+          Databricks-managed environment variables are included as well.
+          
+          Example Spark environment variables: `{"SPARK_WORKER_MEMORY": "28000m", "SPARK_LOCAL_DIRS":
+          "/local_disk0"}` or `{"SPARK_DAEMON_JAVA_OPTS": "$SPARK_DAEMON_JAVA_OPTS
+          -Dspark.shuffle.service.enabled=true"}`
+        :param ssh_public_keys: List[str] (optional)
+          SSH public key contents that will be added to each Spark node in this cluster. The corresponding
+          private keys can be used to log in with the user name `ubuntu` on port `2200`. Up to 10 keys can be
+          specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node or not.
+        :param workload_type: :class:`WorkloadType` (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
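+        A minimal sketch of a full edit (assuming a configured `w` and an illustrative `CLUSTER_ID`
+        environment variable; note that edit replaces the whole configuration, so resend every attribute
+        you want to keep):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            cluster_id = os.environ['CLUSTER_ID']  # illustrative: any existing cluster's ID
+
+            # edit() returns a Wait[ClusterDetails]; result() blocks until the cluster is RUNNING again.
+            w.clusters.edit(cluster_id=cluster_id,
+                            spark_version=w.clusters.select_spark_version(latest=True),
+                            node_type_id=w.clusters.select_node_type(local_disk=True),
+                            autotermination_minutes=60,
+                            num_workers=2).result()
+        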
 
     .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -573,30 +573,30 @@ Clusters created by the Databricks Jobs service cannot be edited.
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         List cluster activity events.
-
-Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more
-events to read, the response includes all the nparameters necessary to request the next page of
-events.
-
-:param cluster_id: str
-  The ID of the cluster to retrieve events about.
-:param end_time: int (optional)
-  The end time in epoch milliseconds. If empty, returns events up to the current time.
-:param event_types: List[:class:`EventType`] (optional)
-  An optional set of event types to filter on. If empty, all event types are returned.
-:param limit: int (optional)
-  The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed
-  value is 500.
-:param offset: int (optional)
-  The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results
-  are requested in descending order, the end_time field is required.
-:param order: :class:`GetEventsOrder` (optional)
-  The order to list events in; either "ASC" or "DESC". Defaults to "DESC".
-:param start_time: int (optional)
-  The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.
-
-:returns: Iterator over :class:`ClusterEvent`
-
+        
+        Retrieves a list of events about the activity of a cluster. This API is paginated. If there are more
+        events to read, the response includes all the parameters necessary to request the next page of
+        events.
+        
+        :param cluster_id: str
+          The ID of the cluster to retrieve events about.
+        :param end_time: int (optional)
+          The end time in epoch milliseconds. If empty, returns events up to the current time.
+        :param event_types: List[:class:`EventType`] (optional)
+          An optional set of event types to filter on. If empty, all event types are returned.
+        :param limit: int (optional)
+          The maximum number of events to include in a page of events. Defaults to 50, and maximum allowed
+          value is 500.
+        :param offset: int (optional)
+          The offset in the result set. Defaults to 0 (no offset). When an offset is specified and the results
+          are requested in descending order, the end_time field is required.
+        :param order: :class:`GetEventsOrder` (optional)
+          The order to list events in; either "ASC" or "DESC". Defaults to "DESC".
+        :param start_time: int (optional)
+          The start time in epoch milliseconds. If empty, returns events starting from the beginning of time.
+        
+        :returns: Iterator over :class:`ClusterEvent`
+        
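+        A minimal sketch (assuming a configured `w` and an illustrative `CLUSTER_ID` environment variable;
+        the returned iterator fetches further pages on demand):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import compute
+
+            w = WorkspaceClient()
+            cluster_id = os.environ['CLUSTER_ID']  # illustrative: any existing cluster's ID
+
+            for event in w.clusters.events(cluster_id=cluster_id,
+                                           order=compute.GetEventsOrder.DESC,
+                                           limit=50):
+                print(event.timestamp, event.type)
+        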
 
     .. py:method:: get(cluster_id: str) -> ClusterDetails
 
@@ -628,39 +628,39 @@ events.
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Get cluster info.
-
-Retrieves the information for a cluster given its identifier. Clusters can be described while they are
-running, or up to 60 days after they are terminated.
-
-:param cluster_id: str
-  The cluster about which to retrieve information.
-
-:returns: :class:`ClusterDetails`
-
+        
+        Retrieves the information for a cluster given its identifier. Clusters can be described while they are
+        running, or up to 60 days after they are terminated.
+        
+        :param cluster_id: str
+          The cluster about which to retrieve information.
+        
+        :returns: :class:`ClusterDetails`
+        
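+        A minimal sketch (assuming a configured `w` and an illustrative `CLUSTER_ID` environment variable):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            info = w.clusters.get(cluster_id=os.environ['CLUSTER_ID'])
+            print(info.cluster_name, info.state)
+        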
 
     .. py:method:: get_permission_levels(cluster_id: str) -> GetClusterPermissionLevelsResponse
 
         Get cluster permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param cluster_id: str
-  The cluster for which to get or manage permissions.
-
-:returns: :class:`GetClusterPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param cluster_id: str
+          The cluster for which to get or manage permissions.
+        
+        :returns: :class:`GetClusterPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(cluster_id: str) -> ClusterPermissions
 
         Get cluster permissions.
-
-Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
-
-:param cluster_id: str
-  The cluster for which to get or manage permissions.
-
-:returns: :class:`ClusterPermissions`
-
+        
+        Gets the permissions of a cluster. Clusters can inherit permissions from their root object.
+        
+        :param cluster_id: str
+          The cluster for which to get or manage permissions.
+        
+        :returns: :class:`ClusterPermissions`
+        
 
     .. py:method:: list( [, filter_by: Optional[ListClustersFilterBy], page_size: Optional[int], page_token: Optional[str], sort_by: Optional[ListClustersSortBy]]) -> Iterator[ClusterDetails]
 
@@ -677,23 +677,23 @@ Gets the permissions of a cluster. Clusters can inherit permissions from their r
             all = w.clusters.list(compute.ListClustersRequest())
 
         List clusters.
-
-Return information about all pinned and active clusters, and all clusters terminated within the last
-30 days. Clusters terminated prior to this period are not included.
-
-:param filter_by: :class:`ListClustersFilterBy` (optional)
-  Filters to apply to the list of clusters.
-:param page_size: int (optional)
-  Use this field to specify the maximum number of results to be returned by the server. The server may
-  further constrain the maximum number of results returned in a single page.
-:param page_token: str (optional)
-  Use next_page_token or prev_page_token returned from the previous request to list the next or
-  previous page of clusters respectively.
-:param sort_by: :class:`ListClustersSortBy` (optional)
-  Sort the list of clusters by a specific criteria.
-
-:returns: Iterator over :class:`ClusterDetails`
-
+        
+        Return information about all pinned and active clusters, and all clusters terminated within the last
+        30 days. Clusters terminated prior to this period are not included.
+        
+        :param filter_by: :class:`ListClustersFilterBy` (optional)
+          Filters to apply to the list of clusters.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          Use next_page_token or prev_page_token returned from the previous request to list the next or
+          previous page of clusters respectively.
+        :param sort_by: :class:`ListClustersSortBy` (optional)
+          Sort the list of clusters by a specific criterion.
+        
+        :returns: Iterator over :class:`ClusterDetails`
+        
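+        A minimal sketch (assuming a configured `w`; the iterator follows page tokens transparently):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            for c in w.clusters.list(page_size=20):
+                print(c.cluster_id, c.cluster_name, c.state)
+        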
 
     .. py:method:: list_node_types() -> ListNodeTypesResponse
 
@@ -709,37 +709,37 @@ Return information about all pinned and active clusters, and all clusters termin
             nodes = w.clusters.list_node_types()
 
         List node types.
-
-Returns a list of supported Spark node types. These node types can be used to launch a cluster.
-
-:returns: :class:`ListNodeTypesResponse`
-
+        
+        Returns a list of supported Spark node types. These node types can be used to launch a cluster.
+        
+        :returns: :class:`ListNodeTypesResponse`
+        
 
     .. py:method:: list_zones() -> ListAvailableZonesResponse
 
         List availability zones.
-
-Returns a list of availability zones where clusters can be created in (For example, us-west-2a). These
-zones can be used to launch a cluster.
-
-:returns: :class:`ListAvailableZonesResponse`
-
+        
+        Returns a list of availability zones in which clusters can be created (for example, us-west-2a).
+        These zones can be used to launch a cluster.
+        
+        :returns: :class:`ListAvailableZonesResponse`
+        
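+        A minimal sketch (assuming a configured `w` and that the response exposes `zones` and
+        `default_zone` fields):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            zones = w.clusters.list_zones()
+            print(zones.default_zone, zones.zones)
+        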
 
     .. py:method:: permanent_delete(cluster_id: str)
 
         Permanently delete cluster.
-
-Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously
-removed.
-
-In addition, users will no longer see permanently deleted clusters in the cluster list, and API users
-can no longer perform any action on permanently deleted clusters.
-
-:param cluster_id: str
-  The cluster to be deleted.
-
-
-
+        
+        Permanently deletes a Spark cluster. This cluster is terminated and resources are asynchronously
+        removed.
+        
+        In addition, users will no longer see permanently deleted clusters in the cluster list, and API users
+        can no longer perform any action on permanently deleted clusters.
+        
+        :param cluster_id: str
+          The cluster to be deleted.
+        
+        
+        
 
     .. py:method:: pin(cluster_id: str)
 
@@ -771,15 +771,15 @@ can no longer perform any action on permanently deleted clusters.
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Pin cluster.
-
-Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a
-cluster that is already pinned will have no effect. This API can only be called by workspace admins.
-
-:param cluster_id: str
-  
-
-
-
+        
+        Pinning a cluster ensures that the cluster will always be returned by the ListClusters API. Pinning a
+        cluster that is already pinned will have no effect. This API can only be called by workspace admins.
+        
+        :param cluster_id: str
+          
+        
+        
+        
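+        A minimal sketch (assuming a configured `w` and an illustrative `CLUSTER_ID` environment variable):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            cluster_id = os.environ['CLUSTER_ID']  # illustrative: any existing cluster's ID
+
+            w.clusters.pin(cluster_id=cluster_id)    # always returned by ListClusters from now on
+            w.clusters.unpin(cluster_id=cluster_id)  # allow the cluster to age out again
+        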
 
     .. py:method:: resize(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int]]) -> Wait[ClusterDetails]
 
@@ -811,29 +811,29 @@ cluster that is already pinned will have no effect. This API can only be called
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Resize cluster.
-
-Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a
-`RUNNING` state.
-
-:param cluster_id: str
-  The cluster to be resized.
-:param autoscale: :class:`AutoScale` (optional)
-  Parameters needed in order to automatically scale clusters up and down based on load. Note:
-  autoscaling works best with DB runtime versions 3.0 or later.
-:param num_workers: int (optional)
-  Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
-  `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
-  
-  Note: When reading the properties of a cluster, this field reflects the desired number of workers
-  rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
-  workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
-  the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
-  provisioned.
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Resizes a cluster to have a desired number of workers. This will fail unless the cluster is in a
+        `RUNNING` state.
+        
+        :param cluster_id: str
+          The cluster to be resized.
+        :param autoscale: :class:`AutoScale` (optional)
+          Parameters needed in order to automatically scale clusters up and down based on load. Note:
+          autoscaling works best with DB runtime versions 3.0 or later.
+        :param num_workers: int (optional)
+          Number of worker nodes that this cluster should have. A cluster has one Spark Driver and
+          `num_workers` Executors for a total of `num_workers` + 1 Spark nodes.
+          
+          Note: When reading the properties of a cluster, this field reflects the desired number of workers
+          rather than the actual current number of workers. For instance, if a cluster is resized from 5 to 10
+          workers, this field will immediately be updated to reflect the target size of 10 workers, whereas
+          the workers listed in `spark_info` will gradually increase from 5 to 10 as the new nodes are
+          provisioned.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
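+        A minimal sketch (assuming a configured `w` and an illustrative `CLUSTER_ID` environment variable
+        naming a cluster in `RUNNING` state):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # resize() returns a Wait[ClusterDetails]; result() blocks until the new size is in effect.
+            w.clusters.resize(cluster_id=os.environ['CLUSTER_ID'], num_workers=4).result()
+        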
 
     .. py:method:: resize_and_wait(cluster_id: str [, autoscale: Optional[AutoScale], num_workers: Optional[int], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -868,19 +868,19 @@ Resizes a cluster to have a desired number of workers. This will fail unless the
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Restart cluster.
-
-Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state,
-nothing will happen.
-
-:param cluster_id: str
-  The cluster to be started.
-:param restart_user: str (optional)
-  
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Restarts a Spark cluster with the supplied ID. If the cluster is not currently in a `RUNNING` state,
+        nothing will happen.
+        
+        :param cluster_id: str
+          The cluster to be started.
+        :param restart_user: str (optional)
+          
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
 
     .. py:method:: restart_and_wait(cluster_id: str [, restart_user: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -900,22 +900,22 @@ nothing will happen.
 
         Selects smallest available node type given the conditions.
 
-:param min_memory_gb: int
-:param gb_per_core: int
-:param min_cores: int
-:param min_gpus: int
-:param local_disk: bool
-:param local_disk_min_size: bool
-:param category: bool
-:param photon_worker_capable: bool
-:param photon_driver_capable: bool
-:param graviton: bool
-:param is_io_cache_enabled: bool
-:param support_port_forwarding: bool
-:param fleet: bool
-
-:returns: `node_type` compatible string
-
+        :param min_memory_gb: int
+        :param gb_per_core: int
+        :param min_cores: int
+        :param min_gpus: int
+        :param local_disk: bool
+        :param local_disk_min_size: int
+        :param category: str
+        :param photon_worker_capable: bool
+        :param photon_driver_capable: bool
+        :param graviton: bool
+        :param is_io_cache_enabled: bool
+        :param support_port_forwarding: bool
+        :param fleet: str
+
+        :returns: `node_type` compatible string
+        
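+        A minimal sketch (assuming a configured `w`; the constraints are illustrative):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Smallest node type with at least 16 GB of memory, 4 cores and local disk.
+            node_type = w.clusters.select_node_type(min_memory_gb=16, min_cores=4, local_disk=True)
+        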
 
     .. py:method:: select_spark_version(long_term_support: bool = False, beta: bool = False, latest: bool = True, ml: bool = False, genomics: bool = False, gpu: bool = False, scala: str = 2.12, spark_version: str, photon: bool = False, graviton: bool = False) -> str
 
@@ -932,42 +932,42 @@ nothing will happen.
 
         Selects the latest Databricks Runtime Version.
 
-:param long_term_support: bool
-:param beta: bool
-:param latest: bool
-:param ml: bool
-:param genomics: bool
-:param gpu: bool
-:param scala: str
-:param spark_version: str
-:param photon: bool
-:param graviton: bool
-
-:returns: `spark_version` compatible string
+        :param long_term_support: bool
+        :param beta: bool
+        :param latest: bool
+        :param ml: bool
+        :param genomics: bool
+        :param gpu: bool
+        :param scala: str
+        :param spark_version: str
+        :param photon: bool
+        :param graviton: bool
 
+        :returns: `spark_version` compatible string
+        
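+        A minimal sketch (assuming a configured `w`):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Latest long-term-support runtime; pass ml=True for the ML runtime instead.
+            version = w.clusters.select_spark_version(latest=True, long_term_support=True)
+        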
 
     .. py:method:: set_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions
 
         Set cluster permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param cluster_id: str
-  The cluster for which to get or manage permissions.
-:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-
-:returns: :class:`ClusterPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param cluster_id: str
+          The cluster for which to get or manage permissions.
+        :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
+        
+        :returns: :class:`ClusterPermissions`
+        
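+        A minimal sketch (assuming a configured `w`, an illustrative `CLUSTER_ID` environment variable, and
+        an illustrative group name):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import compute
+
+            w = WorkspaceClient()
+
+            # Replaces any existing direct permissions on the cluster.
+            w.clusters.set_permissions(
+                cluster_id=os.environ['CLUSTER_ID'],
+                access_control_list=[
+                    compute.ClusterAccessControlRequest(
+                        group_name='data-scientists',  # illustrative group
+                        permission_level=compute.ClusterPermissionLevel.CAN_RESTART)
+                ])
+        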
 
     .. py:method:: spark_versions() -> GetSparkVersionsResponse
 
         List available Spark versions.
-
-Returns the list of available Spark versions. These versions can be used to launch a cluster.
-
-:returns: :class:`GetSparkVersionsResponse`
-
+        
+        Returns the list of available Spark versions. These versions can be used to launch a cluster.
+        
+        :returns: :class:`GetSparkVersionsResponse`
+        
 
     .. py:method:: start(cluster_id: str) -> Wait[ClusterDetails]
 
@@ -999,21 +999,21 @@ Returns the list of available Spark versions. These versions can be used to laun
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Start terminated cluster.
-
-Starts a terminated Spark cluster with the supplied ID. This works similar to `createCluster` except:
-
-* The previous cluster id and attributes are preserved. * The cluster starts with the last specified
-cluster size. * If the previous cluster was an autoscaling cluster, the current cluster starts with
-the minimum number of nodes. * If the cluster is not currently in a `TERMINATED` state, nothing will
-happen. * Clusters launched to run a job cannot be started.
-
-:param cluster_id: str
-  The cluster to be started.
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Starts a terminated Spark cluster with the supplied ID. This works similarly to `createCluster` except:
+        
+        * The previous cluster id and attributes are preserved. * The cluster starts with the last specified
+        cluster size. * If the previous cluster was an autoscaling cluster, the current cluster starts with
+        the minimum number of nodes. * If the cluster is not currently in a `TERMINATED` state, nothing will
+        happen. * Clusters launched to run a job cannot be started.
+        
+        :param cluster_id: str
+          The cluster to be started.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
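+        A minimal sketch (assuming a configured `w` and an illustrative `CLUSTER_ID` environment variable
+        naming a terminated cluster):
+
+        .. code-block::
+
+            import os
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # start() returns a Wait[ClusterDetails]; result() blocks until the cluster is RUNNING.
+            w.clusters.start(cluster_id=os.environ['CLUSTER_ID']).result()
+        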
 
     .. py:method:: start_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
@@ -1048,44 +1048,44 @@ happen. * Clusters launched to run a job cannot be started.
             w.clusters.permanent_delete(cluster_id=clstr.cluster_id)
 
         Unpin cluster.
-
-Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
-Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace
-admins.
-
-:param cluster_id: str
-  
-
-
-
+        
+        Unpinning a cluster will allow the cluster to eventually be removed from the ListClusters API.
+        Unpinning a cluster that is not pinned will have no effect. This API can only be called by workspace
+        admins.
+        
+        :param cluster_id: str
+          
+        
+        
+        
 
     .. py:method:: update(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource]]) -> Wait[ClusterDetails]
 
         Update cluster configuration (partial).
-
-Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
-fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
-in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
-restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
-state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
-is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
-rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
-updated.
-
-:param cluster_id: str
-  ID of the cluster.
-:param update_mask: str
-  Specifies which fields of the cluster will be updated. This is required in the POST request. The
-  update mask should be supplied as a single string. To specify multiple fields, separate them with
-  commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
-  string but omit it from the `cluster` object.
-:param cluster: :class:`UpdateClusterResource` (optional)
-  The cluster to be updated.
-
-:returns:
-  Long-running operation waiter for :class:`ClusterDetails`.
-  See :method:wait_get_cluster_running for more details.
-
+        
+        Updates the configuration of a cluster to match the partial set of attributes and size. Denote which
+        fields to update using the `update_mask` field in the request body. A cluster can be updated if it is
+        in a `RUNNING` or `TERMINATED` state. If a cluster is updated while in a `RUNNING` state, it will be
+        restarted so that the new attributes can take effect. If a cluster is updated while in a `TERMINATED`
+        state, it will remain `TERMINATED`. The updated attributes will take effect the next time the cluster
+        is started using the `clusters/start` API. Attempts to update a cluster in any other state will be
+        rejected with an `INVALID_STATE` error code. Clusters created by the Databricks Jobs service cannot be
+        updated.
+        
+        :param cluster_id: str
+          ID of the cluster.
+        :param update_mask: str
+          Specifies which fields of the cluster will be updated. This is required in the POST request. The
+          update mask should be supplied as a single string. To specify multiple fields, separate them with
+          commas (no spaces). To delete a field from a cluster configuration, add it to the `update_mask`
+          string but omit it from the `cluster` object.
+        :param cluster: :class:`UpdateClusterResource` (optional)
+          The cluster to be updated.
+        
+        :returns:
+          Long-running operation waiter for :class:`ClusterDetails`.
+          See :method:wait_get_cluster_running for more details.
+        
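        A sketch of the `update_mask` semantics described above; the cluster ID and autoscale bounds are
        illustrative, and the `autoscale` field name is an assumption following :class:`UpdateClusterResource`.
        Only the masked field is modified; everything else is preserved:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()
            # Only the field named in update_mask is changed on the cluster
            w.clusters.update(cluster_id="1234-567890-abcde123",
                              update_mask="autoscale",
                              cluster=compute.UpdateClusterResource(
                                  autoscale=compute.AutoScale(min_workers=1,
                                                              max_workers=4))).result()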
 
     .. py:method:: update_and_wait(cluster_id: str, update_mask: str [, cluster: Optional[UpdateClusterResource], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
@@ -1093,15 +1093,15 @@ updated.
     .. py:method:: update_permissions(cluster_id: str [, access_control_list: Optional[List[ClusterAccessControlRequest]]]) -> ClusterPermissions
 
         Update cluster permissions.
-
-Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
-
-:param cluster_id: str
-  The cluster for which to get or manage permissions.
-:param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
-
-:returns: :class:`ClusterPermissions`
-
+        
+        Updates the permissions on a cluster. Clusters can inherit permissions from their root object.
+        
+        :param cluster_id: str
+          The cluster for which to get or manage permissions.
+        :param access_control_list: List[:class:`ClusterAccessControlRequest`] (optional)
+        
+        :returns: :class:`ClusterPermissions`
+        
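        A short sketch of updating cluster permissions; the user name is illustrative, and
        `CAN_RESTART` is one of the defined `ClusterPermissionLevel` values:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()
            w.clusters.update_permissions(
                cluster_id="1234-567890-abcde123",
                access_control_list=[
                    compute.ClusterAccessControlRequest(
                        user_name="user@example.com",
                        permission_level=compute.ClusterPermissionLevel.CAN_RESTART)
                ])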
 
     .. py:method:: wait_get_cluster_running(cluster_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[ClusterDetails], None]]) -> ClusterDetails
 
diff --git a/docs/workspace/compute/command_execution.rst b/docs/workspace/compute/command_execution.rst
index 1b6f7e9fb..916a48ba5 100644
--- a/docs/workspace/compute/command_execution.rst
+++ b/docs/workspace/compute/command_execution.rst
@@ -5,24 +5,24 @@
 .. py:class:: CommandExecutionAPI
 
     This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters. This API
-only supports (classic) all-purpose clusters. Serverless compute is not supported.
+    only supports (classic) all-purpose clusters. Serverless compute is not supported.
 
     .. py:method:: cancel( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str]]) -> Wait[CommandStatusResponse]
 
         Cancel a command.
-
-Cancels a currently running command within an execution context.
-
-The command ID is obtained from a prior successful call to __execute__.
-
-:param cluster_id: str (optional)
-:param command_id: str (optional)
-:param context_id: str (optional)
-
-:returns:
-  Long-running operation waiter for :class:`CommandStatusResponse`.
-  See :method:wait_command_status_command_execution_cancelled for more details.
-
+        
+        Cancels a currently running command within an execution context.
+        
+        The command ID is obtained from a prior successful call to __execute__.
+        
+        :param cluster_id: str (optional)
+        :param command_id: str (optional)
+        :param context_id: str (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`CommandStatusResponse`.
+          See :method:wait_command_status_command_execution_cancelled for more details.
+        
 
     .. py:method:: cancel_and_wait( [, cluster_id: Optional[str], command_id: Optional[str], context_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> CommandStatusResponse
 
@@ -30,29 +30,29 @@ The command ID is obtained from a prior successful call to __execute__.
     .. py:method:: command_status(cluster_id: str, context_id: str, command_id: str) -> CommandStatusResponse
 
         Get command info.
-
-Gets the status of and, if available, the results from a currently executing command.
-
-The command ID is obtained from a prior successful call to __execute__.
-
-:param cluster_id: str
-:param context_id: str
-:param command_id: str
-
-:returns: :class:`CommandStatusResponse`
-
+        
+        Gets the status of and, if available, the results from a currently executing command.
+        
+        The command ID is obtained from a prior successful call to __execute__.
+        
+        :param cluster_id: str
+        :param context_id: str
+        :param command_id: str
+        
+        :returns: :class:`CommandStatusResponse`
+        
 
     .. py:method:: context_status(cluster_id: str, context_id: str) -> ContextStatusResponse
 
         Get status.
-
-Gets the status for an execution context.
-
-:param cluster_id: str
-:param context_id: str
-
-:returns: :class:`ContextStatusResponse`
-
+        
+        Gets the status for an execution context.
+        
+        :param cluster_id: str
+        :param context_id: str
+        
+        :returns: :class:`ContextStatusResponse`
+        
 
     .. py:method:: create( [, cluster_id: Optional[str], language: Optional[Language]]) -> Wait[ContextStatusResponse]
 
@@ -76,19 +76,19 @@ Gets the status for an execution context.
             w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
 
         Create an execution context.
-
-Creates an execution context for running cluster commands.
-
-If successful, this method returns the ID of the new execution context.
-
-:param cluster_id: str (optional)
-  Running cluster id
-:param language: :class:`Language` (optional)
-
-:returns:
-  Long-running operation waiter for :class:`ContextStatusResponse`.
-  See :method:wait_context_status_command_execution_running for more details.
-
+        
+        Creates an execution context for running cluster commands.
+        
+        If successful, this method returns the ID of the new execution context.
+        
+        :param cluster_id: str (optional)
+          Running cluster id
+        :param language: :class:`Language` (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`ContextStatusResponse`.
+          See :method:wait_context_status_command_execution_running for more details.
+        
 
     .. py:method:: create_and_wait( [, cluster_id: Optional[str], language: Optional[Language], timeout: datetime.timedelta = 0:20:00]) -> ContextStatusResponse
 
@@ -96,14 +96,14 @@ If successful, this method returns the ID of the new execution context.
     .. py:method:: destroy(cluster_id: str, context_id: str)
 
         Delete an execution context.
-
-Deletes an execution context.
-
-:param cluster_id: str
-:param context_id: str
-
-
-
+        
+        Deletes an execution context.
+        
+        :param cluster_id: str
+        :param context_id: str
+        
+        
+        
 
     .. py:method:: execute( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language]]) -> Wait[CommandStatusResponse]
 
@@ -132,23 +132,23 @@ Deletes an execution context.
             w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)
 
         Run a command.
-
-Runs a cluster command in the given execution context, using the provided language.
-
-If successful, it returns an ID for tracking the status of the command's execution.
-
-:param cluster_id: str (optional)
-  Running cluster id
-:param command: str (optional)
-  Executable code
-:param context_id: str (optional)
-  Running context id
-:param language: :class:`Language` (optional)
-
-:returns:
-  Long-running operation waiter for :class:`CommandStatusResponse`.
-  See :method:wait_command_status_command_execution_finished_or_error for more details.
-
+        
+        Runs a cluster command in the given execution context, using the provided language.
+        
+        If successful, it returns an ID for tracking the status of the command's execution.
+        
+        :param cluster_id: str (optional)
+          Running cluster id
+        :param command: str (optional)
+          Executable code
+        :param context_id: str (optional)
+          Running context id
+        :param language: :class:`Language` (optional)
+        
+        :returns:
+          Long-running operation waiter for :class:`CommandStatusResponse`.
+          See :method:wait_command_status_command_execution_finished_or_error for more details.
+        
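        Putting the context and command APIs together, a minimal sketch that creates a context, runs a
        command, reads the result, and cleans up; the cluster ID is illustrative:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()
            cluster_id = "1234-567890-abcde123"
            # Create an execution context, waiting until it is ready
            context = w.command_execution.create(cluster_id=cluster_id,
                                                 language=compute.Language.PYTHON).result()
            # Run a command in that context and wait for it to finish
            status = w.command_execution.execute(cluster_id=cluster_id,
                                                 context_id=context.id,
                                                 language=compute.Language.PYTHON,
                                                 command="print(1 + 1)").result()
            print(status.results.data)
            w.command_execution.destroy(cluster_id=cluster_id, context_id=context.id)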
 
     .. py:method:: execute_and_wait( [, cluster_id: Optional[str], command: Optional[str], context_id: Optional[str], language: Optional[Language], timeout: datetime.timedelta = 0:20:00]) -> CommandStatusResponse
 
diff --git a/docs/workspace/compute/global_init_scripts.rst b/docs/workspace/compute/global_init_scripts.rst
index 62d5e16c4..9d2372a6d 100644
--- a/docs/workspace/compute/global_init_scripts.rst
+++ b/docs/workspace/compute/global_init_scripts.rst
@@ -5,12 +5,12 @@
 .. py:class:: GlobalInitScriptsAPI
 
     The Global Init Scripts API enables Workspace administrators to configure global initialization scripts
-for their workspace. These scripts run on every node in every cluster in the workspace.
-
-**Important:** Existing clusters must be restarted to pick up any changes made to global init scripts.
-Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark
-container fails to launch and init scripts with later position are skipped. If enough containers fail, the
-entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.
+    for their workspace. These scripts run on every node in every cluster in the workspace.
+    
+    **Important:** Existing clusters must be restarted to pick up any changes made to global init scripts.
+    Global init scripts are run in order. If the init script returns with a bad exit code, the Apache Spark
+    container fails to launch and init scripts with later position are skipped. If enough containers fail, the
+    entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.
 
     .. py:method:: create(name: str, script: str [, enabled: Optional[bool], position: Optional[int]]) -> CreateResponse
 
@@ -35,40 +35,40 @@ entire cluster fails with a `GLOBAL_INIT_SCRIPT_FAILURE` error code.
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Create init script.
-
-Creates a new global init script in this workspace.
-
-:param name: str
-  The name of the script
-:param script: str
-  The Base64-encoded content of the script.
-:param enabled: bool (optional)
-  Specifies whether the script is enabled. The script runs only if enabled.
-:param position: int (optional)
-  The position of a global init script, where 0 represents the first script to run, 1 is the second
-  script to run, in ascending order.
-  
-  If you omit the numeric position for a new global init script, it defaults to last position. It will
-  run after all current scripts. Setting any value greater than the position of the last script is
-  equivalent to the last position. Example: Take three existing scripts with positions 0, 1, and 2.
-  Any position of (3) or greater puts the script in the last position. If an explicit position value
-  conflicts with an existing script value, your request succeeds, but the original script at that
-  position and all later scripts have their positions incremented by 1.
-
-:returns: :class:`CreateResponse`
-
+        
+        Creates a new global init script in this workspace.
+        
+        :param name: str
+          The name of the script
+        :param script: str
+          The Base64-encoded content of the script.
+        :param enabled: bool (optional)
+          Specifies whether the script is enabled. The script runs only if enabled.
+        :param position: int (optional)
+          The position of a global init script, where 0 represents the first script to run, 1 is the second
+          script to run, in ascending order.
+          
+          If you omit the numeric position for a new global init script, it defaults to last position. It will
+          run after all current scripts. Setting any value greater than the position of the last script is
+          equivalent to the last position. Example: take three existing scripts with positions 0, 1, and 2.
+          Any position of 3 or greater puts the script in the last position. If an explicit position value
+          conflicts with an existing script value, your request succeeds, but the original script at that
+          position and all later scripts have their positions incremented by 1.
+        
+        :returns: :class:`CreateResponse`
+        
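        Since `script` must be Base64-encoded, a minimal create sketch looks like this; the script
        contents and name are illustrative:

            import base64

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            created = w.global_init_scripts.create(name="install-monitoring-agent",
                                                   script=base64.b64encode(b"echo hello").decode(),
                                                   enabled=True,
                                                   position=10)
            print(created.script_id)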
 
     .. py:method:: delete(script_id: str)
 
         Delete init script.
-
-Deletes a global init script.
-
-:param script_id: str
-  The ID of the global init script.
-
-
-
+        
+        Deletes a global init script.
+        
+        :param script_id: str
+          The ID of the global init script.
+        
+        
+        
 
     .. py:method:: get(script_id: str) -> GlobalInitScriptDetailsWithContent
 
@@ -95,14 +95,14 @@ Deletes a global init script.
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Get an init script.
-
-Gets all the details of a script, including its Base64-encoded contents.
-
-:param script_id: str
-  The ID of the global init script.
-
-:returns: :class:`GlobalInitScriptDetailsWithContent`
-
+        
+        Gets all the details of a script, including its Base64-encoded contents.
+        
+        :param script_id: str
+          The ID of the global init script.
+        
+        :returns: :class:`GlobalInitScriptDetailsWithContent`
+        
 
     .. py:method:: list() -> Iterator[GlobalInitScriptDetails]
 
@@ -118,13 +118,13 @@ Gets all the details of a script, including its Base64-encoded contents.
             all = w.global_init_scripts.list()
 
         Get init scripts.
-
-Get a list of all global init scripts for this workspace. This returns all properties for each script
-but **not** the script contents. To retrieve the contents of a script, use the [get a global init
-script](:method:globalinitscripts/get) operation.
-
-:returns: Iterator over :class:`GlobalInitScriptDetails`
-
+        
+        Get a list of all global init scripts for this workspace. This returns all properties for each script
+        but **not** the script contents. To retrieve the contents of a script, use the [get a global init
+        script](:method:globalinitscripts/get) operation.
+        
+        :returns: Iterator over :class:`GlobalInitScriptDetails`
+        
 
     .. py:method:: update(script_id: str, name: str, script: str [, enabled: Optional[bool], position: Optional[int]])
 
@@ -153,27 +153,28 @@ script](:method:globalinitscripts/get) operation.
             w.global_init_scripts.delete(script_id=created.script_id)
 
         Update init script.
-
-Updates a global init script, specifying only the fields to change. All fields are optional.
-Unspecified fields retain their current value.
-
-:param script_id: str
-  The ID of the global init script.
-:param name: str
-  The name of the script
-:param script: str
-  The Base64-encoded content of the script.
-:param enabled: bool (optional)
-  Specifies whether the script is enabled. The script runs only if enabled.
-:param position: int (optional)
-  The position of a script, where 0 represents the first script to run, 1 is the second script to run,
-  in ascending order. To move the script to run first, set its position to 0.
-  
-  To move the script to the end, set its position to any value greater or equal to the position of the
-  last script. Example, three existing scripts with positions 0, 1, and 2. Any position value of 2 or
-  greater puts the script in the last position (2).
-  
-  If an explicit position value conflicts with an existing script, your request succeeds, but the
-  original script at that position and all later scripts have their positions incremented by 1.
-
-
+        
+        Updates a global init script, specifying only the fields to change. All fields are optional.
+        Unspecified fields retain their current value.
+        
+        :param script_id: str
+          The ID of the global init script.
+        :param name: str
+          The name of the script
+        :param script: str
+          The Base64-encoded content of the script.
+        :param enabled: bool (optional)
+          Specifies whether the script is enabled. The script runs only if enabled.
+        :param position: int (optional)
+          The position of a script, where 0 represents the first script to run, 1 is the second script to run,
+          in ascending order. To move the script to run first, set its position to 0.
+          
+          To move the script to the end, set its position to any value greater than or equal to the position
+          of the last script. Example: take three existing scripts with positions 0, 1, and 2. Any position
+          value of 2 or greater puts the script in the last position (2).
+          
+          If an explicit position value conflicts with an existing script, your request succeeds, but the
+          original script at that position and all later scripts have their positions incremented by 1.
+        
+        
+        
\ No newline at end of file
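For instance, moving an existing script to run first is a matter of setting `position=0`. A sketch,
with an illustrative script ID; `name` and `script` must be re-supplied since they are required
parameters:

    import base64

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Move the script to run first; name and script are required on update
    w.global_init_scripts.update(script_id="714B166709FB0083",
                                 name="install-monitoring-agent",
                                 script=base64.b64encode(b"echo hello").decode(),
                                 position=0)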
diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst
index 61c55d0e0..333c44938 100644
--- a/docs/workspace/compute/instance_pools.rst
+++ b/docs/workspace/compute/instance_pools.rst
@@ -5,19 +5,19 @@
 .. py:class:: InstancePoolsAPI
 
     Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud
-instances which reduces a cluster start and auto-scaling times.
-
-Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
-instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
-instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
-instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
-returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
-pool’s idle instances.
-
-You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
-
-Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
-apply. See pricing.
+    instances, which reduces cluster start and auto-scaling times.
+    
+    Databricks pools reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use
+    instances. When a cluster is attached to a pool, cluster nodes are created using the pool’s idle
+    instances. If the pool has no idle instances, the pool expands by allocating a new instance from the
+    instance provider in order to accommodate the cluster’s request. When a cluster releases an instance, it
+    returns to the pool and is free for another cluster to use. Only clusters attached to a pool can use that
+    pool’s idle instances.
+    
+    You can specify a different pool for the driver node and worker nodes, or use the same pool for both.
+    
+    Databricks does not charge DBUs while instances are idle in the pool. Instance provider billing does
+    apply. See pricing.
 
     .. py:method:: create(instance_pool_name: str, node_type_id: str [, aws_attributes: Optional[InstancePoolAwsAttributes], azure_attributes: Optional[InstancePoolAzureAttributes], custom_tags: Optional[Dict[str, str]], disk_spec: Optional[DiskSpec], enable_elastic_disk: Optional[bool], gcp_attributes: Optional[InstancePoolGcpAttributes], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int], preloaded_docker_images: Optional[List[DockerImage]], preloaded_spark_versions: Optional[List[str]]]) -> CreateInstancePoolResponse
 
@@ -40,70 +40,70 @@ apply. See pricing.
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Create a new instance pool.
-
-Creates a new instance pool using idle and ready-to-use cloud instances.
-
-:param instance_pool_name: str
-  Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
-  characters.
-:param node_type_id: str
-  This field encodes, through a single value, the resources available to each of the Spark nodes in
-  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-  intensive workloads. A list of available node types can be retrieved by using the
-  :method:clusters/listNodeTypes API call.
-:param aws_attributes: :class:`InstancePoolAwsAttributes` (optional)
-  Attributes related to instance pools running on Amazon Web Services. If not specified at pool
-  creation, a set of default values will be used.
-:param azure_attributes: :class:`InstancePoolAzureAttributes` (optional)
-  Attributes related to instance pools running on Azure. If not specified at pool creation, a set of
-  default values will be used.
-:param custom_tags: Dict[str,str] (optional)
-  Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
-  EBS volumes) with these tags in addition to `default_tags`. Notes:
-  
-  - Currently, Databricks allows at most 45 custom tags
-:param disk_spec: :class:`DiskSpec` (optional)
-  Defines the specification of the disks that will be attached to all spark containers.
-:param enable_elastic_disk: bool (optional)
-  Autoscaling Local Storage: when enabled, this instances in this pool will dynamically acquire
-  additional disk space when its Spark workers are running low on disk space. In AWS, this feature
-  requires specific AWS permissions to function correctly - refer to the User Guide for more details.
-:param gcp_attributes: :class:`InstancePoolGcpAttributes` (optional)
-  Attributes related to instance pools running on Google Cloud Platform. If not specified at pool
-  creation, a set of default values will be used.
-:param idle_instance_autotermination_minutes: int (optional)
-  Automatically terminates the extra instances in the pool cache after they are inactive for this time
-  in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
-  will be automatically terminated after a default timeout. If specified, the threshold must be
-  between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
-  from the cache if min cache size could still hold.
-:param max_capacity: int (optional)
-  Maximum number of outstanding instances to keep in the pool, including both instances used by
-  clusters and idle instances. Clusters that require further instance provisioning will fail during
-  upsize requests.
-:param min_idle_instances: int (optional)
-  Minimum number of idle instances to keep in the instance pool
-:param preloaded_docker_images: List[:class:`DockerImage`] (optional)
-  Custom Docker Image BYOC
-:param preloaded_spark_versions: List[str] (optional)
-  A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
-  started with the preloaded Spark version will start faster. A list of available Spark versions can
-  be retrieved by using the :method:clusters/sparkVersions API call.
-
-:returns: :class:`CreateInstancePoolResponse`
-
+        
+        Creates a new instance pool using idle and ready-to-use cloud instances.
+        
+        :param instance_pool_name: str
+          Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
+          characters.
+        :param node_type_id: str
+          This field encodes, through a single value, the resources available to each of the Spark nodes in
+          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+          intensive workloads. A list of available node types can be retrieved by using the
+          :method:clusters/listNodeTypes API call.
+        :param aws_attributes: :class:`InstancePoolAwsAttributes` (optional)
+          Attributes related to instance pools running on Amazon Web Services. If not specified at pool
+          creation, a set of default values will be used.
+        :param azure_attributes: :class:`InstancePoolAzureAttributes` (optional)
+          Attributes related to instance pools running on Azure. If not specified at pool creation, a set of
+          default values will be used.
+        :param custom_tags: Dict[str,str] (optional)
+          Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
+          EBS volumes) with these tags in addition to `default_tags`. Notes:
+          
+          - Currently, Databricks allows at most 45 custom tags
+        :param disk_spec: :class:`DiskSpec` (optional)
+          Defines the specification of the disks that will be attached to all spark containers.
+        :param enable_elastic_disk: bool (optional)
+          Autoscaling Local Storage: when enabled, the instances in this pool will dynamically acquire
+          additional disk space when their Spark workers are running low on disk space. In AWS, this feature
+          requires specific AWS permissions to function correctly - refer to the User Guide for more details.
+        :param gcp_attributes: :class:`InstancePoolGcpAttributes` (optional)
+          Attributes related to instance pools running on Google Cloud Platform. If not specified at pool
+          creation, a set of default values will be used.
+        :param idle_instance_autotermination_minutes: int (optional)
+          Automatically terminates the extra instances in the pool cache after they are inactive for this time
+          in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
+          will be automatically terminated after a default timeout. If specified, the threshold must be
+          between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
+          from the cache if min cache size could still hold.
+        :param max_capacity: int (optional)
+          Maximum number of outstanding instances to keep in the pool, including both instances used by
+          clusters and idle instances. Clusters that require further instance provisioning will fail during
+          upsize requests.
+        :param min_idle_instances: int (optional)
+          Minimum number of idle instances to keep in the instance pool
+        :param preloaded_docker_images: List[:class:`DockerImage`] (optional)
+          Custom Docker Image BYOC
+        :param preloaded_spark_versions: List[str] (optional)
+          A list containing at most one preloaded Spark image version for the pool. Pool-backed clusters
+          started with the preloaded Spark version will start faster. A list of available Spark versions can
+          be retrieved by using the :method:clusters/sparkVersions API call.
+        
+        :returns: :class:`CreateInstancePoolResponse`
+        
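        A minimal creation sketch tying together the sizing knobs above; the pool name and idle settings
        are illustrative, and `select_node_type` is a convenience helper on the clusters API for picking
        a node type:

            from databricks.sdk import WorkspaceClient

            w = WorkspaceClient()
            pool = w.instance_pools.create(instance_pool_name="shared-small-pool",
                                           node_type_id=w.clusters.select_node_type(local_disk=True),
                                           min_idle_instances=1,
                                           max_capacity=10,
                                           idle_instance_autotermination_minutes=15)
            print(pool.instance_pool_id)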
 
     .. py:method:: delete(instance_pool_id: str)
 
         Delete an instance pool.
-
-Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.
-
-:param instance_pool_id: str
-  The instance pool to be terminated.
-
-
-
+        
+        Deletes the instance pool permanently. The idle instances in the pool are terminated asynchronously.
+        
+        :param instance_pool_id: str
+          The instance pool to be terminated.
+        
+        
+        
 
     .. py:method:: edit(instance_pool_id: str, instance_pool_name: str, node_type_id: str [, custom_tags: Optional[Dict[str, str]], idle_instance_autotermination_minutes: Optional[int], max_capacity: Optional[int], min_idle_instances: Optional[int]])
 
@@ -130,39 +130,39 @@ Deletes the instance pool permanently. The idle instances in the pool are termin
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Edit an existing instance pool.
-
-Modifies the configuration of an existing instance pool.
-
-:param instance_pool_id: str
-  Instance pool ID
-:param instance_pool_name: str
-  Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
-  characters.
-:param node_type_id: str
-  This field encodes, through a single value, the resources available to each of the Spark nodes in
-  this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
-  intensive workloads. A list of available node types can be retrieved by using the
-  :method:clusters/listNodeTypes API call.
-:param custom_tags: Dict[str,str] (optional)
-  Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
-  EBS volumes) with these tags in addition to `default_tags`. Notes:
-  
-  - Currently, Databricks allows at most 45 custom tags
-:param idle_instance_autotermination_minutes: int (optional)
-  Automatically terminates the extra instances in the pool cache after they are inactive for this time
-  in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
-  will be automatically terminated after a default timeout. If specified, the threshold must be
-  between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
-  from the cache if min cache size could still hold.
-:param max_capacity: int (optional)
-  Maximum number of outstanding instances to keep in the pool, including both instances used by
-  clusters and idle instances. Clusters that require further instance provisioning will fail during
-  upsize requests.
-:param min_idle_instances: int (optional)
-  Minimum number of idle instances to keep in the instance pool
-
-
-
+        
+        Modifies the configuration of an existing instance pool.
+        
+        :param instance_pool_id: str
+          Instance pool ID
+        :param instance_pool_name: str
+          Pool name requested by the user. Pool name must be unique. Length must be between 1 and 100
+          characters.
+        :param node_type_id: str
+          This field encodes, through a single value, the resources available to each of the Spark nodes in
+          this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
+          intensive workloads. A list of available node types can be retrieved by using the
+          :method:clusters/listNodeTypes API call.
+        :param custom_tags: Dict[str,str] (optional)
+          Additional tags for pool resources. Databricks will tag all pool resources (e.g., AWS instances and
+          EBS volumes) with these tags in addition to `default_tags`. Notes:
+          
+          - Currently, Databricks allows at most 45 custom tags
+        :param idle_instance_autotermination_minutes: int (optional)
+          Automatically terminates the extra instances in the pool cache after they are inactive for this time
+          in minutes if min_idle_instances requirement is already met. If not set, the extra pool instances
+          will be automatically terminated after a default timeout. If specified, the threshold must be
+          between 0 and 10000 minutes. Users can also set this value to 0 to instantly remove idle instances
+          from the cache if min cache size could still hold.
+        :param max_capacity: int (optional)
+          Maximum number of outstanding instances to keep in the pool, including both instances used by
+          clusters and idle instances. Clusters that require further instance provisioning will fail during
+          upsize requests.
+        :param min_idle_instances: int (optional)
+          Minimum number of idle instances to keep in the instance pool
+        
+        
+        
 
     .. py:method:: get(instance_pool_id: str) -> GetInstancePool
 
@@ -187,39 +187,39 @@ Modifies the configuration of an existing instance pool.
             w.instance_pools.delete(instance_pool_id=created.instance_pool_id)
 
         Get instance pool information.
-
-Retrieve the information for an instance pool based on its identifier.
-
-:param instance_pool_id: str
-  The canonical unique identifier for the instance pool.
-
-:returns: :class:`GetInstancePool`
-
+        
+        Retrieve the information for an instance pool based on its identifier.
+        
+        :param instance_pool_id: str
+          The canonical unique identifier for the instance pool.
+        
+        :returns: :class:`GetInstancePool`
+        
 
     .. py:method:: get_permission_levels(instance_pool_id: str) -> GetInstancePoolPermissionLevelsResponse
 
         Get instance pool permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param instance_pool_id: str
-  The instance pool for which to get or manage permissions.
-
-:returns: :class:`GetInstancePoolPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param instance_pool_id: str
+          The instance pool for which to get or manage permissions.
+        
+        :returns: :class:`GetInstancePoolPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(instance_pool_id: str) -> InstancePoolPermissions
 
         Get instance pool permissions.
-
-Gets the permissions of an instance pool. Instance pools can inherit permissions from their root
-object.
-
-:param instance_pool_id: str
-  The instance pool for which to get or manage permissions.
-
-:returns: :class:`InstancePoolPermissions`
-
+        
+        Gets the permissions of an instance pool. Instance pools can inherit permissions from their root
+        object.
+        
+        :param instance_pool_id: str
+          The instance pool for which to get or manage permissions.
+        
+        :returns: :class:`InstancePoolPermissions`
+        
 
     .. py:method:: list() -> Iterator[InstancePoolAndStats]
 
@@ -235,35 +235,36 @@ object.
             all = w.instance_pools.list()
 
         List instance pool info.
-
-Gets a list of instance pools with their statistics.
-
-:returns: Iterator over :class:`InstancePoolAndStats`
-
+        
+        Gets a list of instance pools with their statistics.
+        
+        :returns: Iterator over :class:`InstancePoolAndStats`
+        
 
     .. py:method:: set_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions
 
         Set instance pool permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param instance_pool_id: str
-  The instance pool for which to get or manage permissions.
-:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-
-:returns: :class:`InstancePoolPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param instance_pool_id: str
+          The instance pool for which to get or manage permissions.
+        :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
+        
+        :returns: :class:`InstancePoolPermissions`
+        
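        Because `set_permissions` replaces the full permission set, the ACL passed in is the complete
        desired state. A sketch, with an illustrative pool ID and group name:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()
            w.instance_pools.set_permissions(
                instance_pool_id="0123-456789-pool12",
                access_control_list=[
                    compute.InstancePoolAccessControlRequest(
                        group_name="data-engineers",
                        permission_level=compute.InstancePoolPermissionLevel.CAN_ATTACH_TO)
                ])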
 
     .. py:method:: update_permissions(instance_pool_id: str [, access_control_list: Optional[List[InstancePoolAccessControlRequest]]]) -> InstancePoolPermissions
 
         Update instance pool permissions.
-
-Updates the permissions on an instance pool. Instance pools can inherit permissions from their root
-object.
-
-:param instance_pool_id: str
-  The instance pool for which to get or manage permissions.
-:param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
-
-:returns: :class:`InstancePoolPermissions`
+        
+        Updates the permissions on an instance pool. Instance pools can inherit permissions from their root
+        object.
+        
+        :param instance_pool_id: str
+          The instance pool for which to get or manage permissions.
+        :param access_control_list: List[:class:`InstancePoolAccessControlRequest`] (optional)
+        
+        :returns: :class:`InstancePoolPermissions`
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/instance_profiles.rst b/docs/workspace/compute/instance_profiles.rst
index 4b863deb8..a7a25f869 100644
--- a/docs/workspace/compute/instance_profiles.rst
+++ b/docs/workspace/compute/instance_profiles.rst
@@ -5,10 +5,10 @@
 .. py:class:: InstanceProfilesAPI
 
     The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch
-clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3
-buckets] using instance profiles for more information.
-
-[Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html
+    clusters with. Regular users can list the instance profiles available to them. See [Secure access to S3
+    buckets] using instance profiles for more information.
+    
+    [Secure access to S3 buckets]: https://docs.databricks.com/administration-guide/cloud-configurations/aws/instance-profiles.html
 
     .. py:method:: add(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool], skip_validation: Optional[bool]])
 
@@ -28,34 +28,34 @@ buckets] using instance profiles for more information.
                                     iam_role_arn="arn:aws:iam::000000000000:role/bcd")
 
         Register an instance profile.
-
-In the UI, you can select the instance profile when launching clusters. This API is only available to
-admin users.
-
-:param instance_profile_arn: str
-  The AWS ARN of the instance profile to register with Databricks. This field is required.
-:param iam_role_arn: str (optional)
-  The AWS IAM role ARN of the role associated with the instance profile. This field is required if
-  your role name and instance profile name do not match and you want to use the instance profile with
-  [Databricks SQL Serverless].
-  
-  Otherwise, this field is optional.
-  
-  [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-:param is_meta_instance_profile: bool (optional)
-  Boolean flag indicating whether the instance profile should only be used in credential passthrough
-  scenarios. If true, it means the instance profile contains an meta IAM role which could assume a
-  wide range of roles. Therefore it should always be used with authorization. This field is optional,
-  the default value is `false`.
-:param skip_validation: bool (optional)
-  By default, Databricks validates that it has sufficient permissions to launch instances with the
-  instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation
-  fails with an error message that does not indicate an IAM related permission issue, (e.g. “Your
-  requested instance type is not supported in your requested availability zone”), you can pass this
-  flag to skip the validation and forcibly add the instance profile.
-
-
-
+        
+        In the UI, you can select the instance profile when launching clusters. This API is only available to
+        admin users.
+        
+        :param instance_profile_arn: str
+          The AWS ARN of the instance profile to register with Databricks. This field is required.
+        :param iam_role_arn: str (optional)
+          The AWS IAM role ARN of the role associated with the instance profile. This field is required if
+          your role name and instance profile name do not match and you want to use the instance profile with
+          [Databricks SQL Serverless].
+          
+          Otherwise, this field is optional.
+          
+          [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+        :param is_meta_instance_profile: bool (optional)
+          Boolean flag indicating whether the instance profile should only be used in credential passthrough
+          scenarios. If true, it means the instance profile contains an meta IAM role which could assume a
+          wide range of roles. Therefore it should always be used with authorization. This field is optional,
+          the default value is `false`.
+        :param skip_validation: bool (optional)
+          By default, Databricks validates that it has sufficient permissions to launch instances with the
+          instance profile. This validation uses AWS dry-run mode for the RunInstances API. If validation
+          fails with an error message that does not indicate an IAM-related permission issue (e.g. “Your
+          requested instance type is not supported in your requested availability zone”), you can pass this
+          flag to skip the validation and forcibly add the instance profile.
+        
+        
+        
 
     .. py:method:: edit(instance_profile_arn: str [, iam_role_arn: Optional[str], is_meta_instance_profile: Optional[bool]])
 
@@ -73,38 +73,38 @@ admin users.
             w.instance_profiles.edit(instance_profile_arn=arn, iam_role_arn="arn:aws:iam::000000000000:role/bcdf")
 
         Edit an instance profile.
-
-The only supported field to change is the optional IAM role ARN associated with the instance profile.
-It is required to specify the IAM role ARN if both of the following are true:
-
-* Your role name and instance profile name do not match. The name is the part after the last slash in
-each ARN. * You want to use the instance profile with [Databricks SQL Serverless].
-
-To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses].
-
-This API is only available to admin users.
-
-[Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-[Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html
-
-:param instance_profile_arn: str
-  The AWS ARN of the instance profile to register with Databricks. This field is required.
-:param iam_role_arn: str (optional)
-  The AWS IAM role ARN of the role associated with the instance profile. This field is required if
-  your role name and instance profile name do not match and you want to use the instance profile with
-  [Databricks SQL Serverless].
-  
-  Otherwise, this field is optional.
-  
-  [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
-:param is_meta_instance_profile: bool (optional)
-  Boolean flag indicating whether the instance profile should only be used in credential passthrough
-  scenarios. If true, it means the instance profile contains an meta IAM role which could assume a
-  wide range of roles. Therefore it should always be used with authorization. This field is optional,
-  the default value is `false`.
-
-
-
+        
+        The only supported field to change is the optional IAM role ARN associated with the instance profile.
+        It is required to specify the IAM role ARN if both of the following are true:
+        
+        * Your role name and instance profile name do not match. The name is the part after the last slash
+          in each ARN.
+        * You want to use the instance profile with [Databricks SQL Serverless].
+        
+        To understand where these fields are in the AWS console, see [Enable serverless SQL warehouses].
+        
+        This API is only available to admin users.
+        
+        [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+        [Enable serverless SQL warehouses]: https://docs.databricks.com/sql/admin/serverless.html
+        
+        :param instance_profile_arn: str
+          The AWS ARN of the instance profile to register with Databricks. This field is required.
+        :param iam_role_arn: str (optional)
+          The AWS IAM role ARN of the role associated with the instance profile. This field is required if
+          your role name and instance profile name do not match and you want to use the instance profile with
+          [Databricks SQL Serverless].
+          
+          Otherwise, this field is optional.
+          
+          [Databricks SQL Serverless]: https://docs.databricks.com/sql/admin/serverless.html
+        :param is_meta_instance_profile: bool (optional)
+          Boolean flag indicating whether the instance profile should only be used in credential passthrough
+          scenarios. If true, it means the instance profile contains a meta IAM role which could assume a
+          wide range of roles. Therefore it should always be used with authorization. This field is optional,
+          the default value is `false`.
+        
+        
+        
 
     .. py:method:: list() -> Iterator[InstanceProfile]
 
@@ -120,24 +120,25 @@ This API is only available to admin users.
             all = w.instance_profiles.list()
 
         List available instance profiles.
-
-List the instance profiles that the calling user can use to launch a cluster.
-
-This API is available to all users.
-
-:returns: Iterator over :class:`InstanceProfile`
-
+        
+        List the instance profiles that the calling user can use to launch a cluster.
+        
+        This API is available to all users.
+        
+        :returns: Iterator over :class:`InstanceProfile`
+        
 
     .. py:method:: remove(instance_profile_arn: str)
 
         Remove the instance profile.
-
-Remove the instance profile with the provided ARN. Existing clusters with this instance profile will
-continue to function.
-
-This API is only accessible to admin users.
-
-:param instance_profile_arn: str
-  The ARN of the instance profile to remove. This field is required.
-
-
+        
+        Remove the instance profile with the provided ARN. Existing clusters with this instance profile will
+        continue to function.
+        
+        This API is only accessible to admin users.
+        
+        :param instance_profile_arn: str
+          The ARN of the instance profile to remove. This field is required.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/libraries.rst b/docs/workspace/compute/libraries.rst
index 305039e26..64f688fdc 100644
--- a/docs/workspace/compute/libraries.rst
+++ b/docs/workspace/compute/libraries.rst
@@ -5,70 +5,71 @@
 .. py:class:: LibrariesAPI
 
     The Libraries API allows you to install and uninstall libraries and get the status of libraries on a
-cluster.
-
-To make third-party or custom code available to notebooks and jobs running on your clusters, you can
-install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
-Scala and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
-
-Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
-directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
-or using an init script.
-
-When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
-Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.
+    cluster.
+    
+    To make third-party or custom code available to notebooks and jobs running on your clusters, you can
+    install a library. Libraries can be written in Python, Java, Scala, and R. You can upload Python, Java,
+    Scala, and R libraries and point to external packages in PyPI, Maven, and CRAN repositories.
+    
+    Cluster libraries can be used by all notebooks running on a cluster. You can install a cluster library
+    directly from a public repository such as PyPI or Maven, using a previously installed workspace library,
+    or using an init script.
+    
+    When you uninstall a library from a cluster, the library is removed only when you restart the cluster.
+    Until you restart the cluster, the status of the uninstalled library appears as Uninstall pending restart.
 
     .. py:method:: all_cluster_statuses() -> Iterator[ClusterLibraryStatuses]
 
         Get all statuses.
-
-Get the status of all libraries on all clusters. A status is returned for all libraries installed on
-this cluster via the API or the libraries UI.
-
-:returns: Iterator over :class:`ClusterLibraryStatuses`
-
+        
+        Get the status of all libraries on all clusters. A status is returned for all libraries installed on
+        a cluster via the API or the libraries UI.
+        
+        :returns: Iterator over :class:`ClusterLibraryStatuses`
+        
 
     .. py:method:: cluster_status(cluster_id: str) -> Iterator[LibraryFullStatus]
 
         Get status.
-
-Get the status of libraries on a cluster. A status is returned for all libraries installed on this
-cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
-set to be installed on this cluster, in the order that the libraries were added to the cluster, are
-returned first. 2. Libraries that were previously requested to be installed on this cluster or, but
-are now marked for removal, in no particular order, are returned last.
-
-:param cluster_id: str
-  Unique identifier of the cluster whose status should be retrieved.
-
-:returns: Iterator over :class:`LibraryFullStatus`
-
+        
+        Get the status of libraries on a cluster. A status is returned for all libraries installed on this
+        cluster via the API or the libraries UI. The order of returned libraries is as follows: 1. Libraries
+        set to be installed on this cluster, in the order that the libraries were added to the cluster, are
+        returned first. 2. Libraries that were previously requested to be installed on this cluster but are
+        now marked for removal are returned last, in no particular order.
+        
+        :param cluster_id: str
+          Unique identifier of the cluster whose status should be retrieved.
+        
+        :returns: Iterator over :class:`LibraryFullStatus`
+        
 
     .. py:method:: install(cluster_id: str, libraries: List[Library])
 
         Add a library.
-
-Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
-after the completion of this request.
-
-:param cluster_id: str
-  Unique identifier for the cluster on which to install these libraries.
-:param libraries: List[:class:`Library`]
-  The libraries to install.
-
-
-
+        
+        Add libraries to install on a cluster. The installation is asynchronous; it happens in the background
+        after the completion of this request.
+        
+        :param cluster_id: str
+          Unique identifier for the cluster on which to install these libraries.
+        :param libraries: List[:class:`Library`]
+          The libraries to install.
+        
+        
+        
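        A sketch of installing a PyPI package and then polling the per-cluster status; the package and
        cluster ID are illustrative. Installation is asynchronous, so statuses may show as pending at
        first:

            from databricks.sdk import WorkspaceClient
            from databricks.sdk.service import compute

            w = WorkspaceClient()
            cluster_id = "1234-567890-abcde123"
            w.libraries.install(cluster_id=cluster_id,
                                libraries=[compute.Library(pypi=compute.PyPiLibrary(package="dbl-tempo"))])
            # Poll the installation status of every library on the cluster
            for lib in w.libraries.cluster_status(cluster_id=cluster_id):
                print(lib.library, lib.status)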
 
     .. py:method:: uninstall(cluster_id: str, libraries: List[Library])
 
         Uninstall libraries.
-
-Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
-restarted. A request to uninstall a library that is not currently installed is ignored.
-
-:param cluster_id: str
-  Unique identifier for the cluster on which to uninstall these libraries.
-:param libraries: List[:class:`Library`]
-  The libraries to uninstall.
-
-
+        
+        Set libraries to uninstall from a cluster. The libraries won't be uninstalled until the cluster is
+        restarted. A request to uninstall a library that is not currently installed is ignored.
+        
+        :param cluster_id: str
+          Unique identifier for the cluster on which to uninstall these libraries.
+        :param libraries: List[:class:`Library`]
+          The libraries to uninstall.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/policy_compliance_for_clusters.rst b/docs/workspace/compute/policy_compliance_for_clusters.rst
index b22207cd3..90c3aeb98 100644
--- a/docs/workspace/compute/policy_compliance_for_clusters.rst
+++ b/docs/workspace/compute/policy_compliance_for_clusters.rst
@@ -5,66 +5,67 @@
 .. py:class:: PolicyComplianceForClustersAPI
 
     The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your
-workspace.
-
-A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
-be out of compliance if their policy was updated after the cluster was last edited.
-
-The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
-compliance API allows you to update a cluster to be compliant with the current version of its policy.
+    workspace.
+    
+    A cluster is compliant with its policy if its configuration satisfies all its policy rules. Clusters could
+    be out of compliance if their policy was updated after the cluster was last edited.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a cluster. The enforce
+    compliance API allows you to update a cluster to be compliant with the current version of its policy.
 
     .. py:method:: enforce_compliance(cluster_id: str [, validate_only: Optional[bool]]) -> EnforceClusterComplianceResponse
 
         Enforce cluster policy compliance.
-
-Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
-it is in a `RUNNING` or `TERMINATED` state.
-
-If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
-can take effect.
-
-If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
-cluster is started, the new attributes will take effect.
-
-Clusters created by the Databricks Jobs, DLT, or Models services cannot be enforced by this API.
-Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
-
-:param cluster_id: str
-  The ID of the cluster you want to enforce policy compliance on.
-:param validate_only: bool (optional)
-  If set, previews the changes that would be made to a cluster to enforce compliance but does not
-  update the cluster.
-
-:returns: :class:`EnforceClusterComplianceResponse`
-
+        
+        Updates a cluster to be compliant with the current version of its policy. A cluster can be updated if
+        it is in a `RUNNING` or `TERMINATED` state.
+        
+        If a cluster is updated while in a `RUNNING` state, it will be restarted so that the new attributes
+        can take effect.
+        
+        If a cluster is updated while in a `TERMINATED` state, it will remain `TERMINATED`. The next time the
+        cluster is started, the new attributes will take effect.
+        
+        Clusters created by the Databricks Jobs, DLT, or Models services cannot have their compliance enforced by this API.
+        Instead, use the "Enforce job policy compliance" API to enforce policy compliance on jobs.
+        
+        :param cluster_id: str
+          The ID of the cluster you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews the changes that would be made to a cluster to enforce compliance but does not
+          update the cluster.
+        
+        :returns: :class:`EnforceClusterComplianceResponse`
+        
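+        A minimal sketch of the preview-then-enforce flow, assuming a configured
+        :class:`WorkspaceClient` and a hypothetical cluster ID (the attribute name
+        ``policy_compliance_for_clusters`` is assumed from the SDK's snake_case convention):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            cluster_id = "0123-456789-abcdefgh"  # hypothetical cluster ID
+
+            # Preview the changes without modifying the cluster.
+            preview = w.policy_compliance_for_clusters.enforce_compliance(
+                cluster_id=cluster_id, validate_only=True)
+
+            # Apply the changes for real.
+            w.policy_compliance_for_clusters.enforce_compliance(cluster_id=cluster_id)
+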
 
     .. py:method:: get_compliance(cluster_id: str) -> GetClusterComplianceResponse
 
         Get cluster policy compliance.
-
-Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
-was updated after the cluster was last edited.
-
-:param cluster_id: str
-  The ID of the cluster to get the compliance status
-
-:returns: :class:`GetClusterComplianceResponse`
-
+        
+        Returns the policy compliance status of a cluster. Clusters could be out of compliance if their policy
+        was updated after the cluster was last edited.
+        
+        :param cluster_id: str
+          The ID of the cluster to get the compliance status for.
+        
+        :returns: :class:`GetClusterComplianceResponse`
+        
 
     .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ClusterCompliance]
 
         List cluster policy compliance.
-
-Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
-compliance if their policy was updated after the cluster was last edited.
-
-:param policy_id: str
-  Canonical unique identifier for the cluster policy.
-:param page_size: int (optional)
-  Use this field to specify the maximum number of results to be returned by the server. The server may
-  further constrain the maximum number of results returned in a single page.
-:param page_token: str (optional)
-  A page token that can be used to navigate to the next page or previous page as returned by
-  `next_page_token` or `prev_page_token`.
-
-:returns: Iterator over :class:`ClusterCompliance`
+        
+        Returns the policy compliance status of all clusters that use a given policy. Clusters could be out of
+        compliance if their policy was updated after the cluster was last edited.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`ClusterCompliance`
+        
\ No newline at end of file
diff --git a/docs/workspace/compute/policy_families.rst b/docs/workspace/compute/policy_families.rst
index ad8061a91..56e4f4275 100644
--- a/docs/workspace/compute/policy_families.rst
+++ b/docs/workspace/compute/policy_families.rst
@@ -5,14 +5,14 @@
 .. py:class:: PolicyFamiliesAPI
 
     View available policy families. A policy family contains a policy definition providing best practices for
-configuring clusters for a particular use case.
-
-Databricks manages and provides policy families for several common cluster use cases. You cannot create,
-edit, or delete policy families.
-
-Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a
-policy family. Cluster policies created using a policy family inherit the policy family's policy
-definition.
+    configuring clusters for a particular use case.
+    
+    Databricks manages and provides policy families for several common cluster use cases. You cannot create,
+    edit, or delete policy families.
+    
+    Policy families cannot be used directly to create clusters. Instead, you create cluster policies using a
+    policy family. Cluster policies created using a policy family inherit the policy family's policy
+    definition.
 
     .. py:method:: get(policy_family_id: str [, version: Optional[int]]) -> PolicyFamily
 
@@ -31,16 +31,16 @@ definition.
             first_family = w.policy_families.get(policy_family_id=all[0].policy_family_id)
 
         Get policy family information.
-
-Retrieve the information for an policy family based on its identifier and version
-
-:param policy_family_id: str
-  The family ID about which to retrieve information.
-:param version: int (optional)
-  The version number for the family to fetch. Defaults to the latest version.
-
-:returns: :class:`PolicyFamily`
-
+        
+        Retrieve the information for a policy family based on its identifier and version.
+        
+        :param policy_family_id: str
+          The family ID about which to retrieve information.
+        :param version: int (optional)
+          The version number for the family to fetch. Defaults to the latest version.
+        
+        :returns: :class:`PolicyFamily`
+        
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[PolicyFamily]
 
@@ -57,13 +57,14 @@ Retrieve the information for an policy family based on its identifier and versio
             all = w.policy_families.list(compute.ListPolicyFamiliesRequest())
 
         List policy families.
-
-Returns the list of policy definition types available to use at their latest version. This API is
-paginated.
-
-:param max_results: int (optional)
-  Maximum number of policy families to return.
-:param page_token: str (optional)
-  A token that can be used to get the next page of results.
-
-:returns: Iterator over :class:`PolicyFamily`
+        
+        Returns the list of policy definition types available to use at their latest version. This API is
+        paginated.
+        
+        :param max_results: int (optional)
+          Maximum number of policy families to return.
+        :param page_token: str (optional)
+          A token that can be used to get the next page of results.
+        
+        :returns: Iterator over :class:`PolicyFamily`
+        
\ No newline at end of file
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
index aed179d96..3908c6472 100644
--- a/docs/workspace/dashboards/genie.rst
+++ b/docs/workspace/dashboards/genie.rst
@@ -5,28 +5,28 @@
 .. py:class:: GenieAPI
 
     Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
-business users can use to ask questions using natural language. Genie uses data registered to Unity
-Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
-Assistant must be enabled.
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled.
 
     .. py:method:: create_message(space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]
 
         Create conversation message.
-
-Create new message in [conversation](:method:genie/startconversation). The AI response uses all
-previously created messages in the conversation to respond.
-
-:param space_id: str
-  The ID associated with the Genie space where the conversation is started.
-:param conversation_id: str
-  The ID associated with the conversation.
-:param content: str
-  User message content.
-
-:returns:
-  Long-running operation waiter for :class:`GenieMessage`.
-  See :method:wait_get_message_genie_completed for more details.
-
+        
+        Create a new message in [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        
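+        A minimal sketch of a conversation round trip, assuming a hypothetical Genie
+        space ID and that the returned :class:`GenieMessage` carries ``conversation_id``:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            space_id = "hypothetical-space-id"
+
+            # Start a conversation and wait for the first AI response.
+            first = w.genie.start_conversation_and_wait(
+                space_id=space_id, content="What were last month's top products?")
+
+            # Ask a follow-up in the same conversation.
+            followup = w.genie.create_message_and_wait(
+                space_id=space_id,
+                conversation_id=first.conversation_id,
+                content="Break that down by region.")
+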
 
     .. py:method:: create_message_and_wait(space_id: str, conversation_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
 
@@ -34,86 +34,86 @@ previously created messages in the conversation to respond.
     .. py:method:: execute_message_query(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
 
         Execute SQL query in a conversation message.
-
-Execute the SQL query in the message.
-
-:param space_id: str
-  Genie space ID
-:param conversation_id: str
-  Conversation ID
-:param message_id: str
-  Message ID
-
-:returns: :class:`GenieGetMessageQueryResultResponse`
-
+        
+        Execute the SQL query in the message.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
 
     .. py:method:: get_message(space_id: str, conversation_id: str, message_id: str) -> GenieMessage
 
         Get conversation message.
-
-Get message from conversation.
-
-:param space_id: str
-  The ID associated with the Genie space where the target conversation is located.
-:param conversation_id: str
-  The ID associated with the target conversation.
-:param message_id: str
-  The ID associated with the target message from the identified conversation.
-
-:returns: :class:`GenieMessage`
-
+        
+        Get message from conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+        
+        :returns: :class:`GenieMessage`
+        
 
     .. py:method:: get_message_query_result(space_id: str, conversation_id: str, message_id: str) -> GenieGetMessageQueryResultResponse
 
         Get conversation message SQL query result.
-
-Get the result of SQL query if the message has a query attachment. This is only available if a message
-has a query attachment and the message status is `EXECUTING_QUERY`.
-
-:param space_id: str
-  Genie space ID
-:param conversation_id: str
-  Conversation ID
-:param message_id: str
-  Message ID
-
-:returns: :class:`GenieGetMessageQueryResultResponse`
-
+        
+        Get the result of the SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
 
     .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse
 
         Get conversation message SQL query result by attachment id.
-
-Get the result of SQL query by attachment id This is only available if a message has a query
-attachment and the message status is `EXECUTING_QUERY`.
-
-:param space_id: str
-  Genie space ID
-:param conversation_id: str
-  Conversation ID
-:param message_id: str
-  Message ID
-:param attachment_id: str
-  Attachment ID
-
-:returns: :class:`GenieGetMessageQueryResultResponse`
-
+        
+        Get the result of the SQL query by attachment id. This is only available if a message has a query
+        attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
 
     .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage]
 
         Start conversation.
-
-Start a new conversation.
-
-:param space_id: str
-  The ID associated with the Genie space where you want to start a conversation.
-:param content: str
-  The text of the message that starts the conversation.
-
-:returns:
-  Long-running operation waiter for :class:`GenieMessage`.
-  See :method:wait_get_message_genie_completed for more details.
-
+        
+        Start a new conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        
 
     .. py:method:: start_conversation_and_wait(space_id: str, content: str, timeout: datetime.timedelta = 0:20:00) -> GenieMessage
 
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index b8c64f15d..c37479dcb 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -5,256 +5,257 @@
 .. py:class:: LakeviewAPI
 
     These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
-be done with Workspace API (import, export, get-status, list, delete).
+    be done with Workspace API (import, export, get-status, list, delete).
 
     .. py:method:: create( [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Create dashboard.
-
-Create a draft dashboard.
-
-:param dashboard: :class:`Dashboard` (optional)
-
-:returns: :class:`Dashboard`
-
+        
+        Create a draft dashboard.
+        
+        :param dashboard: :class:`Dashboard` (optional)
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: create_schedule(dashboard_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Create dashboard schedule.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the schedule belongs.
-:param schedule: :class:`Schedule` (optional)
-
-:returns: :class:`Schedule`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the schedule belongs.
+        :param schedule: :class:`Schedule` (optional)
+        
+        :returns: :class:`Schedule`
+        
 
     .. py:method:: create_subscription(dashboard_id: str, schedule_id: str [, subscription: Optional[Subscription]]) -> Subscription
 
         Create schedule subscription.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the subscription belongs.
-:param schedule_id: str
-  UUID identifying the schedule to which the subscription belongs.
-:param subscription: :class:`Subscription` (optional)
-
-:returns: :class:`Subscription`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the subscription belongs.
+        :param schedule_id: str
+          UUID identifying the schedule to which the subscription belongs.
+        :param subscription: :class:`Subscription` (optional)
+        
+        :returns: :class:`Subscription`
+        
 
     .. py:method:: delete_schedule(dashboard_id: str, schedule_id: str [, etag: Optional[str]])
 
         Delete dashboard schedule.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the schedule belongs.
-:param schedule_id: str
-  UUID identifying the schedule.
-:param etag: str (optional)
-  The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been
-  modified from its last retrieval.
-
-
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the schedule belongs.
+        :param schedule_id: str
+          UUID identifying the schedule.
+        :param etag: str (optional)
+          The etag for the schedule. Optionally, it can be provided to verify that the schedule has not been
+          modified since its last retrieval.
+        
+        
+        
 
     .. py:method:: delete_subscription(dashboard_id: str, schedule_id: str, subscription_id: str [, etag: Optional[str]])
 
         Delete schedule subscription.
-
-:param dashboard_id: str
-  UUID identifying the dashboard which the subscription belongs.
-:param schedule_id: str
-  UUID identifying the schedule which the subscription belongs.
-:param subscription_id: str
-  UUID identifying the subscription.
-:param etag: str (optional)
-  The etag for the subscription. Can be optionally provided to ensure that the subscription has not
-  been modified since the last read.
-
-
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the subscription belongs.
+        :param schedule_id: str
+          UUID identifying the schedule to which the subscription belongs.
+        :param subscription_id: str
+          UUID identifying the subscription.
+        :param etag: str (optional)
+          The etag for the subscription. Can be optionally provided to ensure that the subscription has not
+          been modified since the last read.
+        
+        
+        
 
     .. py:method:: get(dashboard_id: str) -> Dashboard
 
         Get dashboard.
-
-Get a draft dashboard.
-
-:param dashboard_id: str
-  UUID identifying the dashboard.
-
-:returns: :class:`Dashboard`
-
+        
+        Get a draft dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: get_published(dashboard_id: str) -> PublishedDashboard
 
         Get published dashboard.
-
-Get the current published dashboard.
-
-:param dashboard_id: str
-  UUID identifying the published dashboard.
-
-:returns: :class:`PublishedDashboard`
-
+        
+        Get the current published dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        :returns: :class:`PublishedDashboard`
+        
 
     .. py:method:: get_schedule(dashboard_id: str, schedule_id: str) -> Schedule
 
         Get dashboard schedule.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the schedule belongs.
-:param schedule_id: str
-  UUID identifying the schedule.
-
-:returns: :class:`Schedule`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the schedule belongs.
+        :param schedule_id: str
+          UUID identifying the schedule.
+        
+        :returns: :class:`Schedule`
+        
 
     .. py:method:: get_subscription(dashboard_id: str, schedule_id: str, subscription_id: str) -> Subscription
 
         Get schedule subscription.
-
-:param dashboard_id: str
-  UUID identifying the dashboard which the subscription belongs.
-:param schedule_id: str
-  UUID identifying the schedule which the subscription belongs.
-:param subscription_id: str
-  UUID identifying the subscription.
-
-:returns: :class:`Subscription`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the subscription belongs.
+        :param schedule_id: str
+          UUID identifying the schedule to which the subscription belongs.
+        :param subscription_id: str
+          UUID identifying the subscription.
+        
+        :returns: :class:`Subscription`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str], show_trashed: Optional[bool], view: Optional[DashboardView]]) -> Iterator[Dashboard]
 
         List dashboards.
-
-:param page_size: int (optional)
-  The number of dashboards to return per page.
-:param page_token: str (optional)
-  A page token, received from a previous `ListDashboards` call. This token can be used to retrieve the
-  subsequent page.
-:param show_trashed: bool (optional)
-  The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
-  returned.
-:param view: :class:`DashboardView` (optional)
-  `DASHBOARD_VIEW_BASIC`only includes summary metadata from the dashboard.
-
-:returns: Iterator over :class:`Dashboard`
-
+        
+        :param page_size: int (optional)
+          The number of dashboards to return per page.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListDashboards` call. This token can be used to retrieve the
+          subsequent page.
+        :param show_trashed: bool (optional)
+          The flag to include dashboards located in the trash. If unspecified, only active dashboards will be
+          returned.
+        :param view: :class:`DashboardView` (optional)
+          `DASHBOARD_VIEW_BASIC` only includes summary metadata from the dashboard.
+        
+        :returns: Iterator over :class:`Dashboard`
+        
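+        A minimal sketch, assuming a configured :class:`WorkspaceClient`; the returned
+        iterator is expected to follow page tokens transparently:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Iterate over all active dashboards; paging is handled by the iterator.
+            for dashboard in w.lakeview.list(page_size=100):
+                print(dashboard.display_name)
+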
 
     .. py:method:: list_schedules(dashboard_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Schedule]
 
         List dashboard schedules.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the schedules belongs.
-:param page_size: int (optional)
-  The number of schedules to return per page.
-:param page_token: str (optional)
-  A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
-  page.
-
-:returns: Iterator over :class:`Schedule`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the schedules belong.
+        :param page_size: int (optional)
+          The number of schedules to return per page.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListSchedules` call. Use this to retrieve the subsequent
+          page.
+        
+        :returns: Iterator over :class:`Schedule`
+        
 
     .. py:method:: list_subscriptions(dashboard_id: str, schedule_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Subscription]
 
         List schedule subscriptions.
-
-:param dashboard_id: str
-  UUID identifying the dashboard which the subscriptions belongs.
-:param schedule_id: str
-  UUID identifying the schedule which the subscriptions belongs.
-:param page_size: int (optional)
-  The number of subscriptions to return per page.
-:param page_token: str (optional)
-  A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
-  page.
-
-:returns: Iterator over :class:`Subscription`
-
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the subscriptions belong.
+        :param schedule_id: str
+          UUID identifying the schedule to which the subscriptions belong.
+        :param page_size: int (optional)
+          The number of subscriptions to return per page.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListSubscriptions` call. Use this to retrieve the subsequent
+          page.
+        
+        :returns: Iterator over :class:`Subscription`
+        
 
     .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard
 
         Migrate dashboard.
-
-Migrates a classic SQL dashboard to Lakeview.
-
-:param source_dashboard_id: str
-  UUID of the dashboard to be migrated.
-:param display_name: str (optional)
-  Display name for the new Lakeview dashboard.
-:param parent_path: str (optional)
-  The workspace path of the folder to contain the migrated Lakeview dashboard.
-:param update_parameter_syntax: bool (optional)
-  Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
-  (:param) when converting datasets in the dashboard.
-
-:returns: :class:`Dashboard`
-
+        
+        Migrates a classic SQL dashboard to Lakeview.
+        
+        :param source_dashboard_id: str
+          UUID of the dashboard to be migrated.
+        :param display_name: str (optional)
+          Display name for the new Lakeview dashboard.
+        :param parent_path: str (optional)
+          The workspace path of the folder to contain the migrated Lakeview dashboard.
+        :param update_parameter_syntax: bool (optional)
+          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+          (:param) when converting datasets in the dashboard.
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: publish(dashboard_id: str [, embed_credentials: Optional[bool], warehouse_id: Optional[str]]) -> PublishedDashboard
 
         Publish dashboard.
-
-Publish the current draft dashboard.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to be published.
-:param embed_credentials: bool (optional)
-  Flag to indicate if the publisher's credentials should be embedded in the published dashboard. These
-  embedded credentials will be used to execute the published dashboard's queries.
-:param warehouse_id: str (optional)
-  The ID of the warehouse that can be used to override the warehouse which was set in the draft.
-
-:returns: :class:`PublishedDashboard`
-
+        
+        Publish the current draft dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to be published.
+        :param embed_credentials: bool (optional)
+          Flag to indicate if the publisher's credentials should be embedded in the published dashboard. These
+          embedded credentials will be used to execute the published dashboard's queries.
+        :param warehouse_id: str (optional)
+          The ID of the warehouse that can be used to override the warehouse which was set in the draft.
+        
+        :returns: :class:`PublishedDashboard`
+        
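+        A minimal sketch of the draft-then-publish flow, assuming a configured
+        :class:`WorkspaceClient`; the :class:`Dashboard` field names below are
+        assumptions for illustration:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.dashboards import Dashboard
+
+            w = WorkspaceClient()
+
+            # Create a draft dashboard, then publish it with embedded credentials.
+            draft = w.lakeview.create(dashboard=Dashboard(
+                display_name="Sales Overview",  # hypothetical name
+                parent_path="/Workspace/Users/someone@example.com"))
+            w.lakeview.publish(draft.dashboard_id, embed_credentials=True)
+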
 
     .. py:method:: trash(dashboard_id: str)
 
         Trash dashboard.
-
-Trash a dashboard.
-
-:param dashboard_id: str
-  UUID identifying the dashboard.
-
-
-
+        
+        Trash a dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+        
+        
+        
 
     .. py:method:: unpublish(dashboard_id: str)
 
         Unpublish dashboard.
-
-Unpublish the dashboard.
-
-:param dashboard_id: str
-  UUID identifying the published dashboard.
-
-
-
+        
+        Unpublish the dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        
 
     .. py:method:: update(dashboard_id: str [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Update dashboard.
-
-Update a draft dashboard.
-
-:param dashboard_id: str
-  UUID identifying the dashboard.
-:param dashboard: :class:`Dashboard` (optional)
-
-:returns: :class:`Dashboard`
-
+        
+        Update a draft dashboard.
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: update_schedule(dashboard_id: str, schedule_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Update dashboard schedule.
-
-:param dashboard_id: str
-  UUID identifying the dashboard to which the schedule belongs.
-:param schedule_id: str
-  UUID identifying the schedule.
-:param schedule: :class:`Schedule` (optional)
-
-:returns: :class:`Schedule`
+        
+        :param dashboard_id: str
+          UUID identifying the dashboard to which the schedule belongs.
+        :param schedule_id: str
+          UUID identifying the schedule.
+        :param schedule: :class:`Schedule` (optional)
+        
+        :returns: :class:`Schedule`
+        
\ No newline at end of file
diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst
index 460874edb..4c06031f5 100644
--- a/docs/workspace/dashboards/lakeview_embedded.rst
+++ b/docs/workspace/dashboards/lakeview_embedded.rst
@@ -9,10 +9,11 @@
     .. py:method:: get_published_dashboard_embedded(dashboard_id: str)
 
         Read a published dashboard in an embedded ui.
-
-Get the current published dashboard within an embedded context.
-
-:param dashboard_id: str
-  UUID identifying the published dashboard.
-
-
+        
+        Get the current published dashboard within an embedded context.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst
index 2e4dfc6e7..5672183d9 100644
--- a/docs/workspace/dashboards/query_execution.rst
+++ b/docs/workspace/dashboards/query_execution.rst
@@ -9,37 +9,38 @@
     .. py:method:: cancel_published_query_execution(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> CancelQueryExecutionResponse
 
         Cancel the results for a query for a published, embedded dashboard.
-
-:param dashboard_name: str
-:param dashboard_revision_id: str
-:param tokens: List[str] (optional)
-  Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-
-:returns: :class:`CancelQueryExecutionResponse`
-
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`CancelQueryExecutionResponse`
+        
 
     .. py:method:: execute_published_dashboard_query(dashboard_name: str, dashboard_revision_id: str [, override_warehouse_id: Optional[str]])
 
         Execute a query for a published dashboard.
-
-:param dashboard_name: str
-  Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains the
-  list of datasets, warehouse_id, and embedded_credentials
-:param dashboard_revision_id: str
-:param override_warehouse_id: str (optional)
-  A dashboard schedule can override the warehouse used as compute for processing the published
-  dashboard queries
-
-
-
+        
+        :param dashboard_name: str
+          Dashboard name and revision_id are required to retrieve the PublishedDatasetDataModel, which contains the
+          list of datasets, warehouse_id, and embedded_credentials
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries
+        
+        
+        
 
     .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse
 
         Poll the results for a query for a published, embedded dashboard.
-
-:param dashboard_name: str
-:param dashboard_revision_id: str
-:param tokens: List[str] (optional)
-  Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
-
-:returns: :class:`PollQueryStatusResponse`
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`PollQueryStatusResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/files/dbfs.rst b/docs/workspace/files/dbfs.rst
index e200363ac..c52d11bc8 100644
--- a/docs/workspace/files/dbfs.rst
+++ b/docs/workspace/files/dbfs.rst
@@ -5,37 +5,37 @@
 .. py:class:: DbfsExt
 
     DBFS API makes it simple to interact with various data sources without having to include a user's
-credentials every time to read a file.
+    credentials every time to read a file.
 
     .. py:method:: add_block(handle: int, data: str)
 
         Append data block.
-
-Appends a block of data to the stream specified by the input handle. If the handle does not exist,
-this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
-
-If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
-
-:param handle: int
-  The handle on an open stream.
-:param data: str
-  The base64-encoded data to append to the stream. This has a limit of 1 MB.
-
-
-
+        
+        Appends a block of data to the stream specified by the input handle. If the handle does not exist,
+        this call will throw an exception with ``RESOURCE_DOES_NOT_EXIST``.
+        
+        If the block of data exceeds 1 MB, this call will throw an exception with ``MAX_BLOCK_SIZE_EXCEEDED``.
+        
+        :param handle: int
+          The handle on an open stream.
+        :param data: str
+          The base64-encoded data to append to the stream. This has a limit of 1 MB.
+        
+        
+        
 
     .. py:method:: close(handle: int)
 
         Close the stream.
-
-Closes the stream specified by the input handle. If the handle does not exist, this call throws an
-exception with ``RESOURCE_DOES_NOT_EXIST``.
-
-:param handle: int
-  The handle on an open stream.
-
-
-
+        
+        Closes the stream specified by the input handle. If the handle does not exist, this call throws an
+        exception with ``RESOURCE_DOES_NOT_EXIST``.
+        
+        :param handle: int
+          The handle on an open stream.
+        
+        
+        
 
     .. py:method:: copy(src: str, dst: str [, recursive: bool = False, overwrite: bool = False])
 
@@ -44,23 +44,23 @@ exception with ``RESOURCE_DOES_NOT_EXIST``.
     .. py:method:: create(path: str [, overwrite: Optional[bool]]) -> CreateResponse
 
         Open a stream.
-
-Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
-timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
-set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.
-
-A typical workflow for file upload would be:
-
-1. Issue a ``create`` call and get a handle. 2. Issue one or more ``add-block`` calls with the handle
-you have. 3. Issue a ``close`` call with the handle you have.
-
-:param path: str
-  The path of the new file. The path should be the absolute DBFS path.
-:param overwrite: bool (optional)
-  The flag that specifies whether to overwrite existing file/files.
-
-:returns: :class:`CreateResponse`
-
+        
+        Opens a stream to write to a file and returns a handle to this stream. There is a 10 minute idle
+        timeout on this handle. If a file or directory already exists on the given path and __overwrite__ is
+        set to false, this call will throw an exception with ``RESOURCE_ALREADY_EXISTS``.
+        
+        A typical workflow for file upload (sketched below) would be:
+        
+        1. Issue a ``create`` call and get a handle.
+        2. Issue one or more ``add-block`` calls with the handle you have.
+        3. Issue a ``close`` call with the handle you have.
+        
+        :param path: str
+          The path of the new file. The path should be the absolute DBFS path.
+        :param overwrite: bool (optional)
+          The flag that specifies whether to overwrite existing file/files.
+        
+        :returns: :class:`CreateResponse`
+        
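+        A minimal sketch of this workflow, assuming a configured :class:`WorkspaceClient`
+        and that :class:`CreateResponse` exposes the returned ``handle``:
+
+        .. code-block:: python
+
+            import base64
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # 1. Open a stream and keep the returned handle.
+            handle = w.dbfs.create("/tmp/example.txt", overwrite=True).handle
+
+            # 2. Append base64-encoded blocks of at most 1 MB each.
+            for chunk in (b"hello ", b"world"):
+                w.dbfs.add_block(handle, base64.b64encode(chunk).decode())
+
+            # 3. Close the stream.
+            w.dbfs.close(handle)
+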
 
     .. py:method:: delete(path: str [, recursive: bool = False])
 
@@ -98,30 +98,30 @@ you have. 3. Issue a ``close`` call with the handle you have.
     .. py:method:: get_status(path: str) -> FileInfo
 
         Get the information of a file or directory.
-
-Gets the file information for a file or directory. If the file or directory does not exist, this call
-throws an exception with `RESOURCE_DOES_NOT_EXIST`.
-
-:param path: str
-  The path of the file or directory. The path should be the absolute DBFS path.
-
-:returns: :class:`FileInfo`
-
+        
+        Gets the file information for a file or directory. If the file or directory does not exist, this call
+        throws an exception with `RESOURCE_DOES_NOT_EXIST`.
+        
+        :param path: str
+          The path of the file or directory. The path should be the absolute DBFS path.
+        
+        :returns: :class:`FileInfo`
+        
 
     .. py:method:: list(path: str [, recursive: bool = False]) -> Iterator[files.FileInfo]
 
         List directory contents or file details.
 
-List the contents of a directory, or details of the file. If the file or directory does not exist,
-this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
-
-When calling list on a large directory, the list operation will time out after approximately 60
-seconds.
+        List the contents of a directory, or details of the file. If the file or directory does not exist,
+        this call throws an exception with `RESOURCE_DOES_NOT_EXIST`.
 
-:param path: the DBFS or UC Volume path to list
-:param recursive: traverse deep into directory tree
-:returns iterator of metadata for every file
+        When calling list on a large directory, the list operation will time out after approximately 60
+        seconds.
 
+        :param path: the DBFS or UC Volume path to list
+        :param recursive: traverse deep into directory tree
+        :returns: iterator of metadata for every file
+        
 
     .. py:method:: mkdirs(path: str)
 
@@ -130,19 +130,19 @@ seconds.
     .. py:method:: move(source_path: str, destination_path: str)
 
         Move a file.
-
-Moves a file from one location to another location within DBFS. If the source file does not exist,
-this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
-destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
-path is a directory, this call always recursively moves all files.
-
-:param source_path: str
-  The source path of the file or directory. The path should be the absolute DBFS path.
-:param destination_path: str
-  The destination path of the file or directory. The path should be the absolute DBFS path.
-
-
-
+        
+        Moves a file from one location to another location within DBFS. If the source file does not exist,
+        this call throws an exception with `RESOURCE_DOES_NOT_EXIST`. If a file already exists in the
+        destination path, this call throws an exception with `RESOURCE_ALREADY_EXISTS`. If the given source
+        path is a directory, this call always recursively moves all files.
+        
+        :param source_path: str
+          The source path of the file or directory. The path should be the absolute DBFS path.
+        :param destination_path: str
+          The destination path of the file or directory. The path should be the absolute DBFS path.
+        
+        
+        
 
     .. py:method:: move_(src: str, dst: str [, recursive: bool = False, overwrite: bool = False])
 
@@ -154,50 +154,50 @@ path is a directory, this call always recursively moves all files.
     .. py:method:: put(path: str [, contents: Optional[str], overwrite: Optional[bool]])
 
         Upload a file.
-
-Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
-can also be used as a convenient single call for data upload.
-
-Alternatively you can pass contents as base64 string.
-
-The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited
-to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded.
-
-If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create,
-:method:dbfs/addBlock, :method:dbfs/close.
-
-:param path: str
-  The path of the new file. The path should be the absolute DBFS path.
-:param contents: str (optional)
-  This parameter might be absent, and instead a posted file will be used.
-:param overwrite: bool (optional)
-  The flag that specifies whether to overwrite existing file/files.
-
-
-
+        
+        Uploads a file through the use of multipart form post. It is mainly used for streaming uploads, but
+        can also be used as a convenient single call for data upload.
+        
+        Alternatively, you can pass the contents as a base64 string.
+        
+        The amount of data that can be passed (when not streaming) using the __contents__ parameter is limited
+        to 1 MB. `MAX_BLOCK_SIZE_EXCEEDED` will be thrown if this limit is exceeded.
+        
+        If you want to upload large files, use the streaming upload. For details, see :method:dbfs/create,
+        :method:dbfs/addBlock, :method:dbfs/close.
+        
+        :param path: str
+          The path of the new file. The path should be the absolute DBFS path.
+        :param contents: str (optional)
+          This parameter might be absent, and instead a posted file will be used.
+        :param overwrite: bool (optional)
+          The flag that specifies whether to overwrite existing file/files.
+        
+        
+        
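+        A minimal sketch of the single-call upload, assuming a configured
+        :class:`WorkspaceClient` and passing the contents as a base64 string:
+
+        .. code-block:: python
+
+            import base64
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Upload a small payload (must stay under the 1 MB limit).
+            w.dbfs.put("/tmp/example.json",
+                       contents=base64.b64encode(b'{"hello": "world"}').decode(),
+                       overwrite=True)
+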
 
     .. py:method:: read(path: str [, length: Optional[int], offset: Optional[int]]) -> ReadResponse
 
         Get the contents of a file.
-
-Returns the contents of a file. If the file does not exist, this call throws an exception with
-`RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
-is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
-1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
-
-If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of
-file.
-
-:param path: str
-  The path of the file to read. The path should be the absolute DBFS path.
-:param length: int (optional)
-  The number of bytes to read starting from the offset. This has a limit of 1 MB, and a default value
-  of 0.5 MB.
-:param offset: int (optional)
-  The offset to read from in bytes.
-
-:returns: :class:`ReadResponse`
-
+        
+        Returns the contents of a file. If the file does not exist, this call throws an exception with
+        `RESOURCE_DOES_NOT_EXIST`. If the path is a directory, the read length is negative, or if the offset
+        is negative, this call throws an exception with `INVALID_PARAMETER_VALUE`. If the read length exceeds
+        1 MB, this call throws an exception with `MAX_READ_SIZE_EXCEEDED`.
+        
+        If `offset + length` exceeds the number of bytes in a file, it reads the contents until the end of
+        file.
+        
+        :param path: str
+          The path of the file to read. The path should be the absolute DBFS path.
+        :param length: int (optional)
+          The number of bytes to read starting from the offset. This has a limit of 1 MB, and a default value
+          of 0.5 MB.
+        :param offset: int (optional)
+          The offset to read from in bytes.
+        
+        :returns: :class:`ReadResponse`
+        
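+        A minimal sketch of reading a file in chunks, assuming a configured
+        :class:`WorkspaceClient` and that :class:`ReadResponse` exposes ``bytes_read``
+        and base64-encoded ``data``:
+
+        .. code-block:: python
+
+            import base64
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            data, offset, chunk = b"", 0, 1024 * 1024
+            while True:
+                resp = w.dbfs.read("/tmp/example.txt", offset=offset, length=chunk)
+                if not resp.bytes_read:
+                    break
+                data += base64.b64decode(resp.data)
+                offset += resp.bytes_read
+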
 
     .. py:method:: upload(path: str, src: BinaryIO [, overwrite: bool = False])
 
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst
index 6314481a3..0151fcce2 100644
--- a/docs/workspace/files/files.rst
+++ b/docs/workspace/files/files.rst
@@ -5,150 +5,151 @@
 .. py:class:: FilesAPI
 
     The Files API is a standard HTTP API that allows you to read, write, list, and delete files and
-directories by referring to their URI. The API makes working with file content as raw bytes easier and
-more efficient.
-
-The API supports [Unity Catalog volumes], where files and directories to operate on are specified using
-their volume URI path, which follows the format
-/Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
-
-The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
-and DELETE to manage files and directories specified using their URI path. The path is always absolute.
-
-Some Files API client features are currently experimental. To enable them, set
-`enable_experimental_files_api_client = True` in your configuration profile or use the environment
-variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
-
-[Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
+    directories by referring to their URI. The API makes working with file content as raw bytes easier and
+    more efficient.
+    
+    The API supports [Unity Catalog volumes], where files and directories to operate on are specified using
+    their volume URI path, which follows the format
+    /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
+    
+    The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+    
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
+    
+    [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
 
     .. py:method:: create_directory(directory_path: str)
 
         Create a directory.
-
-Creates an empty directory. If necessary, also creates any parent directories of the new, empty
-directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
-response; this method is idempotent (it will succeed if the directory already exists).
-
-:param directory_path: str
-  The absolute path of a directory.
-
-
-
+        
+        Creates an empty directory. If necessary, also creates any parent directories of the new, empty
+        directory (like the shell command `mkdir -p`). If called on an existing directory, returns a success
+        response; this method is idempotent (it will succeed if the directory already exists).
+        
+        :param directory_path: str
+          The absolute path of a directory.
+        
+        
+        
 
     .. py:method:: delete(file_path: str)
 
         Delete a file.
-
-Deletes a file. If the request is successful, there is no response body.
-
-:param file_path: str
-  The absolute path of the file.
-
-
-
+        
+        Deletes a file. If the request is successful, there is no response body.
+        
+        :param file_path: str
+          The absolute path of the file.
+        
+        
+        
 
     .. py:method:: delete_directory(directory_path: str)
 
         Delete a directory.
-
-Deletes an empty directory.
-
-To delete a non-empty directory, first delete all of its contents. This can be done by listing the
-directory contents and deleting each file and subdirectory recursively.
-
-:param directory_path: str
-  The absolute path of a directory.
-
-
-
+        
+        Deletes an empty directory.
+        
+        To delete a non-empty directory, first delete all of its contents. This can be done by listing the
+        directory contents and deleting each file and subdirectory recursively.
+        
+        :param directory_path: str
+          The absolute path of a directory.
+        
+        
+        
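+        A minimal sketch of recursive deletion, assuming a configured
+        :class:`WorkspaceClient` and that :class:`DirectoryEntry` exposes ``path``
+        and ``is_directory``:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            def rm_rf(path: str) -> None:
+                # Delete every file and subdirectory first, then the directory itself.
+                for entry in w.files.list_directory_contents(path):
+                    if entry.is_directory:
+                        rm_rf(entry.path)
+                    else:
+                        w.files.delete(entry.path)
+                w.files.delete_directory(path)
+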
 
     .. py:method:: download(file_path: str) -> DownloadResponse
 
         Download a file.
-
-Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
-a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
-
-:param file_path: str
-  The absolute path of the file.
-
-:returns: :class:`DownloadResponse`
-
+        
+        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
+        
+        :param file_path: str
+          The absolute path of the file.
+        
+        :returns: :class:`DownloadResponse`
+        
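+        A minimal sketch of an upload/download round trip on a hypothetical volume
+        path, assuming :class:`DownloadResponse` exposes a readable ``contents`` stream:
+
+        .. code-block:: python
+
+            import io
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            path = "/Volumes/main/default/my_volume/hello.txt"  # hypothetical path
+            w.files.upload(path, io.BytesIO(b"hello"), overwrite=True)
+
+            resp = w.files.download(path)
+            assert resp.contents.read() == b"hello"
+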
 
     .. py:method:: get_directory_metadata(directory_path: str)
 
         Get directory metadata.
-
-Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
-body.
-
-This method is useful to check if a directory exists and the caller has access to it.
-
-If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory
-if it does not exist, and is idempotent (it will succeed if the directory already exists).
-
-:param directory_path: str
-  The absolute path of a directory.
-
-
-
+        
+        Get the metadata of a directory. The response HTTP headers contain the metadata. There is no response
+        body.
+        
+        This method is useful to check if a directory exists and the caller has access to it.
+        
+        If you wish to ensure the directory exists, you can instead use `PUT`, which will create the directory
+        if it does not exist, and is idempotent (it will succeed if the directory already exists).
+        
+        :param directory_path: str
+          The absolute path of a directory.
+        
+        
+        
 
     .. py:method:: get_metadata(file_path: str) -> GetMetadataResponse
 
         Get file metadata.
-
-Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
-
-:param file_path: str
-  The absolute path of the file.
-
-:returns: :class:`GetMetadataResponse`
-
+        
+        Get the metadata of a file. The response HTTP headers contain the metadata. There is no response body.
+        
+        :param file_path: str
+          The absolute path of the file.
+        
+        :returns: :class:`GetMetadataResponse`
+        
 
     .. py:method:: list_directory_contents(directory_path: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[DirectoryEntry]
 
         List directory contents.
-
-Returns the contents of a directory. If there is no directory at the specified path, the API returns a
-HTTP 404 error.
-
-:param directory_path: str
-  The absolute path of a directory.
-:param page_size: int (optional)
-  The maximum number of directory entries to return. The response may contain fewer entries. If the
-  response contains a `next_page_token`, there may be more entries, even if fewer than `page_size`
-  entries are in the response.
-  
-  We recommend not to set this value unless you are intentionally listing less than the complete
-  directory contents.
-  
-  If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
-  above 1000 will be coerced to 1000.
-:param page_token: str (optional)
-  An opaque page token which was the `next_page_token` in the response of the previous request to list
-  the contents of this directory. Provide this token to retrieve the next page of directory entries.
-  When providing a `page_token`, all other parameters provided to the request must match the previous
-  request. To list all of the entries in a directory, it is necessary to continue requesting pages of
-  entries until the response contains no `next_page_token`. Note that the number of entries returned
-  must not be used to determine when the listing is complete.
-
-:returns: Iterator over :class:`DirectoryEntry`
-
+        
+        Returns the contents of a directory. If there is no directory at the specified path, the API returns an
+        HTTP 404 error.
+        
+        :param directory_path: str
+          The absolute path of a directory.
+        :param page_size: int (optional)
+          The maximum number of directory entries to return. The response may contain fewer entries. If the
+          response contains a `next_page_token`, there may be more entries, even if fewer than `page_size`
+          entries are in the response.
+          
+          We recommend not to set this value unless you are intentionally listing less than the complete
+          directory contents.
+          
+          If unspecified, at most 1000 directory entries will be returned. The maximum value is 1000. Values
+          above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the contents of this directory. Provide this token to retrieve the next page of directory entries.
+          When providing a `page_token`, all other parameters provided to the request must match the previous
+          request. To list all of the entries in a directory, it is necessary to continue requesting pages of
+          entries until the response contains no `next_page_token`. Note that the number of entries returned
+          must not be used to determine when the listing is complete.
+        
+        :returns: Iterator over :class:`DirectoryEntry`
+        
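+        A minimal sketch, assuming a configured :class:`WorkspaceClient`; the returned
+        iterator is expected to request further pages via ``next_page_token`` transparently:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # List everything in a hypothetical volume directory.
+            for entry in w.files.list_directory_contents("/Volumes/main/default/my_volume"):
+                print(entry.path, entry.file_size)
+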
 
     .. py:method:: upload(file_path: str, contents: BinaryIO [, overwrite: Optional[bool]])
 
         Upload a file.
-
-Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
-octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
-resulting file will be exactly the bytes sent in the request body. If the request is successful, there
-is no response body.
-
-:param file_path: str
-  The absolute path of the file.
-:param contents: BinaryIO
-:param overwrite: bool (optional)
-  If true, an existing file will be overwritten.
-
-
+        
+        Uploads a file of up to 5 GiB. The file contents should be sent as the request body as raw bytes (an
+        octet stream); do not encode or otherwise modify the bytes before sending. The contents of the
+        resulting file will be exactly the bytes sent in the request body. If the request is successful, there
+        is no response body.
+        
+        :param file_path: str
+          The absolute path of the file.
+        :param contents: BinaryIO
+        :param overwrite: bool (optional)
+          If true, an existing file will be overwritten.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst
index d5d3b3252..a5f1feeda 100644
--- a/docs/workspace/iam/access_control.rst
+++ b/docs/workspace/iam/access_control.rst
@@ -9,14 +9,15 @@
     .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse
 
         Check access policy to a resource.
-
-:param actor: :class:`Actor`
-:param permission: str
-:param resource: str
-  Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
-  (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
-:param consistency_token: :class:`ConsistencyToken`
-:param authz_identity: :class:`RequestAuthzIdentity`
-:param resource_info: :class:`ResourceInfo` (optional)
-
-:returns: :class:`CheckPolicyResponse`
+        
+        :param actor: :class:`Actor`
+        :param permission: str
+        :param resource: str
+          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+        :param consistency_token: :class:`ConsistencyToken`
+        :param authz_identity: :class:`RequestAuthzIdentity`
+        :param resource_info: :class:`ResourceInfo` (optional)
+        
+        :returns: :class:`CheckPolicyResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/iam/account_access_control_proxy.rst b/docs/workspace/iam/account_access_control_proxy.rst
index 3242b7944..3265b29cc 100644
--- a/docs/workspace/iam/account_access_control_proxy.rst
+++ b/docs/workspace/iam/account_access_control_proxy.rst
@@ -5,51 +5,52 @@
 .. py:class:: AccountAccessControlProxyAPI
 
     These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
-grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
-called a rule set. A workspace must belong to an account for these APIs to work.
+    grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
+    called a rule set. A workspace must belong to an account for these APIs to work.
 
     .. py:method:: get_assignable_roles_for_resource(resource: str) -> GetAssignableRolesForResourceResponse
 
         Get assignable roles for a resource.
-
-Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule
-set on the resource can contain an access rule of the role.
-
-:param resource: str
-  The resource name for which assignable roles will be listed.
-
-:returns: :class:`GetAssignableRolesForResourceResponse`
-
+        
+        Gets all the roles that can be granted on an account-level resource. A role is grantable if the rule
+        set on the resource can contain an access rule of the role.
+        
+        :param resource: str
+          The resource name for which assignable roles will be listed.
+        
+        :returns: :class:`GetAssignableRolesForResourceResponse`
+        
 
     .. py:method:: get_rule_set(name: str, etag: str) -> RuleSetResponse
 
         Get a rule set.
-
-Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
-rules on the said resource. Currently only a default rule set for each resource is supported.
-
-:param name: str
-  The ruleset name associated with the request.
-:param etag: str
-  Etag used for versioning. The response is at least as fresh as the eTag provided. Etag is used for
-  optimistic concurrency control as a way to help prevent simultaneous updates of a rule set from
-  overwriting each other. It is strongly suggested that systems make use of the etag in the read ->
-  modify -> write pattern to perform rule set updates in order to avoid race conditions that is get an
-  etag from a GET rule set request, and pass it with the PUT update request to identify the rule set
-  version you are updating.
-
-:returns: :class:`RuleSetResponse`
-
+        
+        Get a rule set by its name. A rule set is always attached to a resource and contains a list of access
+        rules on the said resource. Currently only a default rule set for each resource is supported.
+        
+        :param name: str
+          The ruleset name associated with the request.
+        :param etag: str
+          Etag used for versioning. The response is at least as fresh as the etag provided. The etag is used
+          for optimistic concurrency control, as a way to help prevent simultaneous updates of a rule set from
+          overwriting each other. It is strongly suggested that systems use the etag in the read ->
+          modify -> write pattern to perform rule set updates and avoid race conditions: that is, get an
+          etag from a GET rule set request and pass it with the PUT update request to identify the rule set
+          version you are updating.
+        
+        :returns: :class:`RuleSetResponse`
+        
 
     .. py:method:: update_rule_set(name: str, rule_set: RuleSetUpdateRequest) -> RuleSetResponse
 
         Update a rule set.
-
-Replace the rules of a rule set. First, use a GET rule set request to read the current version of the
-rule set before modifying it. This pattern helps prevent conflicts between concurrent updates.
-
-:param name: str
-  Name of the rule set.
-:param rule_set: :class:`RuleSetUpdateRequest`
-
-:returns: :class:`RuleSetResponse`
+        
+        Replace the rules of a rule set. First, use a GET rule set request to read the current version of the
+        rule set before modifying it. This pattern helps prevent conflicts between concurrent updates.
+        
+        :param name: str
+          Name of the rule set.
+        :param rule_set: :class:`RuleSetUpdateRequest`
+        
+        :returns: :class:`RuleSetResponse`
+        
\ No newline at end of file
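
The read -> modify -> write pattern described above, as a hedged sketch; the rule-set name uses
placeholder account and service-principal IDs, and passing `etag=''` on the initial GET is assumed
to return the latest version:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Placeholder resource name; fill in real account and service principal IDs.
    name = 'accounts/<account-id>/servicePrincipals/<sp-id>/ruleSets/default'

    # GET the current version, then pass its etag back with the update so a
    # concurrent writer cannot be silently overwritten.
    current = w.account_access_control_proxy.get_rule_set(name=name, etag='')
    w.account_access_control_proxy.update_rule_set(
        name=name,
        rule_set=iam.RuleSetUpdateRequest(name=name,
                                          grant_rules=current.grant_rules or [],
                                          etag=current.etag))
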
diff --git a/docs/workspace/iam/current_user.rst b/docs/workspace/iam/current_user.rst
index 6a877bb2f..47ef1eff3 100644
--- a/docs/workspace/iam/current_user.rst
+++ b/docs/workspace/iam/current_user.rst
@@ -20,7 +20,8 @@
             me2 = w.current_user.me()
 
         Get current user info.
-
-Get details about the current method caller's identity.
-
-:returns: :class:`User`
+        
+        Get details about the current method caller's identity.
+        
+        :returns: :class:`User`
+        
\ No newline at end of file
diff --git a/docs/workspace/iam/groups.rst b/docs/workspace/iam/groups.rst
index 98ece2ad1..ef32112c8 100644
--- a/docs/workspace/iam/groups.rst
+++ b/docs/workspace/iam/groups.rst
@@ -5,11 +5,11 @@
 .. py:class:: GroupsAPI
 
     Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and
-other securable objects.
-
-It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
-instead of to users individually. All Databricks workspace identities can be assigned as members of
-groups, and members inherit permissions that are assigned to their group.
+    other securable objects.
+    
+    It is best practice to assign access to workspaces and access-control policies in Unity Catalog to groups,
+    instead of to users individually. All Databricks workspace identities can be assigned as members of
+    groups, and members inherit permissions that are assigned to their group.
 
     .. py:method:: create( [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]]) -> Group
 
@@ -30,30 +30,30 @@ groups, and members inherit permissions that are assigned to their group.
             w.groups.delete(id=group.id)
 
         Create a new group.
-
-Creates a group in the Databricks workspace with a unique name, using the supplied group details.
-
-:param display_name: str (optional)
-  String that represents a human-readable group name
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-  values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks group ID
-:param members: List[:class:`ComplexValue`] (optional)
-:param meta: :class:`ResourceMeta` (optional)
-  Container for the group identifier. Workspace local versus account.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`GroupSchema`] (optional)
-  The schema of the group.
-
-:returns: :class:`Group`
-
+        
+        Creates a group in the Databricks workspace with a unique name, using the supplied group details.
+        
+        :param display_name: str (optional)
+          String that represents a human-readable group name
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+          values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks group ID
+        :param members: List[:class:`ComplexValue`] (optional)
+        :param meta: :class:`ResourceMeta` (optional)
+          Container for the group identifier. Workspace local versus account.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`GroupSchema`] (optional)
+          The schema of the group.
+        
+        :returns: :class:`Group`
+        
 
     .. py:method:: delete(id: str)
 
@@ -73,14 +73,14 @@ Creates a group in the Databricks workspace with a unique name, using the suppli
             w.groups.delete(id=group.id)
 
         Delete a group.
-
-Deletes a group from the Databricks workspace.
-
-:param id: str
-  Unique ID for a group in the Databricks workspace.
-
-
-
+        
+        Deletes a group from the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a group in the Databricks workspace.
+        
+        
+        
 
     .. py:method:: get(id: str) -> Group
 
@@ -103,43 +103,43 @@ Deletes a group from the Databricks workspace.
             w.groups.delete(id=group.id)
 
         Get group details.
-
-Gets the information for a specific group in the Databricks workspace.
-
-:param id: str
-  Unique ID for a group in the Databricks workspace.
-
-:returns: :class:`Group`
-
+        
+        Gets the information for a specific group in the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a group in the Databricks workspace.
+        
+        :returns: :class:`Group`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[Group]
 
         List group details.
-
-Gets all details of the groups associated with the Databricks workspace.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`Group`
-
+        
+        Gets all details of the groups associated with the Databricks workspace.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`Group`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -174,41 +174,42 @@ Gets all details of the groups associated with the Databricks workspace.
             w.groups.delete(id=group.id)
 
         Update group details.
-
-Partially updates the details of a group.
-
-:param id: str
-  Unique ID for a group in the Databricks workspace.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates the details of a group.
+        
+        :param id: str
+          Unique ID for a group in the Databricks workspace.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: update(id: str [, display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], members: Optional[List[ComplexValue]], meta: Optional[ResourceMeta], roles: Optional[List[ComplexValue]], schemas: Optional[List[GroupSchema]]])
 
         Replace a group.
-
-Updates the details of a group by replacing the entire group entity.
-
-:param id: str
-  Databricks group ID
-:param display_name: str (optional)
-  String that represents a human-readable group name
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
-  values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param members: List[:class:`ComplexValue`] (optional)
-:param meta: :class:`ResourceMeta` (optional)
-  Container for the group identifier. Workspace local versus account.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`GroupSchema`] (optional)
-  The schema of the group.
-
-
+        
+        Updates the details of a group by replacing the entire group entity.
+        
+        :param id: str
+          Databricks group ID
+        :param display_name: str (optional)
+          String that represents a human-readable group name
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the group. See [assigning entitlements] for a full list of supported
+          values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param members: List[:class:`ComplexValue`] (optional)
+        :param meta: :class:`ResourceMeta` (optional)
+          Container for the group identifier. Workspace local versus account.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`GroupSchema`] (optional)
+          The schema of the group.
+        
+        
+        
\ No newline at end of file
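
To illustrate the SCIM `filter`, `sort_by`, and `sort_order` parameters described above, a short
sketch; the `data-` prefix is a placeholder:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Simple SCIM expression: groups whose display name starts with "data-",
    # sorted by name in descending order, returning only two attributes.
    for g in w.groups.list(filter='displayName sw "data-"',
                           sort_by='displayName',
                           sort_order=iam.ListSortOrder.DESCENDING,
                           attributes='id,displayName'):
        print(g.id, g.display_name)
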
diff --git a/docs/workspace/iam/permission_migration.rst b/docs/workspace/iam/permission_migration.rst
index 1aaba1a93..8eef6e0e1 100644
--- a/docs/workspace/iam/permission_migration.rst
+++ b/docs/workspace/iam/permission_migration.rst
@@ -9,14 +9,15 @@
     .. py:method:: migrate_permissions(workspace_id: int, from_workspace_group_name: str, to_account_group_name: str [, size: Optional[int]]) -> MigratePermissionsResponse
 
         Migrate Permissions.
-
-:param workspace_id: int
-  WorkspaceId of the associated workspace where the permission migration will occur.
-:param from_workspace_group_name: str
-  The name of the workspace group that permissions will be migrated from.
-:param to_account_group_name: str
-  The name of the account group that permissions will be migrated to.
-:param size: int (optional)
-  The maximum number of permissions that will be migrated.
-
-:returns: :class:`MigratePermissionsResponse`
+        
+        :param workspace_id: int
+          WorkspaceId of the associated workspace where the permission migration will occur.
+        :param from_workspace_group_name: str
+          The name of the workspace group that permissions will be migrated from.
+        :param to_account_group_name: str
+          The name of the account group that permissions will be migrated to.
+        :param size: int (optional)
+          The maximum number of permissions that will be migrated.
+        
+        :returns: :class:`MigratePermissionsResponse`
+        
\ No newline at end of file
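
A sketch of the call above with hypothetical workspace and group names; `size` caps how many
permissions are migrated per call:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Hypothetical IDs and group names.
    resp = w.permission_migration.migrate_permissions(
        workspace_id=1234567890,
        from_workspace_group_name='data-engineers',
        to_account_group_name='data-engineers-account',
        size=100)
    print(resp)
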
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 24894cc8b..bf8f8e77f 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -5,54 +5,54 @@
 .. py:class:: PermissionsAPI
 
     Permissions API are used to create read, write, edit, update and manage access for various users on
-different objects and endpoints.
-
-* **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
-
-* **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
-clusters.
-
-* **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use cluster
-policies.
-
-* **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
-manage, run, cancel, or own a Delta Live Tables pipeline.
-
-* **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a
-job.
-
-* **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or
-manage MLflow experiments.
-
-* **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can read, edit,
-or manage MLflow registered models.
-
-* **[Password permissions](:service:users)** — Manage which users can use password login when SSO is
-enabled.
-
-* **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or attach to
-pools.
-
-* **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
-
-* **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, query, or
-manage a serving endpoint.
-
-* **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage SQL
-warehouses.
-
-* **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
-
-* **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
-manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
-
-For the mapping of the required permissions for specific actions or abilities and other important
-information, see [Access Control].
-
-Note that to manage access control on service principals, use **[Account Access Control
-Proxy](:service:accountaccesscontrolproxy)**.
-
-[Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
+    different objects and endpoints.
+    
+    * **[Apps permissions](:service:apps)** — Manage which users can manage or use apps.
+    
+    * **[Cluster permissions](:service:clusters)** — Manage which users can manage, restart, or attach to
+    clusters.
+    
+    * **[Cluster policy permissions](:service:clusterpolicies)** — Manage which users can use cluster
+    policies.
+    
+    * **[Delta Live Tables pipeline permissions](:service:pipelines)** — Manage which users can view,
+    manage, run, cancel, or own a Delta Live Tables pipeline.
+    
+    * **[Job permissions](:service:jobs)** — Manage which users can view, manage, trigger, cancel, or own a
+    job.
+    
+    * **[MLflow experiment permissions](:service:experiments)** — Manage which users can read, edit, or
+    manage MLflow experiments.
+    
+    * **[MLflow registered model permissions](:service:modelregistry)** — Manage which users can read, edit,
+    or manage MLflow registered models.
+    
+    * **[Password permissions](:service:users)** — Manage which users can use password login when SSO is
+    enabled.
+    
+    * **[Instance Pool permissions](:service:instancepools)** — Manage which users can manage or attach to
+    pools.
+    
+    * **[Repo permissions](repos)** — Manage which users can read, run, edit, or manage a repo.
+    
+    * **[Serving endpoint permissions](:service:servingendpoints)** — Manage which users can view, query, or
+    manage a serving endpoint.
+    
+    * **[SQL warehouse permissions](:service:warehouses)** — Manage which users can use or manage SQL
+    warehouses.
+    
+    * **[Token permissions](:service:tokenmanagement)** — Manage which users can create or use tokens.
+    
+    * **[Workspace object permissions](:service:workspace)** — Manage which users can read, run, edit, or
+    manage alerts, dbsql-dashboards, directories, files, notebooks and queries.
+    
+    For the mapping of the required permissions for specific actions or abilities and other important
+    information, see [Access Control].
+    
+    Note that to manage access control on service principals, use **[Account Access Control
+    Proxy](:service:accountaccesscontrolproxy)**.
+    
+    [Access Control]: https://docs.databricks.com/security/auth-authz/access-control/index.html
 
     .. py:method:: get(request_object_type: str, request_object_id: str) -> ObjectPermissions
 
@@ -75,19 +75,19 @@ Proxy](:service:accountaccesscontrolproxy)**.
                                                          request_object_id="%d" % (obj.object_id))
 
         Get object permissions.
-
-Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
-object.
-
-:param request_object_type: str
-  The type of the request object. Can be one of the following: alerts, authorization, clusters,
-  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-:param request_object_id: str
-  The id of the request object.
-
-:returns: :class:`ObjectPermissions`
-
+        
+        Gets the permissions of an object. Objects can inherit permissions from their parent objects or root
+        object.
+        
+        :param request_object_type: str
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+        :param request_object_id: str
+          The id of the request object.
+        
+        :returns: :class:`ObjectPermissions`
+        
 
     .. py:method:: get_permission_levels(request_object_type: str, request_object_id: str) -> GetPermissionLevelsResponse
 
@@ -110,16 +110,16 @@ object.
                                                          request_object_id="%d" % (obj.object_id))
 
         Get object permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param request_object_type: str
-  
-:param request_object_id: str
-  
-
-:returns: :class:`GetPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param request_object_type: str
+          
+        :param request_object_id: str
+          
+        
+        :returns: :class:`GetPermissionLevelsResponse`
+        
 
     .. py:method:: set(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions
 
@@ -152,35 +152,36 @@ Gets the permission levels that a user can have on an object.
             w.groups.delete(id=group.id)
 
         Set object permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their parent objects or root
-object.
-
-:param request_object_type: str
-  The type of the request object. Can be one of the following: alerts, authorization, clusters,
-  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-:param request_object_id: str
-  The id of the request object.
-:param access_control_list: List[:class:`AccessControlRequest`] (optional)
-
-:returns: :class:`ObjectPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
+        
+        :param request_object_type: str
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+        :param request_object_id: str
+          The id of the request object.
+        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        
+        :returns: :class:`ObjectPermissions`
+        
 
     .. py:method:: update(request_object_type: str, request_object_id: str [, access_control_list: Optional[List[AccessControlRequest]]]) -> ObjectPermissions
 
         Update object permissions.
-
-Updates the permissions on an object. Objects can inherit permissions from their parent objects or
-root object.
-
-:param request_object_type: str
-  The type of the request object. Can be one of the following: alerts, authorization, clusters,
-  cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
-  jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
-:param request_object_id: str
-  The id of the request object.
-:param access_control_list: List[:class:`AccessControlRequest`] (optional)
-
-:returns: :class:`ObjectPermissions`
+        
+        Updates the permissions on an object. Objects can inherit permissions from their parent objects or
+        root object.
+        
+        :param request_object_type: str
+          The type of the request object. Can be one of the following: alerts, authorization, clusters,
+          cluster-policies, dashboards, dbsql-dashboards, directories, experiments, files, instance-pools,
+          jobs, notebooks, pipelines, queries, registered-models, repos, serving-endpoints, or warehouses.
+        :param request_object_id: str
+          The id of the request object.
+        :param access_control_list: List[:class:`AccessControlRequest`] (optional)
+        
+        :returns: :class:`ObjectPermissions`
+        
\ No newline at end of file
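
As a sketch of the `set`/`update` distinction above (`set` replaces an object's direct ACL, while
`update` merges the supplied entries), using a hypothetical cluster ID and group name:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # `update` merges this entry into the existing ACL; `set` with the same
    # arguments would replace all direct permissions on the cluster instead.
    w.permissions.update(
        request_object_type='clusters',
        request_object_id='<cluster-id>',
        access_control_list=[
            iam.AccessControlRequest(group_name='data-engineers',
                                     permission_level=iam.PermissionLevel.CAN_RESTART)
        ])
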
diff --git a/docs/workspace/iam/service_principals.rst b/docs/workspace/iam/service_principals.rst
index f1ba78396..0fb8ca643 100644
--- a/docs/workspace/iam/service_principals.rst
+++ b/docs/workspace/iam/service_principals.rst
@@ -5,10 +5,10 @@
 .. py:class:: ServicePrincipalsAPI
 
     Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms.
-Databricks recommends creating service principals to run production jobs or modify production data. If all
-processes that act on production data run with service principals, interactive users do not need any
-write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
-production data by accident.
+    Databricks recommends creating service principals to run production jobs or modify production data. If all
+    processes that act on production data run with service principals, interactive users do not need any
+    write, delete, or modify privileges in production. This eliminates the risk of a user overwriting
+    production data by accident.
 
     .. py:method:: create( [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]]) -> ServicePrincipal
 
@@ -33,43 +33,43 @@ production data by accident.
             w.service_principals.delete(id=spn.id)
 
         Create a service principal.
-
-Creates a new service principal in the Databricks workspace.
-
-:param active: bool (optional)
-  If this user is active
-:param application_id: str (optional)
-  UUID relating to the service principal
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-  supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks service principal ID.
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-  The schema of the List response.
-
-:returns: :class:`ServicePrincipal`
-
+        
+        Creates a new service principal in the Databricks workspace.
+        
+        :param active: bool (optional)
+          If this user is active
+        :param application_id: str (optional)
+          UUID relating to the service principal
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+          supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks service principal ID.
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+          The schema of the List response.
+        
+        :returns: :class:`ServicePrincipal`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a service principal.
-
-Delete a single service principal in the Databricks workspace.
-
-:param id: str
-  Unique ID for a service principal in the Databricks workspace.
-
-
-
+        
+        Delete a single service principal in the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks workspace.
+        
+        
+        
 
     .. py:method:: get(id: str) -> ServicePrincipal
 
@@ -92,14 +92,14 @@ Delete a single service principal in the Databricks workspace.
             w.service_principals.delete(id=created.id)
 
         Get service principal details.
-
-Gets the details for a single service principal define in the Databricks workspace.
-
-:param id: str
-  Unique ID for a service principal in the Databricks workspace.
-
-:returns: :class:`ServicePrincipal`
-
+        
+        Gets the details for a single service principal defined in the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks workspace.
+        
+        :returns: :class:`ServicePrincipal`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[ServicePrincipal]
 
@@ -116,31 +116,31 @@ Gets the details for a single service principal define in the Databricks workspa
             all = w.service_principals.list(iam.ListServicePrincipalsRequest())
 
         List service principals.
-
-Gets the set of service principals associated with a Databricks workspace.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`ServicePrincipal`
-
+        
+        Gets the set of service principals associated with a Databricks workspace.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`ServicePrincipal`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -168,17 +168,17 @@ Gets the set of service principals associated with a Databricks workspace.
             w.service_principals.delete(id=created.id)
 
         Update service principal details.
-
-Partially updates the details of a single service principal in the Databricks workspace.
-
-:param id: str
-  Unique ID for a service principal in the Databricks workspace.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates the details of a single service principal in the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a service principal in the Databricks workspace.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: update(id: str [, active: Optional[bool], application_id: Optional[str], display_name: Optional[str], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], roles: Optional[List[ComplexValue]], schemas: Optional[List[ServicePrincipalSchema]]])
 
@@ -204,29 +204,30 @@ Partially updates the details of a single service principal in the Databricks wo
             w.service_principals.delete(id=created.id)
 
         Replace service principal.
-
-Updates the details of a single service principal.
-
-This action replaces the existing service principal with the same name.
-
-:param id: str
-  Databricks service principal ID.
-:param active: bool (optional)
-  If this user is active
-:param application_id: str (optional)
-  UUID relating to the service principal
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
-  supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-:param groups: List[:class:`ComplexValue`] (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`ServicePrincipalSchema`] (optional)
-  The schema of the List response.
-
-
+        
+        Updates the details of a single service principal.
+        
+        This action replaces the existing service principal with the same name.
+        
+        :param id: str
+          Databricks service principal ID.
+        :param active: bool (optional)
+          If this user is active
+        :param application_id: str (optional)
+          UUID relating to the service principal
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the service principal. See [assigning entitlements] for a full list of
+          supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`ServicePrincipalSchema`] (optional)
+          The schema of the List response.
+        
+        
+        
\ No newline at end of file
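
A sketch of a SCIM patch that deactivates a service principal rather than replacing it; the ID is a
placeholder, and the string value `'false'` for the `active` attribute is an assumption based on
SCIM's string-typed patch values:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import iam

    w = WorkspaceClient()

    # Placeholder ID; replaces only the `active` attribute, leaving the rest
    # of the service principal untouched.
    w.service_principals.patch(
        id='<service-principal-id>',
        operations=[iam.Patch(op=iam.PatchOp.REPLACE, path='active', value='false')],
        schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
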
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst
index e7c16c191..616ef7b86 100644
--- a/docs/workspace/iam/users.rst
+++ b/docs/workspace/iam/users.rst
@@ -5,14 +5,14 @@
 .. py:class:: UsersAPI
 
     User identities recognized by Databricks and represented by email addresses.
-
-Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
-provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your
-identity provider to create users and groups in Databricks workspace and give them the proper level of
-access. When a user leaves your organization or no longer needs access to Databricks workspace, admins can
-terminate the user in your identity provider and that user’s account will also be removed from
-Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users from
-accessing sensitive data.
+    
+    Databricks recommends using SCIM provisioning to sync users and groups automatically from your identity
+    provider to your Databricks workspace. SCIM streamlines onboarding a new employee or team by using your
+    identity provider to create users and groups in the Databricks workspace and give them the proper level
+    of access. When a user leaves your organization or no longer needs access to the Databricks workspace,
+    admins can terminate the user in your identity provider and that user’s account will also be removed from
+    the Databricks workspace. This ensures a consistent offboarding process and prevents unauthorized users
+    from accessing sensitive data.
 
     .. py:method:: create( [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], id: Optional[str], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]]) -> User
 
@@ -30,40 +30,40 @@ accessing sensitive data.
             user = w.users.create(display_name=f'sdk-{time.time_ns()}', user_name=f'sdk-{time.time_ns()}@example.com')
 
         Create a new user.
-
-Creates a new user in the Databricks workspace. This new user will also be added to the Databricks
-account.
-
-:param active: bool (optional)
-  If this user is active
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names. For example `John Smith`. This
-  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-  Account SCIM APIs to update `displayName`.
-  
-  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-:param emails: List[:class:`ComplexValue`] (optional)
-  All the emails associated with the Databricks user.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-  External ID is not currently supported. It is reserved for future use.
-:param groups: List[:class:`ComplexValue`] (optional)
-:param id: str (optional)
-  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-  be ignored.
-:param name: :class:`Name` (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`UserSchema`] (optional)
-  The schema of the user.
-:param user_name: str (optional)
-  Email address of the Databricks user.
-
-:returns: :class:`User`
-
+        
+        Creates a new user in the Databricks workspace. This new user will also be added to the Databricks
+        account.
+        
+        :param active: bool (optional)
+          If this user is active
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names. For example `John Smith`. This
+          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+          Account SCIM APIs to update `displayName`.
+          
+          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+        :param emails: List[:class:`ComplexValue`] (optional)
+          All the emails associated with the Databricks user.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+          External ID is not currently supported. It is reserved for future use.
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param id: str (optional)
+          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+          be ignored.
+        :param name: :class:`Name` (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`UserSchema`] (optional)
+          The schema of the user.
+        :param user_name: str (optional)
+          Email address of the Databricks user.
+        
+        :returns: :class:`User`
+        
 
     .. py:method:: delete(id: str)
 
@@ -83,15 +83,15 @@ account.
             w.users.delete(id=other_owner.id)
 
         Delete a user.
-
-Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the
-user.
-
-:param id: str
-  Unique ID for a user in the Databricks workspace.
-
-
-
+        
+        Deletes a user. Deleting a user from a Databricks workspace also removes objects associated with the
+        user.
+        
+        :param id: str
+          Unique ID for a user in the Databricks workspace.
+        
+        
+        
 
     .. py:method:: get(id: str [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[GetSortOrder], start_index: Optional[int]]) -> User
 
@@ -111,52 +111,52 @@ user.
             fetch = w.users.get(id=user.id)
 
         Get user details.
-
-Gets information for a specific user in Databricks workspace.
-
-:param id: str
-  Unique ID for a user in the Databricks workspace.
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-  `name.givenName`, and `emails`.
-:param sort_order: :class:`GetSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: :class:`User`
-
+        
+        Gets information for a specific user in the Databricks workspace.
+        
+        :param id: str
+          Unique ID for a user in the Databricks workspace.
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+          `name.givenName`, and `emails`.
+        :param sort_order: :class:`GetSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: :class:`User`
+        
 
     .. py:method:: get_permission_levels() -> GetPasswordPermissionLevelsResponse
 
         Get password permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:returns: :class:`GetPasswordPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :returns: :class:`GetPasswordPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions() -> PasswordPermissions
 
         Get password permissions.
-
-Gets the permissions of all passwords. Passwords can inherit permissions from their root object.
-
-:returns: :class:`PasswordPermissions`
-
+        
+        Gets the permissions of all passwords. Passwords can inherit permissions from their root object.
+        
+        :returns: :class:`PasswordPermissions`
+        
 
     .. py:method:: list( [, attributes: Optional[str], count: Optional[int], excluded_attributes: Optional[str], filter: Optional[str], sort_by: Optional[str], sort_order: Optional[ListSortOrder], start_index: Optional[int]]) -> Iterator[User]
 
@@ -175,32 +175,32 @@ Gets the permissions of all passwords. Passwords can inherit permissions from th
                                      sort_order=iam.ListSortOrder.DESCENDING)
 
         List users.
-
-Gets details for all the users associated with a Databricks workspace.
-
-:param attributes: str (optional)
-  Comma-separated list of attributes to return in response.
-:param count: int (optional)
-  Desired number of results per page.
-:param excluded_attributes: str (optional)
-  Comma-separated list of attributes to exclude in response.
-:param filter: str (optional)
-  Query by which the results have to be filtered. Supported operators are equals(`eq`),
-  contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
-  formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
-  only support simple expressions.
-  
-  [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
-:param sort_by: str (optional)
-  Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
-  `name.givenName`, and `emails`.
-:param sort_order: :class:`ListSortOrder` (optional)
-  The order to sort the results.
-:param start_index: int (optional)
-  Specifies the index of the first result. First item is number 1.
-
-:returns: Iterator over :class:`User`
-
+        
+        Gets details for all the users associated with a Databricks workspace.
+        
+        :param attributes: str (optional)
+          Comma-separated list of attributes to return in response.
+        :param count: int (optional)
+          Desired number of results per page.
+        :param excluded_attributes: str (optional)
+          Comma-separated list of attributes to exclude in response.
+        :param filter: str (optional)
+          Query by which the results have to be filtered. Supported operators are equals(`eq`),
+          contains(`co`), starts with(`sw`) and not equals(`ne`). Additionally, simple expressions can be
+          formed using logical operators - `and` and `or`. The [SCIM RFC] has more details but we currently
+          only support simple expressions.
+          
+          [SCIM RFC]: https://tools.ietf.org/html/rfc7644#section-3.4.2.2
+        :param sort_by: str (optional)
+          Attribute to sort the results. Multi-part paths are supported. For example, `userName`,
+          `name.givenName`, and `emails`.
+        :param sort_order: :class:`ListSortOrder` (optional)
+          The order to sort the results.
+        :param start_index: int (optional)
+          Specifies the index of the first result. First item is number 1.
+        
+        :returns: Iterator over :class:`User`
+        
 
     .. py:method:: patch(id: str [, operations: Optional[List[Patch]], schemas: Optional[List[PatchSchema]]])
 
@@ -223,29 +223,29 @@ Gets details for all the users associated with a Databricks workspace.
                           schemas=[iam.PatchSchema.URN_IETF_PARAMS_SCIM_API_MESSAGES_2_0_PATCH_OP])
 
         Update user details.
-
-Partially updates a user resource by applying the supplied operations on specific user attributes.
-
-:param id: str
-  Unique ID for a user in the Databricks workspace.
-:param operations: List[:class:`Patch`] (optional)
-:param schemas: List[:class:`PatchSchema`] (optional)
-  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
-
-
-
+        
+        Partially updates a user resource by applying the supplied operations on specific user attributes.
+        
+        :param id: str
+          Unique ID for a user in the Databricks workspace.
+        :param operations: List[:class:`Patch`] (optional)
+        :param schemas: List[:class:`PatchSchema`] (optional)
+          The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
+        
+        
+        
 
     .. py:method:: set_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions
 
         Set password permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-
-:returns: :class:`PasswordPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
+        
+        :returns: :class:`PasswordPermissions`
+        
 
     .. py:method:: update(id: str [, active: Optional[bool], display_name: Optional[str], emails: Optional[List[ComplexValue]], entitlements: Optional[List[ComplexValue]], external_id: Optional[str], groups: Optional[List[ComplexValue]], name: Optional[Name], roles: Optional[List[ComplexValue]], schemas: Optional[List[UserSchema]], user_name: Optional[str]])
 
@@ -265,46 +265,47 @@ permissions if none are specified. Objects can inherit permissions from their ro
             w.users.update(id=user.id, user_name=user.user_name, active=True)
 
         Replace a user.
-
-Replaces a user's information with the data supplied in request.
-
-:param id: str
-  Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
-  be ignored.
-:param active: bool (optional)
-  If this user is active
-:param display_name: str (optional)
-  String that represents a concatenation of given and family names. For example `John Smith`. This
-  field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
-  Account SCIM APIs to update `displayName`.
-  
-  [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
-:param emails: List[:class:`ComplexValue`] (optional)
-  All the emails associated with the Databricks user.
-:param entitlements: List[:class:`ComplexValue`] (optional)
-  Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
-  
-  [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
-:param external_id: str (optional)
-  External ID is not currently supported. It is reserved for future use.
-:param groups: List[:class:`ComplexValue`] (optional)
-:param name: :class:`Name` (optional)
-:param roles: List[:class:`ComplexValue`] (optional)
-  Corresponds to AWS instance profile/arn role.
-:param schemas: List[:class:`UserSchema`] (optional)
-  The schema of the user.
-:param user_name: str (optional)
-  Email address of the Databricks user.
-
-
-
+        
+        Replaces a user's information with the data supplied in request.
+        
+        :param id: str
+          Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
+          be ignored.
+        :param active: bool (optional)
+          If this user is active
+        :param display_name: str (optional)
+          String that represents a concatenation of given and family names. For example `John Smith`. This
+          field cannot be updated through the Workspace SCIM APIs when [identity federation is enabled]. Use
+          Account SCIM APIs to update `displayName`.
+          
+          [identity federation is enabled]: https://docs.databricks.com/administration-guide/users-groups/best-practices.html#enable-identity-federation
+        :param emails: List[:class:`ComplexValue`] (optional)
+          All the emails associated with the Databricks user.
+        :param entitlements: List[:class:`ComplexValue`] (optional)
+          Entitlements assigned to the user. See [assigning entitlements] for a full list of supported values.
+          
+          [assigning entitlements]: https://docs.databricks.com/administration-guide/users-groups/index.html#assigning-entitlements
+        :param external_id: str (optional)
+          External ID is not currently supported. It is reserved for future use.
+        :param groups: List[:class:`ComplexValue`] (optional)
+        :param name: :class:`Name` (optional)
+        :param roles: List[:class:`ComplexValue`] (optional)
+          Corresponds to AWS instance profile/arn role.
+        :param schemas: List[:class:`UserSchema`] (optional)
+          The schema of the user.
+        :param user_name: str (optional)
+          Email address of the Databricks user.
+        
+        
+        
 
     .. py:method:: update_permissions( [, access_control_list: Optional[List[PasswordAccessControlRequest]]]) -> PasswordPermissions
 
         Update password permissions.
-
-Updates the permissions on all passwords. Passwords can inherit permissions from their root object.
-
-:param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
-
-:returns: :class:`PasswordPermissions`
+        
+        Updates the permissions on all passwords. Passwords can inherit permissions from their root object.
+        
+        :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
+        
+        :returns: :class:`PasswordPermissions`
+        
\ No newline at end of file
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index f4168ed96..36f7d7d39 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -5,20 +5,20 @@
 .. py:class:: JobsExt
 
     The Jobs API allows you to create, edit, and delete jobs.
-
-You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with
-scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with
-complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error
-reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use
-scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or
-Python, Scala, Spark submit, and Java applications.
-
-You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in
-the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs.
-
-[Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
-[Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
-[Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets
+    
+    You can use a Databricks job to run a data processing or data analysis task in a Databricks cluster with
+    scalable resources. Your job can consist of a single task or can be a large, multi-task workflow with
+    complex dependencies. Databricks manages the task orchestration, cluster management, monitoring, and error
+    reporting for all of your jobs. You can run your jobs immediately or periodically through an easy-to-use
+    scheduling system. You can implement job tasks using notebooks, JARS, Delta Live Tables pipelines, or
+    Python, Scala, Spark submit, and Java applications.
+    
+    You should never hard code secrets or store them in plain text. Use the [Secrets CLI] to manage secrets in
+    the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebooks and jobs.
+    
+    [Databricks CLI]: https://docs.databricks.com/dev-tools/cli/index.html
+    [Secrets CLI]: https://docs.databricks.com/dev-tools/cli/secrets-cli.html
+    [Secrets utility]: https://docs.databricks.com/dev-tools/databricks-utils.html#dbutils-secrets
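As a sketch of the secrets guidance above (the scope and key names are placeholders), a notebook task would read a credential at runtime rather than embedding it:

    # Inside a notebook task; dbutils is only available in the notebook runtime.
    password = dbutils.secrets.get(scope='my-scope', key='db-password')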
 
     .. py:method:: cancel_all_runs( [, all_queued_runs: Optional[bool], job_id: Optional[int]])
 
@@ -55,18 +55,18 @@ the [Databricks CLI]. Use the [Secrets utility] to reference secrets in notebook
             w.jobs.delete(job_id=created_job.job_id)
 
         Cancel all runs of a job.
-
-Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
-from being started.
-
-:param all_queued_runs: bool (optional)
-  Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in
-  the workspace are canceled.
-:param job_id: int (optional)
-  The canonical identifier of the job to cancel all runs of.
-
-
-
+        
+        Cancels all active runs of a job. The runs are canceled asynchronously, so it doesn't prevent new runs
+        from being started.
+        
+        :param all_queued_runs: bool (optional)
+          Optional boolean parameter to cancel all queued runs. If no job_id is provided, all queued runs in
+          the workspace are canceled.
+        :param job_id: int (optional)
+          The canonical identifier of the job to cancel all runs of.
+        
+        
+        
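A minimal sketch; the job id is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Cancels every active run of this job; new runs can still be started afterwards.
    w.jobs.cancel_all_runs(job_id=536591785949415)
    # Omitting job_id and passing all_queued_runs=True instead cancels all
    # queued runs across the workspace.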
 
     .. py:method:: cancel_run(run_id: int) -> Wait[Run]
 
@@ -105,17 +105,17 @@ from being started.
             w.jobs.delete(job_id=created_job.job_id)
 
         Cancel a run.
-
-Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
-this request completes.
-
-:param run_id: int
-  This field is required.
-
-:returns:
-  Long-running operation waiter for :class:`Run`.
-  See :method:wait_get_run_job_terminated_or_skipped for more details.
-
+        
+        Cancels a job run or a task run. The run is canceled asynchronously, so it may still be running when
+        this request completes.
+        
+        :param run_id: int
+          This field is required.
+        
+        :returns:
+          Long-running operation waiter for :class:`Run`.
+          See :method:wait_get_run_job_terminated_or_skipped for more details.
+        
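Because cancellation is asynchronous, the returned waiter can be used to block until the run actually terminates; `run_id` below is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # .result() waits until the run reaches a terminated or skipped state.
    run = w.jobs.cancel_run(run_id=run_id).result()
    print(run.state.life_cycle_state)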
 
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
@@ -153,125 +153,125 @@ this request completes.
             w.jobs.delete(job_id=created_job.job_id)
 
         Create a new job.
-
-Create a new job.
-
-:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-  List of permissions to set on the job.
-:param budget_policy_id: str (optional)
-  The id of the user specified budget policy to use for this job. If not specified, a default budget
-  policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
-  budget policy used by this workload.
-:param continuous: :class:`Continuous` (optional)
-  An optional continuous property for this job. The continuous property will ensure that there is
-  always one run executing. Only one of `schedule` and `continuous` can be used.
-:param deployment: :class:`JobDeployment` (optional)
-  Deployment information for jobs managed by external sources.
-:param description: str (optional)
-  An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
-:param edit_mode: :class:`JobEditMode` (optional)
-  Edit mode of the job.
-  
-  * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
-  an editable state and can be modified.
-:param email_notifications: :class:`JobEmailNotifications` (optional)
-  An optional set of email addresses that is notified when runs of this job begin or complete as well
-  as when this job is deleted.
-:param environments: List[:class:`JobEnvironment`] (optional)
-  A list of task execution environment specifications that can be referenced by serverless tasks of
-  this job. An environment is required to be present for serverless tasks. For serverless notebook
-  tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
-  the task environment is required to be specified using environment_key in the task settings.
-:param format: :class:`Format` (optional)
-  Used to tell what is the format of the job. This field is ignored in Create/Update/Reset calls. When
-  using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
-:param git_source: :class:`GitSource` (optional)
-  An optional specification for a remote Git repository containing the source code used by tasks.
-  Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-  
-  If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-  However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-  
-  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
-  used, `git_source` must be defined on the job.
-:param health: :class:`JobsHealthRules` (optional)
-  An optional set of health rules that can be defined for this job.
-:param job_clusters: List[:class:`JobCluster`] (optional)
-  A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
-  cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
-  If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
-:param max_concurrent_runs: int (optional)
-  An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
-  able to execute multiple runs of the same job concurrently. This is useful for example if you
-  trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
-  other, or if you want to trigger multiple runs which differ by their input parameters. This setting
-  affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent
-  active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from
-  then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed
-  1000. Setting this value to `0` causes all new runs to be skipped.
-:param name: str (optional)
-  An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
-:param notification_settings: :class:`JobNotificationSettings` (optional)
-  Optional notification settings that are used when sending notifications to each of the
-  `email_notifications` and `webhook_notifications` for this job.
-:param parameters: List[:class:`JobParameterDefinition`] (optional)
-  Job-level parameter definitions
-:param performance_target: :class:`PerformanceTarget` (optional)
-  PerformanceTarget defines how performant or cost efficient the execution of run on serverless should
-  be.
-:param queue: :class:`QueueSettings` (optional)
-  The queue settings of the job.
-:param run_as: :class:`JobRunAs` (optional)
-  Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
-  the job runs as the user who created the job.
-  
-  Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
-:param schedule: :class:`CronSchedule` (optional)
-  An optional periodic schedule for this job. The default behavior is that the job only runs when
-  triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
-:param tags: Dict[str,str] (optional)
-  A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs
-  clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
-  to the job.
-:param tasks: List[:class:`Task`] (optional)
-  A list of task specifications to be executed by this job. If more than 100 tasks are available, you
-  can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
-  to determine if more results are available.
-:param timeout_seconds: int (optional)
-  An optional timeout applied to each run of this job. A value of `0` means no timeout.
-:param trigger: :class:`TriggerSettings` (optional)
-  A configuration to trigger a run when certain conditions are met. The default behavior is that the
-  job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
-  `runNow`.
-:param webhook_notifications: :class:`WebhookNotifications` (optional)
-  A collection of system notification IDs to notify when runs of this job begin or complete.
-
-:returns: :class:`CreateResponse`
-
+        
+        Create a new job.
+        
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+          List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user-specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
+        :param continuous: :class:`Continuous` (optional)
+          An optional continuous property for this job. The continuous property will ensure that there is
+          always one run executing. Only one of `schedule` and `continuous` can be used.
+        :param deployment: :class:`JobDeployment` (optional)
+          Deployment information for jobs managed by external sources.
+        :param description: str (optional)
+          An optional description for the job. The maximum length is 27700 characters in UTF-8 encoding.
+        :param edit_mode: :class:`JobEditMode` (optional)
+          Edit mode of the job.
+          
+          * `UI_LOCKED`: The job is in a locked UI state and cannot be modified. * `EDITABLE`: The job is in
+          an editable state and can be modified.
+        :param email_notifications: :class:`JobEmailNotifications` (optional)
+          An optional set of email addresses that is notified when runs of this job begin or complete as well
+          as when this job is deleted.
+        :param environments: List[:class:`JobEnvironment`] (optional)
+          A list of task execution environment specifications that can be referenced by serverless tasks of
+          this job. An environment is required to be present for serverless tasks. For serverless notebook
+          tasks, the environment is accessible in the notebook environment panel. For other serverless tasks,
+          the task environment is required to be specified using environment_key in the task settings.
+        :param format: :class:`Format` (optional)
+          Used to indicate the format of the job. This field is ignored in Create/Update/Reset calls. When
+          using the Jobs API 2.1 this value is always set to `"MULTI_TASK"`.
+        :param git_source: :class:`GitSource` (optional)
+          An optional specification for a remote Git repository containing the source code used by tasks.
+          Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+          
+          If `git_source` is set, these tasks retrieve the file from the remote repository by default.
+          However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
+          
+          Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
+          used, `git_source` must be defined on the job.
+        :param health: :class:`JobsHealthRules` (optional)
+          An optional set of health rules that can be defined for this job.
+        :param job_clusters: List[:class:`JobCluster`] (optional)
+          A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
+          cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
+        :param max_concurrent_runs: int (optional)
+          An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
+          able to execute multiple runs of the same job concurrently. This is useful for example if you
+          trigger your job on a frequent schedule and want to allow consecutive runs to overlap with each
+          other, or if you want to trigger multiple runs which differ by their input parameters. This setting
+          affects only new runs. For example, suppose the job’s concurrency is 4 and there are 4 concurrent
+          active runs. Then setting the concurrency to 3 won’t kill any of the active runs. However, from
+          then on, new runs are skipped unless there are fewer than 3 active runs. This value cannot exceed
+          1000. Setting this value to `0` causes all new runs to be skipped.
+        :param name: str (optional)
+          An optional name for the job. The maximum length is 4096 bytes in UTF-8 encoding.
+        :param notification_settings: :class:`JobNotificationSettings` (optional)
+          Optional notification settings that are used when sending notifications to each of the
+          `email_notifications` and `webhook_notifications` for this job.
+        :param parameters: List[:class:`JobParameterDefinition`] (optional)
+          Job-level parameter definitions
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of the run on
+          serverless should be.
+        :param queue: :class:`QueueSettings` (optional)
+          The queue settings of the job.
+        :param run_as: :class:`JobRunAs` (optional)
+          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+          the job runs as the user who created the job.
+          
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
+        :param schedule: :class:`CronSchedule` (optional)
+          An optional periodic schedule for this job. The default behavior is that the job only runs when
+          triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
+        :param tags: Dict[str,str] (optional)
+          A map of tags associated with the job. These are forwarded to the cluster as cluster tags for jobs
+          clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
+          to the job.
+        :param tasks: List[:class:`Task`] (optional)
+          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+          to determine if more results are available.
+        :param timeout_seconds: int (optional)
+          An optional timeout applied to each run of this job. A value of `0` means no timeout.
+        :param trigger: :class:`TriggerSettings` (optional)
+          A configuration to trigger a run when certain conditions are met. The default behavior is that the
+          job runs only when triggered by clicking “Run Now” in the Jobs UI or sending an API request to
+          `runNow`.
+        :param webhook_notifications: :class:`WebhookNotifications` (optional)
+          A collection of system notification IDs to notify when runs of this job begin or complete.
+        
+        :returns: :class:`CreateResponse`
+        
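A minimal sketch of a single-task job; the notebook path and `cluster_id` are placeholders:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    created = w.jobs.create(
        name='hello-job',
        tasks=[jobs.Task(task_key='main',
                         existing_cluster_id=cluster_id,
                         notebook_task=jobs.NotebookTask(notebook_path='/Users/someone@example.com/hello'))])
    print(created.job_id)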
 
     .. py:method:: delete(job_id: int)
 
         Delete a job.
-
-Deletes a job.
-
-:param job_id: int
-  The canonical identifier of the job to delete. This field is required.
-
-
-
+        
+        Deletes a job.
+        
+        :param job_id: int
+          The canonical identifier of the job to delete. This field is required.
+        
+        
+        
 
     .. py:method:: delete_run(run_id: int)
 
         Delete a job run.
-
-Deletes a non-active run. Returns an error if the run is active.
-
-:param run_id: int
-  ID of the run to delete.
-
-
-
+        
+        Deletes a non-active run. Returns an error if the run is active.
+        
+        :param run_id: int
+          ID of the run to delete.
+        
+        
+        
 
     .. py:method:: export_run(run_id: int [, views_to_export: Optional[ViewsToExport]]) -> ExportRunOutput
 
@@ -310,16 +310,16 @@ Deletes a non-active run. Returns an error if the run is active.
             w.jobs.delete(job_id=created_job.job_id)
 
         Export and retrieve a job run.
-
-Export and retrieve the job run task.
-
-:param run_id: int
-  The canonical identifier for the run. This field is required.
-:param views_to_export: :class:`ViewsToExport` (optional)
-  Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE.
-
-:returns: :class:`ExportRunOutput`
-
+        
+        Export and retrieve the job run task.
+        
+        :param run_id: int
+          The canonical identifier for the run. This field is required.
+        :param views_to_export: :class:`ViewsToExport` (optional)
+          Which views to export (CODE, DASHBOARDS, or ALL). Defaults to CODE.
+        
+        :returns: :class:`ExportRunOutput`
+        
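A short sketch, assuming `run_id` refers to an existing run:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    exported = w.jobs.export_run(run_id=run_id, views_to_export=jobs.ViewsToExport.CODE)
    for view in exported.views:
        print(view.name)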
 
     .. py:method:: get(job_id: int [, page_token: Optional[str]]) -> Job
 
@@ -354,46 +354,46 @@ Export and retrieve the job run task.
             w.jobs.delete_run(run_id=run.run_id)
 
         Get a single job.
-
-Retrieves the details for a single job.
-
-In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
-either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
-value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
-be empty on later pages.
-
-:param job_id: int
-  The canonical identifier of the job to retrieve information about. This field is required.
-:param page_token: str (optional)
-  Use `next_page_token` returned from the previous GetJob to request the next page of the job's
-  sub-resources.
-
-:returns: :class:`Job`
-
+        
+        Retrieves the details for a single job.
+        
+        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
+        
+        :param job_id: int
+          The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
+        
+        :returns: :class:`Job`
+        
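A sketch of the pagination loop described above, assuming the `Job` response exposes a `next_page_token` field as in Jobs API 2.2; `job_id` is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    job = w.jobs.get(job_id=job_id)
    tasks = list(job.settings.tasks or [])
    # Follow page tokens until the sub-resource arrays are exhausted.
    while job.next_page_token:
        job = w.jobs.get(job_id=job_id, page_token=job.next_page_token)
        tasks.extend(job.settings.tasks or [])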
 
     .. py:method:: get_permission_levels(job_id: str) -> GetJobPermissionLevelsResponse
 
         Get job permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param job_id: str
-  The job for which to get or manage permissions.
-
-:returns: :class:`GetJobPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param job_id: str
+          The job for which to get or manage permissions.
+        
+        :returns: :class:`GetJobPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(job_id: str) -> JobPermissions
 
         Get job permissions.
-
-Gets the permissions of a job. Jobs can inherit permissions from their root object.
-
-:param job_id: str
-  The job for which to get or manage permissions.
-
-:returns: :class:`JobPermissions`
-
+        
+        Gets the permissions of a job. Jobs can inherit permissions from their root object.
+        
+        :param job_id: str
+          The job for which to get or manage permissions.
+        
+        :returns: :class:`JobPermissions`
+        
 
     .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run
 
@@ -427,20 +427,22 @@ Gets the permissions of a job. Jobs can inherit permissions from their root obje
             # cleanup
             w.jobs.delete_run(run_id=run.run_id)
 
-        
-This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
-it will paginate through all pages and aggregate the results.
-:param run_id: int
-  The canonical identifier of the run for which to retrieve the metadata. This field is required.
-:param include_history: bool (optional)
-  Whether to include the repair history in the response.
-:param include_resolved_values: bool (optional)
-  Whether to include resolved parameter values in the response.
-:param page_token: str (optional)
-  To list the next page or the previous page of job tasks, set this field to the value of the
-  `next_page_token` or `prev_page_token` returned in the GetJob response.
-:returns: :class:`Run`
+        Get a single job run.
+
+        Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all pages of tasks, iterations, job_clusters, job_parameters, and repair history.
+
+        :param run_id: int
+          The canonical identifier of the run for which to retrieve the metadata. This field is required.
+        :param include_history: bool (optional)
+          Whether to include the repair history in the response.
+        :param include_resolved_values: bool (optional)
+          Whether to include resolved parameter values in the response.
+        :param page_token: str (optional)
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetRun response.
 
+        :returns: :class:`Run`
+        
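A short sketch; `run_id` is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    run = w.jobs.get_run(run_id=run_id, include_history=True, include_resolved_values=True)
    # All pages of tasks have already been aggregated by the extension.
    print(run.state.life_cycle_state, len(run.tasks or []))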
 
     .. py:method:: get_run_output(run_id: int) -> RunOutput
 
@@ -475,21 +477,21 @@ it will paginate through all pages and aggregate the results.
             w.jobs.delete_run(run_id=run.run_id)
 
         Get the output for a single run.
-
-Retrieve the output and metadata of a single task run. When a notebook task returns a value through
-the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
-restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
-job results in a cloud storage service.
-
-This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if
-the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you to want to
-reference them beyond 60 days, you must save old run results before they expire.
-
-:param run_id: int
-  The canonical identifier for the run.
-
-:returns: :class:`RunOutput`
-
+        
+        Retrieve the output and metadata of a single task run. When a notebook task returns a value through
+        the `dbutils.notebook.exit()` call, you can use this endpoint to retrieve that value. Databricks
+        restricts this API to returning the first 5 MB of the output. To return a larger result, you can store
+        job results in a cloud storage service.
+        
+        This endpoint validates that the __run_id__ parameter is valid and returns an HTTP status code 400 if
+        the __run_id__ parameter is invalid. Runs are automatically removed after 60 days. If you want to
+        reference them beyond 60 days, you must save old run results before they expire.
+        
+        :param run_id: int
+          The canonical identifier for the run.
+        
+        :returns: :class:`RunOutput`
+        
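A sketch of retrieving a notebook exit value, assuming the run's task was a notebook task; `run_id` is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    output = w.jobs.get_run_output(run_id=run_id)
    # The value passed to dbutils.notebook.exit(), limited to the first 5 MB.
    print(output.notebook_output.result)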
 
     .. py:method:: list( [, expand_tasks: Optional[bool], limit: Optional[int], name: Optional[str], offset: Optional[int], page_token: Optional[str]]) -> Iterator[BaseJob]
 
@@ -526,26 +528,26 @@ reference them beyond 60 days, you must save old run results before they expire.
             w.jobs.delete(job_id=created_job.job_id)
 
         List jobs.
-
-Retrieves a list of jobs.
-
-:param expand_tasks: bool (optional)
-  Whether to include task and cluster details in the response. Note that in API 2.2, only the first
-  100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
-:param limit: int (optional)
-  The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
-  default value is 20.
-:param name: str (optional)
-  A filter on the list based on the exact (case insensitive) job name.
-:param offset: int (optional)
-  The offset of the first job to return, relative to the most recently created job. Deprecated since
-  June 2023. Use `page_token` to iterate through the pages instead.
-:param page_token: str (optional)
-  Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
-  previous page of jobs respectively.
-
-:returns: Iterator over :class:`BaseJob`
-
+        
+        Retrieves a list of jobs.
+        
+        :param expand_tasks: bool (optional)
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
+        :param limit: int (optional)
+          The number of jobs to return. This value must be greater than 0 and less than or equal to 100.
+          The default value is 20.
+        :param name: str (optional)
+          A filter on the list based on the exact (case insensitive) job name.
+        :param offset: int (optional)
+          The offset of the first job to return, relative to the most recently created job. Deprecated since
+          June 2023. Use `page_token` to iterate through the pages instead.
+        :param page_token: str (optional)
+          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
+          previous page of jobs respectively.
+        
+        :returns: Iterator over :class:`BaseJob`
+        
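A minimal sketch; the job name filter is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # The iterator follows page tokens transparently.
    for job in w.jobs.list(expand_tasks=False, name='hello-job'):
        print(job.job_id)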
 
     .. py:method:: list_runs( [, active_only: Optional[bool], completed_only: Optional[bool], expand_tasks: Optional[bool], job_id: Optional[int], limit: Optional[int], offset: Optional[int], page_token: Optional[str], run_type: Optional[RunType], start_time_from: Optional[int], start_time_to: Optional[int]]) -> Iterator[BaseRun]
 
@@ -582,41 +584,41 @@ Retrieves a list of jobs.
             w.jobs.delete(job_id=created_job.job_id)
 
         List job runs.
-
-List runs in descending order by start time.
-
-:param active_only: bool (optional)
-  If active_only is `true`, only active runs are included in the results; otherwise, lists both active
-  and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or `TERMINATING`.
-  This field cannot be `true` when completed_only is `true`.
-:param completed_only: bool (optional)
-  If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
-  active and completed runs. This field cannot be `true` when active_only is `true`.
-:param expand_tasks: bool (optional)
-  Whether to include task and cluster details in the response. Note that in API 2.2, only the first
-  100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
-:param job_id: int (optional)
-  The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
-:param limit: int (optional)
-  The number of runs to return. This value must be greater than 0 and less than 25. The default value
-  is 20. If a request specifies a limit of 0, the service instead uses the maximum limit.
-:param offset: int (optional)
-  The offset of the first run to return, relative to the most recent run. Deprecated since June 2023.
-  Use `page_token` to iterate through the pages instead.
-:param page_token: str (optional)
-  Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
-  previous page of runs respectively.
-:param run_type: :class:`RunType` (optional)
-  The type of runs to return. For a description of run types, see :method:jobs/getRun.
-:param start_time_from: int (optional)
-  Show runs that started _at or after_ this value. The value must be a UTC timestamp in milliseconds.
-  Can be combined with _start_time_to_ to filter by a time range.
-:param start_time_to: int (optional)
-  Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds.
-  Can be combined with _start_time_from_ to filter by a time range.
-
-:returns: Iterator over :class:`BaseRun`
-
+        
+        List runs in descending order by start time.
+        
+        :param active_only: bool (optional)
+          If active_only is `true`, only active runs are included in the results; otherwise, lists both active
+          and completed runs. An active run is a run in the `QUEUED`, `PENDING`, `RUNNING`, or
+          `TERMINATING` state. This field cannot be `true` when completed_only is `true`.
+        :param completed_only: bool (optional)
+          If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
+          active and completed runs. This field cannot be `true` when active_only is `true`.
+        :param expand_tasks: bool (optional)
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
+        :param job_id: int (optional)
+          The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
+        :param limit: int (optional)
+          The number of runs to return. This value must be greater than 0 and less than 25. The default value
+          is 20. If a request specifies a limit of 0, the service instead uses the maximum limit.
+        :param offset: int (optional)
+          The offset of the first run to return, relative to the most recent run. Deprecated since June 2023.
+          Use `page_token` to iterate through the pages instead.
+        :param page_token: str (optional)
+          Use `next_page_token` or `prev_page_token` returned from the previous request to list the next or
+          previous page of runs respectively.
+        :param run_type: :class:`RunType` (optional)
+          The type of runs to return. For a description of run types, see :method:jobs/getRun.
+        :param start_time_from: int (optional)
+          Show runs that started _at or after_ this value. The value must be a UTC timestamp in milliseconds.
+          Can be combined with _start_time_to_ to filter by a time range.
+        :param start_time_to: int (optional)
+          Show runs that started _at or before_ this value. The value must be a UTC timestamp in milliseconds.
+          Can be combined with _start_time_from_ to filter by a time range.
+        
+        :returns: Iterator over :class:`BaseRun`
+        
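A sketch listing the active runs of one job from the last 24 hours; `job_id` is a placeholder:

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    day_ago_ms = int((time.time() - 86400) * 1000)
    for run in w.jobs.list_runs(job_id=job_id, active_only=True, start_time_from=day_ago_ms):
        print(run.run_id, run.state.life_cycle_state)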
 
     .. py:method:: repair_run(run_id: int [, dbt_commands: Optional[List[str]], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], latest_repair_id: Optional[int], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], rerun_all_failed_tasks: Optional[bool], rerun_dependent_tasks: Optional[bool], rerun_tasks: Optional[List[str]], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
@@ -658,95 +660,95 @@ List runs in descending order by start time.
             w.jobs.delete(job_id=created_job.job_id)
 
         Repair a job run.
-
-Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
-and task settings, and can be viewed in the history for the original job run.
-
-:param run_id: int
-  The job run ID of the run to repair. The run must not be in progress.
-:param dbt_commands: List[str] (optional)
-  An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-  deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
-:param jar_params: List[str] (optional)
-  A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
-  The parameters are used to invoke the main function of the main class specified in the Spark JAR
-  task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
-  in conjunction with notebook_params. The JSON representation of this field (for example
-  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param job_parameters: Dict[str,str] (optional)
-  Job-level parameters used in the run. for example `"param": "overriding_val"`
-:param latest_repair_id: int (optional)
-  The ID of the latest repair. This parameter is not required when repairing a run for the first time,
-  but must be provided on subsequent requests to repair the same run.
-:param notebook_params: Dict[str,str] (optional)
-  A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-  "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-  [dbutils.widgets.get] function.
-  
-  If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-  
-  notebook_params cannot be specified in conjunction with jar_params.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  The JSON representation of this field (for example `{"notebook_params":{"name":"john
-  doe","age":"35"}}`) cannot exceed 10,000 bytes.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-:param pipeline_params: :class:`PipelineParams` (optional)
-  Controls whether the pipeline should perform a full refresh
-:param python_named_params: Dict[str,str] (optional)
-:param python_params: List[str] (optional)
-  A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
-  The parameters are passed to Python file as command-line parameters. If specified upon `run-now`, it
-  would overwrite the parameters specified in job setting. The JSON representation of this field (for
-  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  Important
-  
-  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-  emojis.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param rerun_all_failed_tasks: bool (optional)
-  If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
-:param rerun_dependent_tasks: bool (optional)
-  If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were previously
-  successful. Can be also used in combination with `rerun_all_failed_tasks`.
-:param rerun_tasks: List[str] (optional)
-  The task keys of the task runs to repair.
-:param spark_submit_params: List[str] (optional)
-  A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-  ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
-  as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
-  in job setting. The JSON representation of this field (for example `{"python_params":["john
-  doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs
-  
-  Important
-  
-  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-  emojis.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param sql_params: Dict[str,str] (optional)
-  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
-  "age": "35"}`. The SQL alert task does not support custom parameters.
-
-:returns:
-  Long-running operation waiter for :class:`Run`.
-  See :method:wait_get_run_job_terminated_or_skipped for more details.
-
+        
+        Re-run one or more tasks. Tasks are re-run as part of the original job run. They use the current job
+        and task settings, and can be viewed in the history for the original job run.
+        
+        :param run_id: int
+          The job run ID of the run to repair. The run must not be in progress.
+        :param dbt_commands: List[str] (optional)
+          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
+          deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
+        :param jar_params: List[str] (optional)
+          A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
+          The parameters are used to invoke the main function of the main class specified in the Spark JAR
+          task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
+          in conjunction with notebook_params. The JSON representation of this field (for example
+          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param job_parameters: Dict[str,str] (optional)
+          Job-level parameters used in the run, for example `"param": "overriding_val"`
+        :param latest_repair_id: int (optional)
+          The ID of the latest repair. This parameter is not required when repairing a run for the first time,
+          but must be provided on subsequent requests to repair the same run.
+        :param notebook_params: Dict[str,str] (optional)
+          A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
+          "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
+          [dbutils.widgets.get] function.
+          
+          If not specified upon `run-now`, the triggered run uses the job’s base parameters.
+          
+          notebook_params cannot be specified in conjunction with jar_params.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          The JSON representation of this field (for example `{"notebook_params":{"name":"john
+          doe","age":"35"}}`) cannot exceed 10,000 bytes.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
+        :param python_named_params: Dict[str,str] (optional)
+        :param python_params: List[str] (optional)
+          A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
+          The parameters are passed to the Python file as command-line parameters. If specified upon `run-now`, it
+          would overwrite the parameters specified in job setting. The JSON representation of this field (for
+          example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          Important
+          
+          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+          emojis.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param rerun_all_failed_tasks: bool (optional)
+          If true, repair all failed tasks. Only one of `rerun_tasks` or `rerun_all_failed_tasks` can be used.
+        :param rerun_dependent_tasks: bool (optional)
+          If true, repair all tasks that depend on the tasks in `rerun_tasks`, even if they were previously
+          successful. Can also be used in combination with `rerun_all_failed_tasks`.
+        :param rerun_tasks: List[str] (optional)
+          The task keys of the task runs to repair.
+        :param spark_submit_params: List[str] (optional)
+          A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
+          ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
+          as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
+          in job setting. The JSON representation of this field (for example `{"python_params":["john
+          doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs
+          
+          Important
+          
+          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+          emojis.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param sql_params: Dict[str,str] (optional)
+          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
+          "age": "35"}`. The SQL alert task does not support custom parameters.
+        
+        :returns:
+          Long-running operation waiter for :class:`Run`.
+          See :method:wait_get_run_job_terminated_or_skipped for more details.
+        
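A minimal sketch that re-runs only the failed tasks plus their downstream dependents; `run_id` is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    repaired = w.jobs.repair_run(run_id=run_id,
                                 rerun_all_failed_tasks=True,
                                 rerun_dependent_tasks=True).result()
    print(repaired.state.result_state)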
 
     .. py:method:: repair_run_and_wait(run_id: int [, dbt_commands: Optional[List[str]], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], latest_repair_id: Optional[int], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], rerun_all_failed_tasks: Optional[bool], rerun_dependent_tasks: Optional[bool], rerun_tasks: Optional[List[str]], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -790,20 +792,20 @@ and task settings, and can be viewed in the history for the original job run.
             w.jobs.delete(job_id=created_job.job_id)
 
         Update all job settings (reset).
-
-Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
-job settings partially.
-
-:param job_id: int
-  The canonical identifier of the job to reset. This field is required.
-:param new_settings: :class:`JobSettings`
-  The new settings of the job. These settings completely replace the old settings.
-  
-  Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other
-  fields are applied to future runs only.
-
-
-
+        
+        Overwrite all settings for the given job. Use the [_Update_ endpoint](:method:jobs/update) to update
+        job settings partially.
+        
+        :param job_id: int
+          The canonical identifier of the job to reset. This field is required.
+        :param new_settings: :class:`JobSettings`
+          The new settings of the job. These settings completely replace the old settings.
+          
+          Changes to the field `JobBaseSettings.timeout_seconds` are applied to active runs. Changes to other
+          fields are applied to future runs only.
+        
+        
+        
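A hedged sketch; note that reset replaces the entire settings object, so any field omitted from `new_settings` is cleared rather than preserved (`job_id` and the name are placeholders):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import jobs

    w = WorkspaceClient()
    w.jobs.reset(job_id=job_id,
                 new_settings=jobs.JobSettings(name='hello-job-v2', max_concurrent_runs=1))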
 
     .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
@@ -840,106 +842,106 @@ job settings partially.
             w.jobs.delete(job_id=created_job.job_id)
 
         Trigger a new job run.
-
-Run a job and return the `run_id` of the triggered run.
-
-:param job_id: int
-  The ID of the job to be executed
-:param dbt_commands: List[str] (optional)
-  An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
-  deps", "dbt seed", "dbt deps", "dbt seed", "dbt run"]`
-:param idempotency_token: str (optional)
-  An optional token to guarantee the idempotency of job run requests. If a run with the provided token
-  already exists, the request does not create a new run but returns the ID of the existing run
-  instead. If a run with the provided token is deleted, an error is returned.
-  
-  If you specify the idempotency token, upon failure you can retry until the request succeeds.
-  Databricks guarantees that exactly one run is launched with that idempotency token.
-  
-  This token must have at most 64 characters.
-  
-  For more information, see [How to ensure idempotency for jobs].
-  
-  [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-:param jar_params: List[str] (optional)
-  A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
-  The parameters are used to invoke the main function of the main class specified in the Spark JAR
-  task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
-  in conjunction with notebook_params. The JSON representation of this field (for example
-  `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param job_parameters: Dict[str,str] (optional)
-  Job-level parameters used in the run. for example `"param": "overriding_val"`
-:param notebook_params: Dict[str,str] (optional)
-  A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
-  "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
-  [dbutils.widgets.get] function.
-  
-  If not specified upon `run-now`, the triggered run uses the job’s base parameters.
-  
-  notebook_params cannot be specified in conjunction with jar_params.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  The JSON representation of this field (for example `{"notebook_params":{"name":"john
-  doe","age":"35"}}`) cannot exceed 10,000 bytes.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-  [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
-:param only: List[str] (optional)
-  A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
-  will be run.
-:param performance_target: :class:`PerformanceTarget` (optional)
-  PerformanceTarget defines how performant or cost efficient the execution of run on serverless
-  compute should be. For RunNow request, the run will execute with this settings instead of ones
-  defined in job.
-:param pipeline_params: :class:`PipelineParams` (optional)
-  Controls whether the pipeline should perform a full refresh
-:param python_named_params: Dict[str,str] (optional)
-:param python_params: List[str] (optional)
-  A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
+          The parameters are passed to the Python file as command-line parameters. If specified upon `run-now`, it
-  would overwrite the parameters specified in job setting. The JSON representation of this field (for
-  example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs.
-  
-  Important
-  
-  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-  emojis.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param queue: :class:`QueueSettings` (optional)
-  The queue settings of the run.
-:param spark_submit_params: List[str] (optional)
-  A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
-  ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
-  as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
-  in job setting. The JSON representation of this field (for example `{"python_params":["john
-  doe","35"]}`) cannot exceed 10,000 bytes.
-  
-  Use [Task parameter variables] to set parameters containing information about job runs
-  
-  Important
-  
-  These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
-  returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
-  emojis.
-  
-  [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
-:param sql_params: Dict[str,str] (optional)
-  A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
-  "age": "35"}`. The SQL alert task does not support custom parameters.
-
-:returns:
-  Long-running operation waiter for :class:`Run`.
-  See :method:wait_get_run_job_terminated_or_skipped for more details.
-
+        
+        Run a job and return the `run_id` of the triggered run.
+        
+        :param job_id: int
+          The ID of the job to be executed
+        :param dbt_commands: List[str] (optional)
+          An array of commands to execute for jobs with the dbt task, for example `"dbt_commands": ["dbt
+          deps", "dbt seed", "dbt run"]`
+        :param idempotency_token: str (optional)
+          An optional token to guarantee the idempotency of job run requests. If a run with the provided token
+          already exists, the request does not create a new run but returns the ID of the existing run
+          instead. If a run with the provided token is deleted, an error is returned.
+          
+          If you specify the idempotency token, upon failure you can retry until the request succeeds.
+          Databricks guarantees that exactly one run is launched with that idempotency token.
+          
+          This token must have at most 64 characters.
+          
+          For more information, see [How to ensure idempotency for jobs].
+          
+          [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
+        :param jar_params: List[str] (optional)
+          A list of parameters for jobs with Spark JAR tasks, for example `"jar_params": ["john doe", "35"]`.
+          The parameters are used to invoke the main function of the main class specified in the Spark JAR
+          task. If not specified upon `run-now`, it defaults to an empty list. jar_params cannot be specified
+          in conjunction with notebook_params. The JSON representation of this field (for example
+          `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param job_parameters: Dict[str,str] (optional)
+          Job-level parameters used in the run, for example `"param": "overriding_val"`
+        :param notebook_params: Dict[str,str] (optional)
+          A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
+          "john doe", "age": "35"}`. The map is passed to the notebook and is accessible through the
+          [dbutils.widgets.get] function.
+          
+          If not specified upon `run-now`, the triggered run uses the job’s base parameters.
+          
+          notebook_params cannot be specified in conjunction with jar_params.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          The JSON representation of this field (for example `{"notebook_params":{"name":"john
+          doe","age":"35"}}`) cannot exceed 10,000 bytes.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+          [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
+          will be run.
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of the run on serverless
+          compute should be. For a RunNow request, the run executes with these settings instead of the ones
+          defined in the job.
+        :param pipeline_params: :class:`PipelineParams` (optional)
+          Controls whether the pipeline should perform a full refresh
+        :param python_named_params: Dict[str,str] (optional)
+        :param python_params: List[str] (optional)
+          A list of parameters for jobs with Python tasks, for example `"python_params": ["john doe", "35"]`.
+          The parameters are passed to the Python file as command-line parameters. If specified upon
+          `run-now`, they overwrite the parameters specified in the job setting. The JSON representation of
+          this field (for example `{"python_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          Important
+          
+          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+          emojis.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param queue: :class:`QueueSettings` (optional)
+          The queue settings of the run.
+        :param spark_submit_params: List[str] (optional)
+          A list of parameters for jobs with spark submit task, for example `"spark_submit_params":
+          ["--class", "org.apache.spark.examples.SparkPi"]`. The parameters are passed to spark-submit script
+          as command-line parameters. If specified upon `run-now`, it would overwrite the parameters specified
+          in job setting. The JSON representation of this field (for example `{"python_params":["john
+          doe","35"]}`) cannot exceed 10,000 bytes.
+          
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          Important
+          
+          These parameters accept only Latin characters (ASCII character set). Using non-ASCII characters
+          returns an error. Examples of invalid, non-ASCII characters are Chinese, Japanese kanjis, and
+          emojis.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
+        :param sql_params: Dict[str,str] (optional)
+          A map from keys to values for jobs with SQL task, for example `"sql_params": {"name": "john doe",
+          "age": "35"}`. The SQL alert task does not support custom parameters.
+        
+        :returns:
+          Long-running operation waiter for :class:`Run`.
+          See :method:wait_get_run_job_terminated_or_skipped for more details.
+        
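As a usage illustration for the docstring above, here is a minimal sketch of triggering a run with `run_now`; the job ID and notebook widget values are hypothetical, and the idempotency token is generated client-side so retries are safe:

```python
import uuid

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# notebook_params are surfaced to the notebook via dbutils.widgets.get();
# re-sending the same idempotency token on retry returns the existing run.
waiter = w.jobs.run_now(
    job_id=1234,  # hypothetical job ID
    notebook_params={"name": "john doe", "age": "35"},
    idempotency_token=str(uuid.uuid4()),  # must be at most 64 characters
)

run = waiter.result()  # block until the run terminates or is skipped
print(run.run_id, run.state)
```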
 
     .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -947,16 +949,16 @@ Run a job and return the `run_id` of the triggered run.
     .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
 
         Set job permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param job_id: str
-  The job for which to get or manage permissions.
-:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-
-:returns: :class:`JobPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param job_id: str
+          The job for which to get or manage permissions.
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+        
+        :returns: :class:`JobPermissions`
+        
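A short sketch of replacing a job's direct permissions via this method; the job ID and user email are hypothetical. Note that the permissions APIs take the job ID as a string:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Replaces all direct grants on the job with this single entry;
# inherited permissions are unaffected. Returns a JobPermissions object.
w.jobs.set_permissions(
    job_id="1234",  # hypothetical job ID, passed as a string
    access_control_list=[
        jobs.JobAccessControlRequest(
            user_name="jsmith@example.com",  # hypothetical user
            permission_level=jobs.JobPermissionLevel.CAN_MANAGE_RUN,
        )
    ],
)
```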
 
     .. py:method:: submit( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications]]) -> Wait[Run]
 
@@ -989,64 +991,64 @@ permissions if none are specified. Objects can inherit permissions from their ro
             w.jobs.delete_run(run_id=run.run_id)
 
         Create and trigger a one-time run.
-
-Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
-Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
-run state after the job is submitted.
-
-:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-  List of permissions to set on the job.
-:param budget_policy_id: str (optional)
-  The user specified id of the budget policy to use for this one-time run. If not specified, the run
-  will be not be attributed to any budget policy.
-:param email_notifications: :class:`JobEmailNotifications` (optional)
-  An optional set of email addresses notified when the run begins or completes.
-:param environments: List[:class:`JobEnvironment`] (optional)
-  A list of task execution environment specifications that can be referenced by tasks of this run.
-:param git_source: :class:`GitSource` (optional)
-  An optional specification for a remote Git repository containing the source code used by tasks.
-  Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
-  
-  If `git_source` is set, these tasks retrieve the file from the remote repository by default.
-  However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
-  
-  Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
-  used, `git_source` must be defined on the job.
-:param health: :class:`JobsHealthRules` (optional)
-  An optional set of health rules that can be defined for this job.
-:param idempotency_token: str (optional)
-  An optional token that can be used to guarantee the idempotency of job run requests. If a run with
-  the provided token already exists, the request does not create a new run but returns the ID of the
-  existing run instead. If a run with the provided token is deleted, an error is returned.
-  
-  If you specify the idempotency token, upon failure you can retry until the request succeeds.
-  Databricks guarantees that exactly one run is launched with that idempotency token.
-  
-  This token must have at most 64 characters.
-  
-  For more information, see [How to ensure idempotency for jobs].
-  
-  [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
-:param notification_settings: :class:`JobNotificationSettings` (optional)
-  Optional notification settings that are used when sending notifications to each of the
-  `email_notifications` and `webhook_notifications` for this run.
-:param queue: :class:`QueueSettings` (optional)
-  The queue settings of the one-time run.
-:param run_as: :class:`JobRunAs` (optional)
-  Specifies the user or service principal that the job runs as. If not specified, the job runs as the
-  user who submits the request.
-:param run_name: str (optional)
-  An optional name for the run. The default value is `Untitled`.
-:param tasks: List[:class:`SubmitTask`] (optional)
-:param timeout_seconds: int (optional)
-  An optional timeout applied to each run of this job. A value of `0` means no timeout.
-:param webhook_notifications: :class:`WebhookNotifications` (optional)
-  A collection of system notification IDs to notify when the run begins or completes.
-
-:returns:
-  Long-running operation waiter for :class:`Run`.
-  See :method:wait_get_run_job_terminated_or_skipped for more details.
-
+        
+        Submit a one-time run. This endpoint allows you to submit a workload directly without creating a job.
+        Runs submitted using this endpoint don’t display in the UI. Use the `jobs/runs/get` API to check the
+        run state after the job is submitted.
+        
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+          List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user-specified ID of the budget policy to use for this one-time run. If not specified, the run
+          will not be attributed to any budget policy.
+        :param email_notifications: :class:`JobEmailNotifications` (optional)
+          An optional set of email addresses notified when the run begins or completes.
+        :param environments: List[:class:`JobEnvironment`] (optional)
+          A list of task execution environment specifications that can be referenced by tasks of this run.
+        :param git_source: :class:`GitSource` (optional)
+          An optional specification for a remote Git repository containing the source code used by tasks.
+          Version-controlled source code is supported by notebook, dbt, Python script, and SQL File tasks.
+          
+          If `git_source` is set, these tasks retrieve the file from the remote repository by default.
+          However, this behavior can be overridden by setting `source` to `WORKSPACE` on the task.
+          
+          Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks are
+          used, `git_source` must be defined on the job.
+        :param health: :class:`JobsHealthRules` (optional)
+          An optional set of health rules that can be defined for this job.
+        :param idempotency_token: str (optional)
+          An optional token that can be used to guarantee the idempotency of job run requests. If a run with
+          the provided token already exists, the request does not create a new run but returns the ID of the
+          existing run instead. If a run with the provided token is deleted, an error is returned.
+          
+          If you specify the idempotency token, upon failure you can retry until the request succeeds.
+          Databricks guarantees that exactly one run is launched with that idempotency token.
+          
+          This token must have at most 64 characters.
+          
+          For more information, see [How to ensure idempotency for jobs].
+          
+          [How to ensure idempotency for jobs]: https://kb.databricks.com/jobs/jobs-idempotency.html
+        :param notification_settings: :class:`JobNotificationSettings` (optional)
+          Optional notification settings that are used when sending notifications to each of the
+          `email_notifications` and `webhook_notifications` for this run.
+        :param queue: :class:`QueueSettings` (optional)
+          The queue settings of the one-time run.
+        :param run_as: :class:`JobRunAs` (optional)
+          Specifies the user or service principal that the job runs as. If not specified, the job runs as the
+          user who submits the request.
+        :param run_name: str (optional)
+          An optional name for the run. The default value is `Untitled`.
+        :param tasks: List[:class:`SubmitTask`] (optional)
+        :param timeout_seconds: int (optional)
+          An optional timeout applied to each run of this job. A value of `0` means no timeout.
+        :param webhook_notifications: :class:`WebhookNotifications` (optional)
+          A collection of system notification IDs to notify when the run begins or completes.
+        
+        :returns:
+          Long-running operation waiter for :class:`Run`.
+          See :method:wait_get_run_job_terminated_or_skipped for more details.
+        
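A minimal sketch of a one-time notebook run via `submit`; the notebook path and cluster ID are hypothetical. Because submitted runs don't appear in the UI, the returned `run_id` is the only handle on the run:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

# Submit a single notebook task on an existing cluster and wait for it.
run = w.jobs.submit(
    run_name="ad-hoc-analysis",
    tasks=[
        jobs.SubmitTask(
            task_key="main",
            notebook_task=jobs.NotebookTask(notebook_path="/Users/jsmith@example.com/analysis"),
            existing_cluster_id="0123-456789-abcdef12",  # hypothetical cluster ID
        )
    ],
).result()  # wait for the terminated-or-skipped state
print(run.run_id)  # use with jobs/runs/get to inspect the run later
```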
 
     .. py:method:: submit_and_wait( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], git_source: Optional[GitSource], health: Optional[JobsHealthRules], idempotency_token: Optional[str], notification_settings: Optional[JobNotificationSettings], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], run_name: Optional[str], tasks: Optional[List[SubmitTask]], timeout_seconds: Optional[int], webhook_notifications: Optional[WebhookNotifications], timeout: datetime.timedelta = 0:20:00]) -> Run
 
@@ -1088,41 +1090,41 @@ run state after the job is submitted.
             w.jobs.delete(job_id=created_job.job_id)
 
         Update job settings partially.
-
-Add, update, or remove specific settings of an existing job. Use the [_Reset_
-endpoint](:method:jobs/reset) to overwrite all job settings.
-
-:param job_id: int
-  The canonical identifier of the job to update. This field is required.
-:param fields_to_remove: List[str] (optional)
-  Remove top-level fields in the job settings. Removing nested fields is not supported, except for
-  tasks and job clusters (`tasks/task_1`). This field is optional.
-:param new_settings: :class:`JobSettings` (optional)
-  The new settings for the job.
-  
-  Top-level fields specified in `new_settings` are completely replaced, except for arrays which are
-  merged. That is, new and existing entries are completely replaced based on the respective key
-  fields, i.e. `task_key` or `job_cluster_key`, while previous entries are kept.
-  
-  Partially updating nested fields is not supported.
-  
-  Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
-  fields are applied to future runs only.
-
-
-
+        
+        Add, update, or remove specific settings of an existing job. Use the [_Reset_
+        endpoint](:method:jobs/reset) to overwrite all job settings.
+        
+        :param job_id: int
+          The canonical identifier of the job to update. This field is required.
+        :param fields_to_remove: List[str] (optional)
+          Remove top-level fields in the job settings. Removing nested fields is not supported, except for
+          tasks and job clusters (`tasks/task_1`). This field is optional.
+        :param new_settings: :class:`JobSettings` (optional)
+          The new settings for the job.
+          
+          Top-level fields specified in `new_settings` are completely replaced, except for arrays, which are
+          merged. That is, new and existing array entries are matched on their respective key fields, i.e.
+          `task_key` or `job_cluster_key`; matched entries are replaced while unmatched previous entries are kept.
+          
+          Partially updating nested fields is not supported.
+          
+          Changes to the field `JobSettings.timeout_seconds` are applied to active runs. Changes to other
+          fields are applied to future runs only.
+        
+        
+        
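A sketch of a partial update under the semantics above; the job ID is hypothetical. Only the top-level fields named are touched, so this replaces the timeout and deletes the schedule while leaving tasks intact:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

w.jobs.update(
    job_id=1234,  # hypothetical job ID
    new_settings=jobs.JobSettings(timeout_seconds=3600),  # applied to active runs too
    fields_to_remove=["schedule"],  # remove a top-level field entirely
)
```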
 
     .. py:method:: update_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
 
         Update job permissions.
-
-Updates the permissions on a job. Jobs can inherit permissions from their root object.
-
-:param job_id: str
-  The job for which to get or manage permissions.
-:param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
-
-:returns: :class:`JobPermissions`
-
+        
+        Updates the permissions on a job. Jobs can inherit permissions from their root object.
+        
+        :param job_id: str
+          The job for which to get or manage permissions.
+        :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
+        
+        :returns: :class:`JobPermissions`
+        
 
     .. py:method:: wait_get_run_job_terminated_or_skipped(run_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Run], None]]) -> Run
diff --git a/docs/workspace/jobs/policy_compliance_for_jobs.rst b/docs/workspace/jobs/policy_compliance_for_jobs.rst
index b4326e9e2..69f211552 100644
--- a/docs/workspace/jobs/policy_compliance_for_jobs.rst
+++ b/docs/workspace/jobs/policy_compliance_for_jobs.rst
@@ -5,61 +5,62 @@
 .. py:class:: PolicyComplianceForJobsAPI
 
     The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace.
-This API currently only supports compliance controls for cluster policies.
-
-A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
-policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
-edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
-policies.
-
-The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
-compliance API allows you to update a job so that it becomes compliant with all of its policies.
+    This API currently only supports compliance controls for cluster policies.
+    
+    A job is in compliance if its cluster configurations satisfy the rules of all their respective cluster
+    policies. A job could be out of compliance if a cluster policy it uses was updated after the job was last
+    edited. The job is considered out of compliance if any of its clusters no longer comply with their updated
+    policies.
+    
+    The get and list compliance APIs allow you to view the policy compliance status of a job. The enforce
+    compliance API allows you to update a job so that it becomes compliant with all of its policies.
 
     .. py:method:: enforce_compliance(job_id: int [, validate_only: Optional[bool]]) -> EnforcePolicyComplianceResponse
 
         Enforce job policy compliance.
-
-Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
-are compliant with the current versions of their respective cluster policies. All-purpose clusters
-used in the job will not be updated.
-
-:param job_id: int
-  The ID of the job you want to enforce policy compliance on.
-:param validate_only: bool (optional)
-  If set, previews changes made to the job to comply with its policy, but does not update the job.
-
-:returns: :class:`EnforcePolicyComplianceResponse`
-
+        
+        Updates a job so the job clusters that are created when running the job (specified in `new_cluster`)
+        are compliant with the current versions of their respective cluster policies. All-purpose clusters
+        used in the job will not be updated.
+        
+        :param job_id: int
+          The ID of the job you want to enforce policy compliance on.
+        :param validate_only: bool (optional)
+          If set, previews changes made to the job to comply with its policy, but does not update the job.
+        
+        :returns: :class:`EnforcePolicyComplianceResponse`
+        
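A sketch of the preview-then-apply pattern with `validate_only`; the job ID is hypothetical, and it assumes the response exposes a `has_changes` flag indicating whether enforcement would modify the job:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Dry-run first: previews the cluster changes without updating the job.
preview = w.policy_compliance_for_jobs.enforce_compliance(job_id=1234, validate_only=True)
if preview.has_changes:  # assumed flag on EnforcePolicyComplianceResponse
    w.policy_compliance_for_jobs.enforce_compliance(job_id=1234)
```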
 
     .. py:method:: get_compliance(job_id: int) -> GetPolicyComplianceResponse
 
         Get job policy compliance.
-
-Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
-they use was updated after the job was last edited and some of its job clusters no longer comply with
-their updated policies.
-
-:param job_id: int
-  The ID of the job whose compliance status you are requesting.
-
-:returns: :class:`GetPolicyComplianceResponse`
-
+        
+        Returns the policy compliance status of a job. Jobs could be out of compliance if a cluster policy
+        they use was updated after the job was last edited and some of its job clusters no longer comply with
+        their updated policies.
+        
+        :param job_id: int
+          The ID of the job whose compliance status you are requesting.
+        
+        :returns: :class:`GetPolicyComplianceResponse`
+        
 
     .. py:method:: list_compliance(policy_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[JobCompliance]
 
         List job policy compliance.
-
-Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
-compliance if a cluster policy they use was updated after the job was last edited and its job clusters
-no longer comply with the updated policy.
-
-:param policy_id: str
-  Canonical unique identifier for the cluster policy.
-:param page_size: int (optional)
-  Use this field to specify the maximum number of results to be returned by the server. The server may
-  further constrain the maximum number of results returned in a single page.
-:param page_token: str (optional)
-  A page token that can be used to navigate to the next page or previous page as returned by
-  `next_page_token` or `prev_page_token`.
-
-:returns: Iterator over :class:`JobCompliance`
+        
+        Returns the policy compliance status of all jobs that use a given policy. Jobs could be out of
+        compliance if a cluster policy they use was updated after the job was last edited and its job clusters
+        no longer comply with the updated policy.
+        
+        :param policy_id: str
+          Canonical unique identifier for the cluster policy.
+        :param page_size: int (optional)
+          Use this field to specify the maximum number of results to be returned by the server. The server may
+          further constrain the maximum number of results returned in a single page.
+        :param page_token: str (optional)
+          A page token that can be used to navigate to the next page or previous page as returned by
+          `next_page_token` or `prev_page_token`.
+        
+        :returns: Iterator over :class:`JobCompliance`
+        
\ No newline at end of file
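To round out the compliance section, a sketch of scanning a policy for non-compliant jobs; the policy ID is hypothetical, and the SDK iterator is expected to follow `next_page_token` automatically:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for c in w.policy_compliance_for_jobs.list_compliance(policy_id="ABC123", page_size=50):
    if not c.is_compliant:
        # violations maps the offending cluster fields to explanations
        print(c.job_id, c.violations)
```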
diff --git a/docs/workspace/marketplace/consumer_fulfillments.rst b/docs/workspace/marketplace/consumer_fulfillments.rst
index 5833c3fac..4ea7a9c29 100644
--- a/docs/workspace/marketplace/consumer_fulfillments.rst
+++ b/docs/workspace/marketplace/consumer_fulfillments.rst
@@ -9,27 +9,28 @@
     .. py:method:: get(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[SharedDataObject]
 
         Get listing content metadata.
-
-Get a high level preview of the metadata of listing installable content.
-
-:param listing_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`SharedDataObject`
-
+        
+        Get a high-level preview of the metadata of a listing's installable content.
+        
+        :param listing_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`SharedDataObject`
+        
 
     .. py:method:: list(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListingFulfillment]
 
         List all listing fulfillments.
-
-Get all listings fulfillments associated with a listing. A _fulfillment_ is a potential installation.
-Standard installations contain metadata about the attached share or git repo. Only one of these fields
-will be present. Personalized installations contain metadata about the attached share or git repo, as
-well as the Delta Sharing recipient type.
-
-:param listing_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ListingFulfillment`
+        
+        Get all listing fulfillments associated with a listing. A _fulfillment_ is a potential installation.
+        Standard installations contain metadata about the attached share or git repo. Only one of these fields
+        will be present. Personalized installations contain metadata about the attached share or git repo, as
+        well as the Delta Sharing recipient type.
+        
+        :param listing_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListingFulfillment`
+        
\ No newline at end of file
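A sketch of walking a listing's fulfillments; the listing ID is hypothetical. Per the docstring, only one of the share or repo fields is present on each fulfillment:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for f in w.consumer_fulfillments.list(listing_id="abc-123"):  # hypothetical listing ID
    # exactly one of share_info / repo_info is populated per fulfillment
    print(f.fulfillment_type, f.share_info or f.repo_info)
```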
diff --git a/docs/workspace/marketplace/consumer_installations.rst b/docs/workspace/marketplace/consumer_installations.rst
index 363d90655..3cdb00a5a 100644
--- a/docs/workspace/marketplace/consumer_installations.rst
+++ b/docs/workspace/marketplace/consumer_installations.rst
@@ -9,69 +9,70 @@
     .. py:method:: create(listing_id: str [, accepted_consumer_terms: Optional[ConsumerTerms], catalog_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType], repo_detail: Optional[RepoInstallation], share_name: Optional[str]]) -> Installation
 
         Install from a listing.
-
-Install payload associated with a Databricks Marketplace listing.
-
-:param listing_id: str
-:param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
-:param catalog_name: str (optional)
-:param recipient_type: :class:`DeltaSharingRecipientType` (optional)
-:param repo_detail: :class:`RepoInstallation` (optional)
-  for git repo installations
-:param share_name: str (optional)
-
-:returns: :class:`Installation`
-
+        
+        Install payload associated with a Databricks Marketplace listing.
+        
+        :param listing_id: str
+        :param accepted_consumer_terms: :class:`ConsumerTerms` (optional)
+        :param catalog_name: str (optional)
+        :param recipient_type: :class:`DeltaSharingRecipientType` (optional)
+        :param repo_detail: :class:`RepoInstallation` (optional)
+          for git repo installations
+        :param share_name: str (optional)
+        
+        :returns: :class:`Installation`
+        
 
     .. py:method:: delete(listing_id: str, installation_id: str)
 
         Uninstall from a listing.
-
-Uninstall an installation associated with a Databricks Marketplace listing.
-
-:param listing_id: str
-:param installation_id: str
-
-
-
+        
+        Uninstall an installation associated with a Databricks Marketplace listing.
+        
+        :param listing_id: str
+        :param installation_id: str
+        
+        
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail]
 
         List all installations.
-
-List all installations across all listings.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`InstallationDetail`
-
+        
+        List all installations across all listings.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`InstallationDetail`
+        
 
     .. py:method:: list_listing_installations(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[InstallationDetail]
 
         List installations for a listing.
-
-List all installations for a particular listing.
-
-:param listing_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`InstallationDetail`
-
+        
+        List all installations for a particular listing.
+        
+        :param listing_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`InstallationDetail`
+        
 
     .. py:method:: update(listing_id: str, installation_id: str, installation: InstallationDetail [, rotate_token: Optional[bool]]) -> UpdateInstallationResponse
 
         Update an installation.
-
-This is a update API that will update the part of the fields defined in the installation table as well
-as interact with external services according to the fields not included in the installation table 1.
-the token will be rotate if the rotateToken flag is true 2. the token will be forcibly rotate if the
-rotateToken flag is true and the tokenInfo field is empty
-
-:param listing_id: str
-:param installation_id: str
-:param installation: :class:`InstallationDetail`
-:param rotate_token: bool (optional)
-
-:returns: :class:`UpdateInstallationResponse`
+        
+        This update API updates the fields defined in the installation table and interacts with external
+        services according to the fields not included in the installation table: 1. the token is rotated if
+        the rotateToken flag is true; 2. the token is forcibly rotated if the rotateToken flag is true and
+        the tokenInfo field is empty.
+        
+        :param listing_id: str
+        :param installation_id: str
+        :param installation: :class:`InstallationDetail`
+        :param rotate_token: bool (optional)
+        
+        :returns: :class:`UpdateInstallationResponse`
+        
\ No newline at end of file
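A sketch of the token-rotation path of `update` described above; the listing ID is hypothetical. Passing the installation back unchanged with `rotate_token=True` exercises the first rule:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for inst in w.consumer_installations.list_listing_installations(listing_id="abc-123"):
    # Rotate the Delta Sharing token without changing any other fields.
    w.consumer_installations.update(
        listing_id=inst.listing_id,
        installation_id=inst.id,
        installation=inst,
        rotate_token=True,
    )
```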
diff --git a/docs/workspace/marketplace/consumer_listings.rst b/docs/workspace/marketplace/consumer_listings.rst
index 9dff6a54e..242a8fce7 100644
--- a/docs/workspace/marketplace/consumer_listings.rst
+++ b/docs/workspace/marketplace/consumer_listings.rst
@@ -5,74 +5,75 @@
 .. py:class:: ConsumerListingsAPI
 
     Listings are the core entities in the Marketplace. They represent the products that are available for
-consumption.
+    consumption.
 
     .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetListingsResponse
 
         Get one batch of listings. One may specify up to 50 IDs per request.
-
-Batch get a published listing in the Databricks Marketplace that the consumer has access to.
-
-:param ids: List[str] (optional)
-
-:returns: :class:`BatchGetListingsResponse`
-
+        
+        Batch get a published listing in the Databricks Marketplace that the consumer has access to.
+        
+        :param ids: List[str] (optional)
+        
+        :returns: :class:`BatchGetListingsResponse`
+        
 
     .. py:method:: get(id: str) -> GetListingResponse
 
         Get listing.
-
-Get a published listing in the Databricks Marketplace that the consumer has access to.
-
-:param id: str
-
-:returns: :class:`GetListingResponse`
-
+        
+        Get a published listing in the Databricks Marketplace that the consumer has access to.
+        
+        :param id: str
+        
+        :returns: :class:`GetListingResponse`
+        
 
     .. py:method:: list( [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], is_staff_pick: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]], tags: Optional[List[ListingTag]]]) -> Iterator[Listing]
 
         List listings.
-
-List all published listings in the Databricks Marketplace that the consumer has access to.
-
-:param assets: List[:class:`AssetType`] (optional)
-  Matches any of the following asset types
-:param categories: List[:class:`Category`] (optional)
-  Matches any of the following categories
-:param is_free: bool (optional)
-  Filters each listing based on if it is free.
-:param is_private_exchange: bool (optional)
-  Filters each listing based on if it is a private exchange.
-:param is_staff_pick: bool (optional)
-  Filters each listing based on whether it is a staff pick.
-:param page_size: int (optional)
-:param page_token: str (optional)
-:param provider_ids: List[str] (optional)
-  Matches any of the following provider ids
-:param tags: List[:class:`ListingTag`] (optional)
-  Matches any of the following tags
-
-:returns: Iterator over :class:`Listing`
-
+        
+        List all published listings in the Databricks Marketplace that the consumer has access to.
+        
+        :param assets: List[:class:`AssetType`] (optional)
+          Matches any of the following asset types
+        :param categories: List[:class:`Category`] (optional)
+          Matches any of the following categories
+        :param is_free: bool (optional)
+          Filters each listing based on if it is free.
+        :param is_private_exchange: bool (optional)
+          Filters each listing based on if it is a private exchange.
+        :param is_staff_pick: bool (optional)
+          Filters each listing based on whether it is a staff pick.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        :param provider_ids: List[str] (optional)
+          Matches any of the following provider ids
+        :param tags: List[:class:`ListingTag`] (optional)
+          Matches any of the following tags
+        
+        :returns: Iterator over :class:`Listing`
+        
 
     .. py:method:: search(query: str [, assets: Optional[List[AssetType]], categories: Optional[List[Category]], is_free: Optional[bool], is_private_exchange: Optional[bool], page_size: Optional[int], page_token: Optional[str], provider_ids: Optional[List[str]]]) -> Iterator[Listing]
 
         Search listings.
-
-Search published listings in the Databricks Marketplace that the consumer has access to. This query
-supports a variety of different search parameters and performs fuzzy matching.
-
-:param query: str
-  Fuzzy matches query
-:param assets: List[:class:`AssetType`] (optional)
-  Matches any of the following asset types
-:param categories: List[:class:`Category`] (optional)
-  Matches any of the following categories
-:param is_free: bool (optional)
-:param is_private_exchange: bool (optional)
-:param page_size: int (optional)
-:param page_token: str (optional)
-:param provider_ids: List[str] (optional)
-  Matches any of the following provider ids
-
-:returns: Iterator over :class:`Listing`
+        
+        Search published listings in the Databricks Marketplace that the consumer has access to. This query
+        supports a variety of different search parameters and performs fuzzy matching.
+        
+        :param query: str
+          Fuzzy matches query
+        :param assets: List[:class:`AssetType`] (optional)
+          Matches any of the following asset types
+        :param categories: List[:class:`Category`] (optional)
+          Matches any of the following categories
+        :param is_free: bool (optional)
+        :param is_private_exchange: bool (optional)
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        :param provider_ids: List[str] (optional)
+          Matches any of the following provider ids
+        
+        :returns: Iterator over :class:`Listing`
+        
\ No newline at end of file
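A sketch of a fuzzy search over the catalog; the query string is arbitrary and the asset/price filters are optional:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import marketplace

w = WorkspaceClient()

# Fuzzy-match free model listings; the iterator handles pagination.
for listing in w.consumer_listings.search(
        query="forecasting",
        assets=[marketplace.AssetType.ASSET_TYPE_MODEL],
        is_free=True,
):
    print(listing.id, listing.summary.name)
```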
diff --git a/docs/workspace/marketplace/consumer_personalization_requests.rst b/docs/workspace/marketplace/consumer_personalization_requests.rst
index e732113ff..63ead75d3 100644
--- a/docs/workspace/marketplace/consumer_personalization_requests.rst
+++ b/docs/workspace/marketplace/consumer_personalization_requests.rst
@@ -9,41 +9,42 @@
     .. py:method:: create(listing_id: str, intended_use: str, accepted_consumer_terms: ConsumerTerms [, comment: Optional[str], company: Optional[str], first_name: Optional[str], is_from_lighthouse: Optional[bool], last_name: Optional[str], recipient_type: Optional[DeltaSharingRecipientType]]) -> CreatePersonalizationRequestResponse
 
         Create a personalization request.
-
-Create a personalization request for a listing.
-
-:param listing_id: str
-:param intended_use: str
-:param accepted_consumer_terms: :class:`ConsumerTerms`
-:param comment: str (optional)
-:param company: str (optional)
-:param first_name: str (optional)
-:param is_from_lighthouse: bool (optional)
-:param last_name: str (optional)
-:param recipient_type: :class:`DeltaSharingRecipientType` (optional)
-
-:returns: :class:`CreatePersonalizationRequestResponse`
-
+        
+        Create a personalization request for a listing.
+        
+        :param listing_id: str
+        :param intended_use: str
+        :param accepted_consumer_terms: :class:`ConsumerTerms`
+        :param comment: str (optional)
+        :param company: str (optional)
+        :param first_name: str (optional)
+        :param is_from_lighthouse: bool (optional)
+        :param last_name: str (optional)
+        :param recipient_type: :class:`DeltaSharingRecipientType` (optional)
+        
+        :returns: :class:`CreatePersonalizationRequestResponse`
+        
 
     .. py:method:: get(listing_id: str) -> GetPersonalizationRequestResponse
 
         Get the personalization request for a listing.
-
-Get the personalization request for a listing. Each consumer can make at *most* one personalization
-request for a listing.
-
-:param listing_id: str
-
-:returns: :class:`GetPersonalizationRequestResponse`
-
+        
+        Get the personalization request for a listing. Each consumer can make at *most* one personalization
+        request for a listing.
+        
+        :param listing_id: str
+        
+        :returns: :class:`GetPersonalizationRequestResponse`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest]
 
         List all personalization requests.
-
-List personalization requests for a consumer across all listings.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`PersonalizationRequest`
+        
+        List personalization requests for a consumer across all listings.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`PersonalizationRequest`
+        
\ No newline at end of file
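Since each consumer has at most one personalization request per listing, listing them keyed by `listing_id` yields a status map; a minimal sketch:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# One request per listing, so listing_id is a unique key here.
status_by_listing = {r.listing_id: r.status for r in w.consumer_personalization_requests.list()}
print(status_by_listing)
```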
diff --git a/docs/workspace/marketplace/consumer_providers.rst b/docs/workspace/marketplace/consumer_providers.rst
index f6cc1d770..13cca357e 100644
--- a/docs/workspace/marketplace/consumer_providers.rst
+++ b/docs/workspace/marketplace/consumer_providers.rst
@@ -9,33 +9,34 @@
     .. py:method:: batch_get( [, ids: Optional[List[str]]]) -> BatchGetProvidersResponse
 
         Get one batch of providers. One may specify up to 50 IDs per request.
-
-Batch get a provider in the Databricks Marketplace with at least one visible listing.
-
-:param ids: List[str] (optional)
-
-:returns: :class:`BatchGetProvidersResponse`
-
+        
+        Batch get a provider in the Databricks Marketplace with at least one visible listing.
+        
+        :param ids: List[str] (optional)
+        
+        :returns: :class:`BatchGetProvidersResponse`
+        
 
     .. py:method:: get(id: str) -> GetProviderResponse
 
         Get a provider.
-
-Get a provider in the Databricks Marketplace with at least one visible listing.
-
-:param id: str
-
-:returns: :class:`GetProviderResponse`
-
+        
+        Get a provider in the Databricks Marketplace with at least one visible listing.
+        
+        :param id: str
+        
+        :returns: :class:`GetProviderResponse`
+        
 
     .. py:method:: list( [, is_featured: Optional[bool], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
         List providers.
-
-List all providers in the Databricks Marketplace with at least one visible listing.
-
-:param is_featured: bool (optional)
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ProviderInfo`
+        
+        List all providers in the Databricks Marketplace with at least one visible listing.
+        
+        :param is_featured: bool (optional)
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ProviderInfo`
+        
\ No newline at end of file
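A one-filter sketch of listing providers; `is_featured` narrows the results to featured providers only:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

for p in w.consumer_providers.list(is_featured=True):
    print(p.id, p.name)
```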
diff --git a/docs/workspace/marketplace/provider_exchange_filters.rst b/docs/workspace/marketplace/provider_exchange_filters.rst
index 3d3becc67..ceca51e63 100644
--- a/docs/workspace/marketplace/provider_exchange_filters.rst
+++ b/docs/workspace/marketplace/provider_exchange_filters.rst
@@ -9,45 +9,46 @@
     .. py:method:: create(filter: ExchangeFilter) -> CreateExchangeFilterResponse
 
         Create a new exchange filter.
-
-Add an exchange filter.
-
-:param filter: :class:`ExchangeFilter`
-
-:returns: :class:`CreateExchangeFilterResponse`
-
+        
+        Add an exchange filter.
+        
+        :param filter: :class:`ExchangeFilter`
+        
+        :returns: :class:`CreateExchangeFilterResponse`
+        
 
     .. py:method:: delete(id: str)
 
         Delete an exchange filter.
-
-Delete an exchange filter
-
-:param id: str
-
-
-
+        
+        Delete an exchange filter
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: list(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeFilter]
 
         List exchange filters.
-
-List exchange filter
-
-:param exchange_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ExchangeFilter`
-
+        
+        List exchange filters
+        
+        :param exchange_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ExchangeFilter`
+        
 
     .. py:method:: update(id: str, filter: ExchangeFilter) -> UpdateExchangeFilterResponse
 
         Update exchange filter.
-
-Update an exchange filter.
-
-:param id: str
-:param filter: :class:`ExchangeFilter`
-
-:returns: :class:`UpdateExchangeFilterResponse`
+        
+        Update an exchange filter.
+        
+        :param id: str
+        :param filter: :class:`ExchangeFilter`
+        
+        :returns: :class:`UpdateExchangeFilterResponse`
+        
\ No newline at end of file
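A sketch of creating a filter that restricts an exchange to one metastore; the exchange ID and metastore value are hypothetical, and `GLOBAL_METASTORE_ID` is the filter type the enum exposes for this purpose:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import marketplace

w = WorkspaceClient()

w.provider_exchange_filters.create(
    filter=marketplace.ExchangeFilter(
        exchange_id="exchange-abc",  # hypothetical exchange ID
        filter_type=marketplace.ExchangeFilterType.GLOBAL_METASTORE_ID,
        filter_value="aws:us-west-2:12345678-aaaa-bbbb-cccc-123456789012",  # hypothetical
    )
)
```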
diff --git a/docs/workspace/marketplace/provider_exchanges.rst b/docs/workspace/marketplace/provider_exchanges.rst
index 6c5eda159..d53fd823d 100644
--- a/docs/workspace/marketplace/provider_exchanges.rst
+++ b/docs/workspace/marketplace/provider_exchanges.rst
@@ -9,104 +9,105 @@
     .. py:method:: add_listing_to_exchange(listing_id: str, exchange_id: str) -> AddExchangeForListingResponse
 
         Add an exchange for listing.
-
-Associate an exchange with a listing
-
-:param listing_id: str
-:param exchange_id: str
-
-:returns: :class:`AddExchangeForListingResponse`
-
+        
+        Associate an exchange with a listing
+        
+        :param listing_id: str
+        :param exchange_id: str
+        
+        :returns: :class:`AddExchangeForListingResponse`
+        
 
     .. py:method:: create(exchange: Exchange) -> CreateExchangeResponse
 
         Create an exchange.
-
-Create an exchange
-
-:param exchange: :class:`Exchange`
-
-:returns: :class:`CreateExchangeResponse`
-
+        
+        Create an exchange
+        
+        :param exchange: :class:`Exchange`
+        
+        :returns: :class:`CreateExchangeResponse`
+        
 
     .. py:method:: delete(id: str)
 
         Delete an exchange.
-
-This removes a listing from marketplace.
-
-:param id: str
-
-
-
+        
+        This removes an exchange from the marketplace.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: delete_listing_from_exchange(id: str)
 
         Remove an exchange for listing.
-
-Disassociate an exchange with a listing
-
-:param id: str
-
-
-
+        
+        Disassociate an exchange from a listing
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> GetExchangeResponse
 
         Get an exchange.
-
-Get an exchange.
-
-:param id: str
-
-:returns: :class:`GetExchangeResponse`
-
+        
+        Get an exchange.
+        
+        :param id: str
+        
+        :returns: :class:`GetExchangeResponse`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Exchange]
 
         List exchanges.
-
-List exchanges visible to provider
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`Exchange`
-
+        
+        List exchanges visible to the provider
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`Exchange`
+        
 
     .. py:method:: list_exchanges_for_listing(listing_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing]
 
         List exchanges for listing.
-
-List exchanges associated with a listing
-
-:param listing_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ExchangeListing`
-
+        
+        List exchanges associated with a listing
+        
+        :param listing_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ExchangeListing`
+        
 
     .. py:method:: list_listings_for_exchange(exchange_id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ExchangeListing]
 
         List listings for exchange.
-
-List listings associated with an exchange
-
-:param exchange_id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ExchangeListing`
-
+        
+        List listings associated with an exchange
+        
+        :param exchange_id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ExchangeListing`
+        
 
     .. py:method:: update(id: str, exchange: Exchange) -> UpdateExchangeResponse
 
         Update exchange.
-
-Update an exchange
-
-:param id: str
-:param exchange: :class:`Exchange`
-
-:returns: :class:`UpdateExchangeResponse`
+        
+        Update an exchange
+        
+        :param id: str
+        :param exchange: :class:`Exchange`
+        
+        :returns: :class:`UpdateExchangeResponse`
+        
\ No newline at end of file
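Tying the exchange methods together, a sketch that creates an exchange and attaches an existing listing; the listing ID is hypothetical:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import marketplace

w = WorkspaceClient()

exchange = w.provider_exchanges.create(exchange=marketplace.Exchange(name="my-exchange"))
w.provider_exchanges.add_listing_to_exchange(
    listing_id="abc-123",  # hypothetical listing ID
    exchange_id=exchange.exchange_id,
)
```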
diff --git a/docs/workspace/marketplace/provider_files.rst b/docs/workspace/marketplace/provider_files.rst
index b71865e30..f719ca65f 100644
--- a/docs/workspace/marketplace/provider_files.rst
+++ b/docs/workspace/marketplace/provider_files.rst
@@ -9,47 +9,48 @@
     .. py:method:: create(file_parent: FileParent, marketplace_file_type: MarketplaceFileType, mime_type: str [, display_name: Optional[str]]) -> CreateFileResponse
 
         Create a file.
-
-Create a file. Currently, only provider icons and attached notebooks are supported.
-
-:param file_parent: :class:`FileParent`
-:param marketplace_file_type: :class:`MarketplaceFileType`
-:param mime_type: str
-:param display_name: str (optional)
-
-:returns: :class:`CreateFileResponse`
-
+        
+        Create a file. Currently, only provider icons and attached notebooks are supported.
+        
+        :param file_parent: :class:`FileParent`
+        :param marketplace_file_type: :class:`MarketplaceFileType`
+        :param mime_type: str
+        :param display_name: str (optional)
+        
+        :returns: :class:`CreateFileResponse`
+        
 
     .. py:method:: delete(file_id: str)
 
         Delete a file.
-
-Delete a file
-
-:param file_id: str
-
-
-
+        
+        Delete a file
+        
+        :param file_id: str
+        
+        
+        
 
     .. py:method:: get(file_id: str) -> GetFileResponse
 
         Get a file.
-
-Get a file
-
-:param file_id: str
-
-:returns: :class:`GetFileResponse`
-
+        
+        Get a file
+        
+        :param file_id: str
+        
+        :returns: :class:`GetFileResponse`
+        
 
     .. py:method:: list(file_parent: FileParent [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FileInfo]
 
         List files.
-
-List files attached to a parent entity.
-
-:param file_parent: :class:`FileParent`
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`FileInfo`
+        
+        List files attached to a parent entity.
+        
+        :param file_parent: :class:`FileParent`
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FileInfo`
+        
\ No newline at end of file
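A sketch of registering a provider icon; the provider ID is hypothetical, and it assumes the create response carries a pre-signed `upload_url` for the actual file bytes:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import marketplace

w = WorkspaceClient()

resp = w.provider_files.create(
    file_parent=marketplace.FileParent(
        file_parent_type=marketplace.FileParentType.PROVIDER,
        parent_id="provider-abc",  # hypothetical provider ID
    ),
    marketplace_file_type=marketplace.MarketplaceFileType.PROVIDER_ICON,
    mime_type="image/png",
)
print(resp.upload_url)  # assumed field: upload the icon bytes to this URL
```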
diff --git a/docs/workspace/marketplace/provider_listings.rst b/docs/workspace/marketplace/provider_listings.rst
index 7b96689a8..d26c5293e 100644
--- a/docs/workspace/marketplace/provider_listings.rst
+++ b/docs/workspace/marketplace/provider_listings.rst
@@ -5,60 +5,61 @@
 .. py:class:: ProviderListingsAPI
 
     Listings are the core entities in the Marketplace. They represent the products that are available for
-consumption.
+    consumption.
 
     .. py:method:: create(listing: Listing) -> CreateListingResponse
 
         Create a listing.
-
-Create a new listing
-
-:param listing: :class:`Listing`
-
-:returns: :class:`CreateListingResponse`
-
+        
+        Create a new listing
+        
+        :param listing: :class:`Listing`
+        
+        :returns: :class:`CreateListingResponse`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a listing.
-
-Delete a listing
-
-:param id: str
-
-
-
+        
+        Delete a listing
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> GetListingResponse
 
         Get a listing.
-
-Get a listing
-
-:param id: str
-
-:returns: :class:`GetListingResponse`
-
+        
+        Get a listing
+        
+        :param id: str
+        
+        :returns: :class:`GetListingResponse`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Listing]
 
         List listings.
-
-List listings owned by this provider
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`Listing`
-
+        
+        List listings owned by this provider
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`Listing`
+        
 
     .. py:method:: update(id: str, listing: Listing) -> UpdateListingResponse
 
         Update listing.
-
-Update a listing
-
-:param id: str
-:param listing: :class:`Listing`
-
-:returns: :class:`UpdateListingResponse`
+        
+        Update a listing
+        
+        :param id: str
+        :param listing: :class:`Listing`
+        
+        :returns: :class:`UpdateListingResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/marketplace/provider_personalization_requests.rst b/docs/workspace/marketplace/provider_personalization_requests.rst
index ba896ce96..32cdbdbb3 100644
--- a/docs/workspace/marketplace/provider_personalization_requests.rst
+++ b/docs/workspace/marketplace/provider_personalization_requests.rst
@@ -5,31 +5,32 @@
 .. py:class:: ProviderPersonalizationRequestsAPI
 
     Personalization requests are an alternate to instantly available listings. Control the lifecycle of
-personalized solutions.
+    personalized solutions.
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[PersonalizationRequest]
 
         All personalization requests across all listings.
-
-List personalization requests to this provider. This will return all personalization requests,
-regardless of which listing they are for.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`PersonalizationRequest`
-
+        
+        List personalization requests to this provider. This will return all personalization requests,
+        regardless of which listing they are for.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`PersonalizationRequest`
+        
 
     .. py:method:: update(listing_id: str, request_id: str, status: PersonalizationRequestStatus [, reason: Optional[str], share: Optional[ShareInfo]]) -> UpdatePersonalizationRequestResponse
 
         Update personalization request status.
-
-Update personalization request. This method only permits updating the status of the request.
-
-:param listing_id: str
-:param request_id: str
-:param status: :class:`PersonalizationRequestStatus`
-:param reason: str (optional)
-:param share: :class:`ShareInfo` (optional)
-
-:returns: :class:`UpdatePersonalizationRequestResponse`
+        
+        Update personalization request. This method only permits updating the status of the request.
+        
+        :param listing_id: str
+        :param request_id: str
+        :param status: :class:`PersonalizationRequestStatus`
+        :param reason: str (optional)
+        :param share: :class:`ShareInfo` (optional)
+        
+        :returns: :class:`UpdatePersonalizationRequestResponse`
+        
\ No newline at end of file
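Since `update` only permits changing the request status, a typical flow is to page through requests and resolve each one. A hedged sketch, assuming the client attribute `w.provider_personalization_requests`, the enum members `NEW` and `FULFILLED` on `PersonalizationRequestStatus`, and `listing_id`/`id` fields on the request object (all assumptions):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import marketplace

    w = WorkspaceClient()

    # Requests come back across all listings, so filter client-side.
    for req in w.provider_personalization_requests.list(page_size=100):
        if req.status == marketplace.PersonalizationRequestStatus.NEW:
            # Only the status (plus optional reason/share) may change.
            w.provider_personalization_requests.update(
                listing_id=req.listing_id,
                request_id=req.id,
                status=marketplace.PersonalizationRequestStatus.FULFILLED,
            )
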
diff --git a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
index 4ddee879a..cc29e089f 100644
--- a/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
+++ b/docs/workspace/marketplace/provider_provider_analytics_dashboards.rst
@@ -9,41 +9,42 @@
     .. py:method:: create() -> ProviderAnalyticsDashboard
 
         Create provider analytics dashboard.
-
-Create provider analytics dashboard. Returns Marketplace specific `id`. Not to be confused with the
-Lakeview dashboard id.
-
-:returns: :class:`ProviderAnalyticsDashboard`
-
+        
+        Create provider analytics dashboard. Returns a Marketplace-specific `id`, not to be confused with
+        the Lakeview dashboard id.
+        
+        :returns: :class:`ProviderAnalyticsDashboard`
+        
 
     .. py:method:: get() -> ListProviderAnalyticsDashboardResponse
 
         Get provider analytics dashboard.
-
-Get provider analytics dashboard.
-
-:returns: :class:`ListProviderAnalyticsDashboardResponse`
-
+        
+        Get provider analytics dashboard.
+        
+        :returns: :class:`ListProviderAnalyticsDashboardResponse`
+        
 
     .. py:method:: get_latest_version() -> GetLatestVersionProviderAnalyticsDashboardResponse
 
         Get latest version of provider analytics dashboard.
-
-Get latest version of provider analytics dashboard.
-
-:returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse`
-
+        
+        Get latest version of provider analytics dashboard.
+        
+        :returns: :class:`GetLatestVersionProviderAnalyticsDashboardResponse`
+        
 
     .. py:method:: update(id: str [, version: Optional[int]]) -> UpdateProviderAnalyticsDashboardResponse
 
         Update provider analytics dashboard.
-
-Update provider analytics dashboard.
-
-:param id: str
-  id is immutable property and can't be updated.
-:param version: int (optional)
-  this is the version of the dashboard template we want to update our user to current expectation is
-  that it should be equal to latest version of the dashboard template
-
-:returns: :class:`UpdateProviderAnalyticsDashboardResponse`
+        
+        Update provider analytics dashboard.
+        
+        :param id: str
+          id is an immutable property and can't be updated.
+        :param version: int (optional)
+          The version of the dashboard template to update the user to. Currently this is expected to
+          equal the latest version of the dashboard template.
+        
+        :returns: :class:`UpdateProviderAnalyticsDashboardResponse`
+        
\ No newline at end of file
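The version parameter ties `update` to `get_latest_version`, so the natural sequence is create, read the latest template version, then update. A sketch assuming the client attribute `w.provider_provider_analytics_dashboards` and `id`/`version` fields on the responses (assumptions based on the method signatures above):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # create() returns a Marketplace-specific id, not a Lakeview dashboard id.
    dashboard = w.provider_provider_analytics_dashboards.create()

    # Roll the dashboard forward to the latest published template version.
    latest = w.provider_provider_analytics_dashboards.get_latest_version()
    w.provider_provider_analytics_dashboards.update(id=dashboard.id,
                                                    version=latest.version)
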
diff --git a/docs/workspace/marketplace/provider_providers.rst b/docs/workspace/marketplace/provider_providers.rst
index 61ea4d966..610c9602e 100644
--- a/docs/workspace/marketplace/provider_providers.rst
+++ b/docs/workspace/marketplace/provider_providers.rst
@@ -9,55 +9,56 @@
     .. py:method:: create(provider: ProviderInfo) -> CreateProviderResponse
 
         Create a provider.
-
-Create a provider
-
-:param provider: :class:`ProviderInfo`
-
-:returns: :class:`CreateProviderResponse`
-
+        
+        Create a provider
+        
+        :param provider: :class:`ProviderInfo`
+        
+        :returns: :class:`CreateProviderResponse`
+        
 
     .. py:method:: delete(id: str)
 
         Delete provider.
-
-Delete provider
-
-:param id: str
-
-
-
+        
+        Delete provider
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> GetProviderResponse
 
         Get provider.
-
-Get provider profile
-
-:param id: str
-
-:returns: :class:`GetProviderResponse`
-
+        
+        Get provider profile
+        
+        :param id: str
+        
+        :returns: :class:`GetProviderResponse`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
         List providers.
-
-List provider profiles for account.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ProviderInfo`
-
+        
+        List provider profiles for account.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ProviderInfo`
+        
 
     .. py:method:: update(id: str, provider: ProviderInfo) -> UpdateProviderResponse
 
         Update provider.
-
-Update provider profile
-
-:param id: str
-:param provider: :class:`ProviderInfo`
-
-:returns: :class:`UpdateProviderResponse`
+        
+        Update provider profile
+        
+        :param id: str
+        :param provider: :class:`ProviderInfo`
+        
+        :returns: :class:`UpdateProviderResponse`
+        
\ No newline at end of file
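As with listings, the provider-profile methods are plain CRUD. A minimal listing sketch, assuming the client attribute `w.provider_providers` (the name mirrors the .rst filename and is an assumption):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Page through provider profiles; the iterator follows page_token.
    for provider in w.provider_providers.list(page_size=25):
        print(provider.id, provider.name)
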
diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst
index 386395493..44ceeef8c 100644
--- a/docs/workspace/ml/experiments.rst
+++ b/docs/workspace/ml/experiments.rst
@@ -5,11 +5,11 @@
 .. py:class:: ExperimentsAPI
 
     Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment. Each
-experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for
-analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server.
-
-Experiments are located in the workspace file tree. You manage experiments using the same tools you use to
-manage other workspace objects such as folders, notebooks, and libraries.
+    experiment lets you visualize, search, and compare runs, as well as download run artifacts or metadata for
+    analysis in other tools. Experiments are maintained in a Databricks hosted MLflow tracking server.
+    
+    Experiments are located in the workspace file tree. You manage experiments using the same tools you use to
+    manage other workspace objects such as folders, notebooks, and libraries.
 
     .. py:method:: create_experiment(name: str [, artifact_location: Optional[str], tags: Optional[List[ExperimentTag]]]) -> CreateExperimentResponse
 
@@ -30,26 +30,26 @@ manage other workspace objects such as folders, notebooks, and libraries.
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Create experiment.
-
-Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that
-another experiment with the same name does not already exist and fails if another experiment with the
-same name already exists.
-
-Throws `RESOURCE_ALREADY_EXISTS` if a experiment with the given name exists.
-
-:param name: str
-  Experiment name.
-:param artifact_location: str (optional)
-  Location where all artifacts for the experiment are stored. If not provided, the remote server will
-  select an appropriate default.
-:param tags: List[:class:`ExperimentTag`] (optional)
-  A collection of tags to set on the experiment. Maximum tag size and number of tags per request
-  depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250
-  bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to
-  support up to 20 tags per request.
-
-:returns: :class:`CreateExperimentResponse`
-
+        
+        Creates an experiment with a name. Returns the ID of the newly created experiment. Validates that
+        another experiment with the same name does not already exist and fails if another experiment with the
+        same name already exists.
+        
+        Throws `RESOURCE_ALREADY_EXISTS` if an experiment with the given name exists.
+        
+        :param name: str
+          Experiment name.
+        :param artifact_location: str (optional)
+          Location where all artifacts for the experiment are stored. If not provided, the remote server will
+          select an appropriate default.
+        :param tags: List[:class:`ExperimentTag`] (optional)
+          A collection of tags to set on the experiment. Maximum tag size and number of tags per request
+          depends on the storage backend. All storage backends are guaranteed to support tag keys up to 250
+          bytes in size and tag values up to 5000 bytes in size. All storage backends are also guaranteed to
+          support up to 20 tags per request.
+        
+        :returns: :class:`CreateExperimentResponse`
+        
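Because `create_experiment` fails with `RESOURCE_ALREADY_EXISTS` on duplicate names, examples in these docs suffix names with a timestamp. A short sketch in that style (the user path is a placeholder):

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # A timestamp suffix keeps reruns from colliding on the unique name.
    response = w.experiments.create_experiment(
        name=f"/Users/me@example.com/sdk-{time.time_ns()}",
        tags=[ml.ExperimentTag(key="team", value="mlops")],
    )
    print(response.experiment_id)
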
 
     .. py:method:: create_run( [, experiment_id: Optional[str], start_time: Optional[int], tags: Optional[List[RunTag]], user_id: Optional[str]]) -> CreateRunResponse
 
@@ -75,101 +75,101 @@ Throws `RESOURCE_ALREADY_EXISTS` if a experiment with the given name exists.
             w.experiments.delete_run(run_id=created.run.info.run_id)
 
         Create a run.
-
-Creates a new run within an experiment. A run is usually a single execution of a machine learning or
-data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag`
-associated with a single execution.
-
-:param experiment_id: str (optional)
-  ID of the associated experiment.
-:param start_time: int (optional)
-  Unix timestamp in milliseconds of when the run started.
-:param tags: List[:class:`RunTag`] (optional)
-  Additional metadata for run.
-:param user_id: str (optional)
-  ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in
-  a future MLflow release. Use 'mlflow.user' tag instead.
-
-:returns: :class:`CreateRunResponse`
-
+        
+        Creates a new run within an experiment. A run is usually a single execution of a machine learning or
+        data ETL pipeline. MLflow uses runs to track the `mlflowParam`, `mlflowMetric` and `mlflowRunTag`
+        associated with a single execution.
+        
+        :param experiment_id: str (optional)
+          ID of the associated experiment.
+        :param start_time: int (optional)
+          Unix timestamp in milliseconds of when the run started.
+        :param tags: List[:class:`RunTag`] (optional)
+          Additional metadata for run.
+        :param user_id: str (optional)
+          ID of the user executing the run. This field is deprecated as of MLflow 1.0, and will be removed in
+          a future MLflow release. Use 'mlflow.user' tag instead.
+        
+        :returns: :class:`CreateRunResponse`
+        
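A run hangs off an experiment, so the two create calls chain naturally; `start_time` is Unix milliseconds per the parameter description. A sketch in the style of the examples already in this file:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    experiment = w.experiments.create_experiment(name=f"sdk-{time.time_ns()}")
    created = w.experiments.create_run(
        experiment_id=experiment.experiment_id,
        start_time=int(time.time() * 1000),
        tags=[ml.RunTag(key="source", value="sdk-example")],
    )
    print(created.run.info.run_id)
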
 
     .. py:method:: delete_experiment(experiment_id: str)
 
         Delete an experiment.
-
-Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the
-experiment uses FileStore, artifacts associated with experiment are also deleted.
-
-:param experiment_id: str
-  ID of the associated experiment.
-
-
-
+        
+        Marks an experiment and associated metadata, runs, metrics, params, and tags for deletion. If the
+        experiment uses FileStore, artifacts associated with experiment are also deleted.
+        
+        :param experiment_id: str
+          ID of the associated experiment.
+        
+        
+        
 
     .. py:method:: delete_run(run_id: str)
 
         Delete a run.
-
-Marks a run for deletion.
-
-:param run_id: str
-  ID of the run to delete.
-
-
-
+        
+        Marks a run for deletion.
+        
+        :param run_id: str
+          ID of the run to delete.
+        
+        
+        
 
     .. py:method:: delete_runs(experiment_id: str, max_timestamp_millis: int [, max_runs: Optional[int]]) -> DeleteRunsResponse
 
         Delete runs by creation time.
-
-Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at
-most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
-client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete.
-
-:param experiment_id: str
-  The ID of the experiment containing the runs to delete.
-:param max_timestamp_millis: int
-  The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs
-  created prior to or at this timestamp are deleted.
-:param max_runs: int (optional)
-  An optional positive integer indicating the maximum number of runs to delete. The maximum allowed
-  value for max_runs is 10000.
-
-:returns: :class:`DeleteRunsResponse`
-
+        
+        Bulk delete runs in an experiment that were created prior to or at the specified timestamp. Deletes at
+        most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
+        client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-delete.
+        
+        :param experiment_id: str
+          The ID of the experiment containing the runs to delete.
+        :param max_timestamp_millis: int
+          The maximum creation timestamp in milliseconds since the UNIX epoch for deleting runs. Only runs
+          created prior to or at this timestamp are deleted.
+        :param max_runs: int (optional)
+          An optional positive integer indicating the maximum number of runs to delete. The maximum allowed
+          value for max_runs is 10000.
+        
+        :returns: :class:`DeleteRunsResponse`
+        
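Since `delete_runs` removes at most `max_runs` per call, bulk cleanup loops until a call deletes fewer than the cap. A sketch with a placeholder experiment id, assuming the response exposes the deleted ids as `runs_deleted` (an assumption):

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    cutoff = int(time.time() * 1000)  # delete everything created up to now

    while True:
        resp = w.experiments.delete_runs(
            experiment_id="1234567890",  # placeholder experiment id
            max_timestamp_millis=cutoff,
            max_runs=100,
        )
        # runs_deleted is assumed to list the deleted run ids.
        if not resp.runs_deleted or len(resp.runs_deleted) < 100:
            break
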
 
     .. py:method:: delete_tag(run_id: str, key: str)
 
         Delete a tag.
-
-Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run
-completes.
-
-:param run_id: str
-  ID of the run that the tag was logged under. Must be provided.
-:param key: str
-  Name of the tag. Maximum size is 255 bytes. Must be provided.
-
-
-
+        
+        Deletes a tag on a run. Tags are run metadata that can be updated during a run and after a run
+        completes.
+        
+        :param run_id: str
+          ID of the run that the tag was logged under. Must be provided.
+        :param key: str
+          Name of the tag. Maximum size is 255 bytes. Must be provided.
+        
+        
+        
 
     .. py:method:: get_by_name(experiment_name: str) -> GetExperimentResponse
 
         Get metadata.
-
-Gets metadata for an experiment.
-
-This endpoint will return deleted experiments, but prefers the active experiment if an active and
-deleted experiment share the same name. If multiple deleted experiments share the same name, the API
-will return one of them.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists.
-
-:param experiment_name: str
-  Name of the associated experiment.
-
-:returns: :class:`GetExperimentResponse`
-
+        
+        Gets metadata for an experiment.
+        
+        This endpoint will return deleted experiments, but prefers the active experiment if an active and
+        deleted experiment share the same name. If multiple deleted experiments share the same name, the API
+        will return one of them.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists.
+        
+        :param experiment_name: str
+          Name of the associated experiment.
+        
+        :returns: :class:`GetExperimentResponse`
+        
 
     .. py:method:: get_experiment(experiment_id: str) -> GetExperimentResponse
 
@@ -192,104 +192,104 @@ Throws `RESOURCE_DOES_NOT_EXIST` if no experiment with the specified name exists
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Get an experiment.
-
-Gets metadata for an experiment. This method works on deleted experiments.
-
-:param experiment_id: str
-  ID of the associated experiment.
-
-:returns: :class:`GetExperimentResponse`
-
+        
+        Gets metadata for an experiment. This method works on deleted experiments.
+        
+        :param experiment_id: str
+          ID of the associated experiment.
+        
+        :returns: :class:`GetExperimentResponse`
+        
 
     .. py:method:: get_history(metric_key: str [, max_results: Optional[int], page_token: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[Metric]
 
         Get history of a given metric within a run.
-
-Gets a list of all values for the specified metric for a given run.
-
-:param metric_key: str
-  Name of the metric.
-:param max_results: int (optional)
-  Maximum number of Metric records to return per paginated request. Default is set to 25,000. If set
-  higher than 25,000, a request Exception will be raised.
-:param page_token: str (optional)
-  Token indicating the page of metric histories to fetch.
-:param run_id: str (optional)
-  ID of the run from which to fetch metric values. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run from which to fetch metric values. This field will be
-  removed in a future MLflow version.
-
-:returns: Iterator over :class:`Metric`
-
+        
+        Gets a list of all values for the specified metric for a given run.
+        
+        :param metric_key: str
+          Name of the metric.
+        :param max_results: int (optional)
+          Maximum number of Metric records to return per paginated request. Default is set to 25,000. If set
+          higher than 25,000, a request Exception will be raised.
+        :param page_token: str (optional)
+          Token indicating the page of metric histories to fetch.
+        :param run_id: str (optional)
+          ID of the run from which to fetch metric values. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run from which to fetch metric values. This field will be
+          removed in a future MLflow version.
+        
+        :returns: Iterator over :class:`Metric`
+        
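`get_history` pages through Metric records for one key; the SDK returns an iterator, so `page_token` never needs manual threading. A sketch with a placeholder run id:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Each Metric record carries the logged value, timestamp, and step.
    for metric in w.experiments.get_history(metric_key="rmse",
                                            run_id="0123456789abcdef",
                                            max_results=1000):
        print(metric.step, metric.value, metric.timestamp)
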
 
     .. py:method:: get_permission_levels(experiment_id: str) -> GetExperimentPermissionLevelsResponse
 
         Get experiment permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param experiment_id: str
-  The experiment for which to get or manage permissions.
-
-:returns: :class:`GetExperimentPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param experiment_id: str
+          The experiment for which to get or manage permissions.
+        
+        :returns: :class:`GetExperimentPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(experiment_id: str) -> ExperimentPermissions
 
         Get experiment permissions.
-
-Gets the permissions of an experiment. Experiments can inherit permissions from their root object.
-
-:param experiment_id: str
-  The experiment for which to get or manage permissions.
-
-:returns: :class:`ExperimentPermissions`
-
+        
+        Gets the permissions of an experiment. Experiments can inherit permissions from their root object.
+        
+        :param experiment_id: str
+          The experiment for which to get or manage permissions.
+        
+        :returns: :class:`ExperimentPermissions`
+        
 
     .. py:method:: get_run(run_id: str [, run_uuid: Optional[str]]) -> GetRunResponse
 
         Get a run.
-
-Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the
-same key are logged for a run, return only the value with the latest timestamp.
-
-If there are multiple values with the latest timestamp, return the maximum of these values.
-
-:param run_id: str
-  ID of the run to fetch. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future
-  MLflow version.
-
-:returns: :class:`GetRunResponse`
-
+        
+        Gets the metadata, metrics, params, and tags for a run. In the case where multiple metrics with the
+        same key are logged for a run, return only the value with the latest timestamp.
+        
+        If there are multiple values with the latest timestamp, return the maximum of these values.
+        
+        :param run_id: str
+          ID of the run to fetch. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run to fetch. This field will be removed in a future
+          MLflow version.
+        
+        :returns: :class:`GetRunResponse`
+        
 
     .. py:method:: list_artifacts( [, page_token: Optional[str], path: Optional[str], run_id: Optional[str], run_uuid: Optional[str]]) -> Iterator[FileInfo]
 
         Get all artifacts.
-
-List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
-contains only artifacts with the specified prefix. This API does not support pagination when listing
-artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
-`/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
-pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-
-:param page_token: str (optional)
-  Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
-  artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
-  `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
-  pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
-:param path: str (optional)
-  Filter artifacts matching this path (a relative path from the root artifact directory).
-:param run_id: str (optional)
-  ID of the run whose artifacts to list. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be removed
-  in a future MLflow version.
-
-:returns: Iterator over :class:`FileInfo`
-
+        
+        List artifacts for a run. Takes an optional `artifact_path` prefix. If it is specified, the response
+        contains only artifacts with the specified prefix. This API does not support pagination when listing
+        artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+        `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+        pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
+        
+        :param page_token: str (optional)
+          Token indicating the page of artifact results to fetch. `page_token` is not supported when listing
+          artifacts in UC Volumes. A maximum of 1000 artifacts will be retrieved for UC Volumes. Please call
+          `/api/2.0/fs/directories{directory_path}` for listing artifacts in UC Volumes, which supports
+          pagination. See [List directory contents | Files API](/api/workspace/files/listdirectorycontents).
+        :param path: str (optional)
+          Filter artifacts matching this path (a relative path from the root artifact directory).
+        :param run_id: str (optional)
+          ID of the run whose artifacts to list. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run whose artifacts to list. This field will be removed
+          in a future MLflow version.
+        
+        :returns: Iterator over :class:`FileInfo`
+        
 
     .. py:method:: list_experiments( [, max_results: Optional[int], page_token: Optional[str], view_type: Optional[str]]) -> Iterator[Experiment]
 
@@ -306,308 +306,308 @@ pagination. See [List directory contents | Files API](/api/workspace/files/listd
             all = w.experiments.list_experiments(ml.ListExperimentsRequest())
 
         List experiments.
-
-Gets a list of all experiments.
-
-:param max_results: int (optional)
-  Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If
-  `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are
-  encouraged to pass max_results explicitly and leverage page_token to iterate through experiments.
-:param page_token: str (optional)
-  Token indicating the page of experiments to fetch
-:param view_type: str (optional)
-  Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-
-:returns: Iterator over :class:`Experiment`
-
+        
+        Gets a list of all experiments.
+        
+        :param max_results: int (optional)
+          Maximum number of experiments desired. If `max_results` is unspecified, return all experiments. If
+          `max_results` is too large, it'll be automatically capped at 1000. Callers of this endpoint are
+          encouraged to pass max_results explicitly and leverage page_token to iterate through experiments.
+        :param page_token: str (optional)
+          Token indicating the page of experiments to fetch
+        :param view_type: str (optional)
+          Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
+        
+        :returns: Iterator over :class:`Experiment`
+        
 
     .. py:method:: log_batch( [, metrics: Optional[List[Metric]], params: Optional[List[Param]], run_id: Optional[str], tags: Optional[List[RunTag]]])
 
         Log a batch.
-
-Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server
-will respond with an error (non-200 status code).
-
-In case of error (due to internal server error or an invalid request), partial data may be written.
-
-You can write metrics, params, and tags in interleaving fashion, but within a given entity type are
-guaranteed to follow the order specified in the request body.
-
-The overwrite behavior for metrics, params, and tags is as follows:
-
-* Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to
-the set of values for the metric with the provided key.
-
-* Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple
-tag values with the same key are provided in the same API request, the last-provided tag value is
-written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent.
-
-* Parameters: once written, param values cannot be changed (attempting to overwrite a param value will
-result in an error). However, logging the same param (key, value) is permitted. Specifically, logging
-a param is idempotent.
-
-Request Limits ------------------------------- A single JSON-serialized API request may be up to 1 MB
-in size and contain:
-
-* No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 params * Up to
-100 tags
-
-For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900
-metrics, 50 params, and 51 tags is invalid.
-
-The following limits also apply to metric, param, and tag keys and values:
-
-* Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter and tag
-values can be up to 250 characters in length
-
-:param metrics: List[:class:`Metric`] (optional)
-  Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and
-  tags in total.
-:param params: List[:class:`Param`] (optional)
-  Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and
-  tags in total.
-:param run_id: str (optional)
-  ID of the run to log under
-:param tags: List[:class:`RunTag`] (optional)
-  Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags
-  in total.
-
-
-
+        
+        Logs a batch of metrics, params, and tags for a run. If any data failed to be persisted, the server
+        will respond with an error (non-200 status code).
+        
+        In case of error (due to internal server error or an invalid request), partial data may be written.
+        
+        You can write metrics, params, and tags in interleaving fashion, but writes within a given entity
+        type are guaranteed to follow the order specified in the request body.
+        
+        The overwrite behavior for metrics, params, and tags is as follows:
+        
+        * Metrics: metric values are never overwritten. Logging a metric (key, value, timestamp) appends to
+        the set of values for the metric with the provided key.
+        
+        * Tags: tag values can be overwritten by successive writes to the same tag key. That is, if multiple
+        tag values with the same key are provided in the same API request, the last-provided tag value is
+        written. Logging the same tag (key, value) is permitted. Specifically, logging a tag is idempotent.
+        
+        * Parameters: once written, param values cannot be changed (attempting to overwrite a param value will
+        result in an error). However, logging the same param (key, value) is permitted. Specifically, logging
+        a param is idempotent.
+        
+        Request Limits: A single JSON-serialized API request may be up to 1 MB in size and
+        contain:
+        
+        * No more than 1000 metrics, params, and tags in total * Up to 1000 metrics * Up to 100 params * Up to
+        100 tags
+        
+        For example, a valid request might contain 900 metrics, 50 params, and 50 tags, but logging 900
+        metrics, 50 params, and 51 tags is invalid.
+        
+        The following limits also apply to metric, param, and tag keys and values:
+        
+        * Metric keys, param keys, and tag keys can be up to 250 characters in length * Parameter and tag
+        values can be up to 250 characters in length
+        
+        :param metrics: List[:class:`Metric`] (optional)
+          Metrics to log. A single request can contain up to 1000 metrics, and up to 1000 metrics, params, and
+          tags in total.
+        :param params: List[:class:`Param`] (optional)
+          Params to log. A single request can contain up to 100 params, and up to 1000 metrics, params, and
+          tags in total.
+        :param run_id: str (optional)
+          ID of the run to log under
+        :param tags: List[:class:`RunTag`] (optional)
+          Tags to log. A single request can contain up to 100 tags, and up to 1000 metrics, params, and tags
+          in total.
+        
+        
+        
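The limits above (1000 metrics, 100 params, 100 tags, 1000 entities and 1 MB per request) shape how `log_batch` is called. A sketch with a placeholder run id:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    now = int(time.time() * 1000)

    # One request: stays well under the 1000/100/100 per-type caps.
    w.experiments.log_batch(
        run_id="0123456789abcdef",  # placeholder run id
        metrics=[ml.Metric(key="rmse", value=0.73, timestamp=now, step=1)],
        params=[ml.Param(key="model_class", value="LogisticRegression")],
        tags=[ml.RunTag(key="stage", value="tuning")],
    )
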
 
     .. py:method:: log_inputs( [, datasets: Optional[List[DatasetInput]], run_id: Optional[str]])
 
         Log inputs to a run.
-
-**NOTE:** Experimental: This API may change or be removed in a future release without warning.
-
-:param datasets: List[:class:`DatasetInput`] (optional)
-  Dataset inputs
-:param run_id: str (optional)
-  ID of the run to log under
-
-
-
+        
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        
+        :param datasets: List[:class:`DatasetInput`] (optional)
+          Dataset inputs
+        :param run_id: str (optional)
+          ID of the run to log under
+        
+        
+        
 
     .. py:method:: log_metric(key: str, value: float, timestamp: int [, run_id: Optional[str], run_uuid: Optional[str], step: Optional[int]])
 
         Log a metric.
-
-Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated
-timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be
-logged multiple times.
-
-:param key: str
-  Name of the metric.
-:param value: float
-  Double value of the metric being logged.
-:param timestamp: int
-  Unix timestamp in milliseconds at the time metric was logged.
-:param run_id: str (optional)
-  ID of the run under which to log the metric. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run under which to log the metric. This field will be
-  removed in a future MLflow version.
-:param step: int (optional)
-  Step at which to log the metric
-
-
-
+        
+        Logs a metric for a run. A metric is a key-value pair (string key, float value) with an associated
+        timestamp. Examples include the various metrics that represent ML model accuracy. A metric can be
+        logged multiple times.
+        
+        :param key: str
+          Name of the metric.
+        :param value: float
+          Double value of the metric being logged.
+        :param timestamp: int
+          Unix timestamp in milliseconds at the time metric was logged.
+        :param run_id: str (optional)
+          ID of the run under which to log the metric. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run under which to log the metric. This field will be
+          removed in a future MLflow version.
+        :param step: int (optional)
+          Step at which to log the metric
+        
+        
+        
 
     .. py:method:: log_model( [, model_json: Optional[str], run_id: Optional[str]])
 
         Log a model.
-
-**NOTE:** Experimental: This API may change or be removed in a future release without warning.
-
-:param model_json: str (optional)
-  MLmodel file in json format.
-:param run_id: str (optional)
-  ID of the run to log under
-
-
-
+        
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        
+        :param model_json: str (optional)
+          MLmodel file in json format.
+        :param run_id: str (optional)
+          ID of the run to log under
+        
+        
+        
 
     .. py:method:: log_param(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]])
 
         Log a param.
-
-Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include
-hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A
-param can be logged only once for a run.
-
-:param key: str
-  Name of the param. Maximum size is 255 bytes.
-:param value: str
-  String value of the param being logged. Maximum size is 500 bytes.
-:param run_id: str (optional)
-  ID of the run under which to log the param. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be
-  removed in a future MLflow version.
-
-
-
+        
+        Logs a param used for a run. A param is a key-value pair (string key, string value). Examples include
+        hyperparameters used for ML model training and constant dates and values used in an ETL pipeline. A
+        param can be logged only once for a run.
+        
+        :param key: str
+          Name of the param. Maximum size is 255 bytes.
+        :param value: str
+          String value of the param being logged. Maximum size is 500 bytes.
+        :param run_id: str (optional)
+          ID of the run under which to log the param. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run under which to log the param. This field will be
+          removed in a future MLflow version.
+        
+        
+        
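The write-once semantics of params versus the append semantics of metrics are worth seeing side by side. A sketch with a placeholder run id:

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    run_id = "0123456789abcdef"  # placeholder run id

    # Params are write-once per run: re-logging the same (key, value) is
    # idempotent, but a different value for the same key is an error.
    w.experiments.log_param(key="alpha", value="0.5", run_id=run_id)

    # Metrics append: the same key can be logged at many steps.
    w.experiments.log_metric(key="loss", value=0.42,
                             timestamp=int(time.time() * 1000),
                             run_id=run_id, step=1)
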
 
     .. py:method:: restore_experiment(experiment_id: str)
 
         Restores an experiment.
-
-Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics,
-params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are
-also restored.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted.
-
-:param experiment_id: str
-  ID of the associated experiment.
-
-
-
+        
+        Restore an experiment marked for deletion. This also restores associated metadata, runs, metrics,
+        params, and tags. If experiment uses FileStore, underlying artifacts associated with experiment are
+        also restored.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if experiment was never created or was permanently deleted.
+        
+        :param experiment_id: str
+          ID of the associated experiment.
+        
+        
+        
 
     .. py:method:: restore_run(run_id: str)
 
         Restore a run.
-
-Restores a deleted run.
-
-:param run_id: str
-  ID of the run to restore.
-
-
-
+        
+        Restores a deleted run.
+        
+        :param run_id: str
+          ID of the run to restore.
+        
+        
+        
 
     .. py:method:: restore_runs(experiment_id: str, min_timestamp_millis: int [, max_runs: Optional[int]]) -> RestoreRunsResponse
 
         Restore runs by deletion time.
-
-Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores
-at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
-client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore.
-
-:param experiment_id: str
-  The ID of the experiment containing the runs to restore.
-:param min_timestamp_millis: int
-  The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs
-  deleted no earlier than this timestamp are restored.
-:param max_runs: int (optional)
-  An optional positive integer indicating the maximum number of runs to restore. The maximum allowed
-  value for max_runs is 10000.
-
-:returns: :class:`RestoreRunsResponse`
-
+        
+        Bulk restore runs in an experiment that were deleted no earlier than the specified timestamp. Restores
+        at most max_runs per request. To call this API from a Databricks Notebook in Python, you can use the
+        client code snippet on https://learn.microsoft.com/en-us/azure/databricks/mlflow/runs#bulk-restore.
+        
+        :param experiment_id: str
+          The ID of the experiment containing the runs to restore.
+        :param min_timestamp_millis: int
+          The minimum deletion timestamp in milliseconds since the UNIX epoch for restoring runs. Only runs
+          deleted no earlier than this timestamp are restored.
+        :param max_runs: int (optional)
+          An optional positive integer indicating the maximum number of runs to restore. The maximum allowed
+          value for max_runs is 10000.
+        
+        :returns: :class:`RestoreRunsResponse`
+        
 
     .. py:method:: search_experiments( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], view_type: Optional[SearchExperimentsViewType]]) -> Iterator[Experiment]
 
         Search experiments.
-
-Searches for experiments that satisfy specified search criteria.
-
-:param filter: str (optional)
-  String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")
-:param max_results: int (optional)
-  Maximum number of experiments desired. Max threshold is 3000.
-:param order_by: List[str] (optional)
-  List of columns for ordering search results, which can include experiment name and last updated
-  timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are
-  done by experiment id DESC.
-:param page_token: str (optional)
-  Token indicating the page of experiments to fetch
-:param view_type: :class:`SearchExperimentsViewType` (optional)
-  Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
-
-:returns: Iterator over :class:`Experiment`
-
+        
+        Searches for experiments that satisfy specified search criteria.
+        
+        :param filter: str (optional)
+          String representing a SQL filter condition (e.g. "name ILIKE 'my-experiment%'")
+        :param max_results: int (optional)
+          Maximum number of experiments desired. Max threshold is 3000.
+        :param order_by: List[str] (optional)
+          List of columns for ordering search results, which can include experiment name and last updated
+          timestamp with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are
+          done by experiment id DESC.
+        :param page_token: str (optional)
+          Token indicating the page of experiments to fetch
+        :param view_type: :class:`SearchExperimentsViewType` (optional)
+          Qualifier for type of experiments to be returned. If unspecified, return only active experiments.
+        
+        :returns: Iterator over :class:`Experiment`
+        
 
     .. py:method:: search_runs( [, experiment_ids: Optional[List[str]], filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str], run_view_type: Optional[SearchRunsRunViewType]]) -> Iterator[Run]
 
         Search for runs.
-
-Searches for runs that satisfy expressions.
-
-Search expressions can use `mlflowMetric` and `mlflowParam` keys.",
-
-:param experiment_ids: List[str] (optional)
-  List of experiment IDs to search over.
-:param filter: str (optional)
-  A filter expression over params, metrics, and tags, that allows returning a subset of runs. The
-  syntax is a subset of SQL that supports ANDing together binary operations between a param, metric,
-  or tag and a constant.
-  
-  Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'`
-  
-  You can select columns with special characters (hyphen, space, period, etc.) by using double quotes:
-  `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'`
-  
-  Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.
-:param max_results: int (optional)
-  Maximum number of runs desired. Max threshold is 50000
-:param order_by: List[str] (optional)
-  List of columns to be ordered by, including attributes, params, metrics, and tags with an optional
-  "DESC" or "ASC" annotation, where "ASC" is the default. Example: ["params.input DESC",
-  "metrics.alpha ASC", "metrics.rmse"] Tiebreaks are done by start_time DESC followed by run_id for
-  runs with the same start time (and this is the default ordering criterion if order_by is not
-  provided).
-:param page_token: str (optional)
-  Token for the current page of runs.
-:param run_view_type: :class:`SearchRunsRunViewType` (optional)
-  Whether to display only active, only deleted, or all runs. Defaults to only active runs.
-
-:returns: Iterator over :class:`Run`
-
+        
+        Searches for runs that satisfy expressions.
+        
+        Search expressions can use `mlflowMetric` and `mlflowParam` keys.
+        
+        :param experiment_ids: List[str] (optional)
+          List of experiment IDs to search over.
+        :param filter: str (optional)
+          A filter expression over params, metrics, and tags, that allows returning a subset of runs. The
+          syntax is a subset of SQL that supports ANDing together binary operations between a param, metric,
+          or tag and a constant.
+          
+          Example: `metrics.rmse < 1 and params.model_class = 'LogisticRegression'`
+          
+          You can select columns with special characters (hyphen, space, period, etc.) by using double quotes:
+          `metrics."model class" = 'LinearRegression' and tags."user-name" = 'Tomas'`
+          
+          Supported operators are `=`, `!=`, `>`, `>=`, `<`, and `<=`.
+        :param max_results: int (optional)
+          Maximum number of runs desired. Max threshold is 50000
+        :param order_by: List[str] (optional)
+          List of columns to be ordered by, including attributes, params, metrics, and tags with an optional
+          "DESC" or "ASC" annotation, where "ASC" is the default. Example: ["params.input DESC",
+          "metrics.alpha ASC", "metrics.rmse"] Tiebreaks are done by start_time DESC followed by run_id for
+          runs with the same start time (and this is the default ordering criterion if order_by is not
+          provided).
+        :param page_token: str (optional)
+          Token for the current page of runs.
+        :param run_view_type: :class:`SearchRunsRunViewType` (optional)
+          Whether to display only active, only deleted, or all runs. Defaults to only active runs.
+        
+        :returns: Iterator over :class:`Run`
+        
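The filter grammar described above (AND-ed comparisons over `params.`, `metrics.`, and `tags.`) composes with ordering. A sketch with a placeholder experiment id, assuming the `ACTIVE_ONLY` member on `SearchRunsRunViewType`:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    # Double-quote keys with special characters, e.g. tags."user-name".
    runs = w.experiments.search_runs(
        experiment_ids=["1234567890"],  # placeholder experiment id
        filter="metrics.rmse < 1 and params.model_class = 'LogisticRegression'",
        order_by=["metrics.rmse ASC"],
        run_view_type=ml.SearchRunsRunViewType.ACTIVE_ONLY,
    )
    for run in runs:
        print(run.info.run_id)
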
 
     .. py:method:: set_experiment_tag(experiment_id: str, key: str, value: str)
 
         Set a tag.
-
-Sets a tag on an experiment. Experiment tags are metadata that can be updated.
-
-:param experiment_id: str
-  ID of the experiment under which to log the tag. Must be provided.
-:param key: str
-  Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
-  support key values up to 250 bytes in size.
-:param value: str
-  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-  are guaranteed to support key values up to 5000 bytes in size.
-
-
-
+        
+        Sets a tag on an experiment. Experiment tags are metadata that can be updated.
+        
+        :param experiment_id: str
+          ID of the experiment under which to log the tag. Must be provided.
+        :param key: str
+          Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
+          support key values up to 250 bytes in size.
+        :param value: str
+          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+          are guaranteed to support key values up to 5000 bytes in size.
+        
+        
+        
 
     .. py:method:: set_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions
 
         Set experiment permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param experiment_id: str
-  The experiment for which to get or manage permissions.
-:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-
-:returns: :class:`ExperimentPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param experiment_id: str
+          The experiment for which to get or manage permissions.
+        :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
+        
+        :returns: :class:`ExperimentPermissions`
+        
 
     .. py:method:: set_tag(key: str, value: str [, run_id: Optional[str], run_uuid: Optional[str]])
 
         Set a tag.
-
-Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.
-
-:param key: str
-  Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
-  support key values up to 250 bytes in size.
-:param value: str
-  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-  are guaranteed to support key values up to 5000 bytes in size.
-:param run_id: str (optional)
-  ID of the run under which to log the tag. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be
-  removed in a future MLflow version.
-
-
-
+        
+        Sets a tag on a run. Tags are run metadata that can be updated during a run and after a run completes.
+        
+        :param key: str
+          Name of the tag. Maximum size depends on storage backend. All storage backends are guaranteed to
+          support key values up to 250 bytes in size.
+        :param value: str
+          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+          are guaranteed to support key values up to 5000 bytes in size.
+        :param run_id: str (optional)
+          ID of the run under which to log the tag. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run under which to log the tag. This field will be
+          removed in a future MLflow version.
+        
+        
+        
 
     .. py:method:: update_experiment(experiment_id: str [, new_name: Optional[str]])
 
@@ -630,29 +630,29 @@ Sets a tag on a run. Tags are run metadata that can be updated during a run and
             w.experiments.delete_experiment(experiment_id=experiment.experiment_id)
 
         Update an experiment.
-
-Updates experiment metadata.
-
-:param experiment_id: str
-  ID of the associated experiment.
-:param new_name: str (optional)
-  If provided, the experiment's name is changed to the new name. The new name must be unique.
-
-
-
+        
+        Updates experiment metadata.
+        
+        :param experiment_id: str
+          ID of the associated experiment.
+        :param new_name: str (optional)
+          If provided, the experiment's name is changed to the new name. The new name must be unique.
+        
+        
+        
 
     .. py:method:: update_permissions(experiment_id: str [, access_control_list: Optional[List[ExperimentAccessControlRequest]]]) -> ExperimentPermissions
 
         Update experiment permissions.
-
-Updates the permissions on an experiment. Experiments can inherit permissions from their root object.
-
-:param experiment_id: str
-  The experiment for which to get or manage permissions.
-:param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
-
-:returns: :class:`ExperimentPermissions`
-
+        
+        Updates the permissions on an experiment. Experiments can inherit permissions from their root object.
+        
+        :param experiment_id: str
+          The experiment for which to get or manage permissions.
+        :param access_control_list: List[:class:`ExperimentAccessControlRequest`] (optional)
+        
+        :returns: :class:`ExperimentPermissions`
+        
 
     .. py:method:: update_run( [, end_time: Optional[int], run_id: Optional[str], run_uuid: Optional[str], status: Optional[UpdateRunStatus]]) -> UpdateRunResponse
 
@@ -680,17 +680,18 @@ Updates the permissions on an experiment. Experiments can inherit permissions fr
             w.experiments.delete_run(run_id=created.run.info.run_id)
 
         Update a run.
-
-Updates run metadata.
-
-:param end_time: int (optional)
-  Unix timestamp in milliseconds of when the run ended.
-:param run_id: str (optional)
-  ID of the run to update. Must be provided.
-:param run_uuid: str (optional)
-  [Deprecated, use run_id instead] ID of the run to update.. This field will be removed in a future
-  MLflow version.
-:param status: :class:`UpdateRunStatus` (optional)
-  Updated status of the run.
-
-:returns: :class:`UpdateRunResponse`
+        
+        Updates run metadata.
+        
+        :param end_time: int (optional)
+          Unix timestamp in milliseconds of when the run ended.
+        :param run_id: str (optional)
+          ID of the run to update. Must be provided.
+        :param run_uuid: str (optional)
+          [Deprecated, use run_id instead] ID of the run to update. This field will be removed in a future
+          MLflow version.
+        :param status: :class:`UpdateRunStatus` (optional)
+          Updated status of the run.
+        
+        :returns: :class:`UpdateRunResponse`
+        
\ No newline at end of file
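Closing out a run ties `create_run` and `update_run` together: mark the status and stamp the end time in milliseconds. A sketch assuming the `FINISHED` member on `UpdateRunStatus`:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    created = w.experiments.create_run(
        experiment_id="1234567890",  # placeholder experiment id
    )
    w.experiments.update_run(
        run_id=created.run.info.run_id,
        status=ml.UpdateRunStatus.FINISHED,
        end_time=int(time.time() * 1000),
    )
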
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index efc7475c5..d08a85415 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -5,40 +5,40 @@
 .. py:class:: ModelRegistryAPI
 
     Note: This API reference documents APIs for the Workspace Model Registry. Databricks recommends using
-[Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides
-centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry
-will be deprecated in the future.
-
-The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to
-manage the full lifecycle of MLflow Models.
+    [Models in Unity Catalog](/api/workspace/registeredmodels) instead. Models in Unity Catalog provides
+    centralized model governance, cross-workspace access, lineage, and deployment. Workspace Model Registry
+    will be deprecated in the future.
+    
+    The Workspace Model Registry is a centralized model repository and a UI and set of APIs that enable you to
+    manage the full lifecycle of MLflow Models.
 
     .. py:method:: approve_transition_request(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> ApproveTransitionRequestResponse
 
         Approve transition request.
-
-Approves a model version stage transition request.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param stage: :class:`Stage`
-  Target stage of the transition. Valid values are:
-  
-  * `None`: The initial stage of a model version.
-  
-  * `Staging`: Staging or pre-production stage.
-  
-  * `Production`: Production stage.
-  
-  * `Archived`: Archived stage.
-:param archive_existing_versions: bool
-  Specifies whether to archive all current model versions in the target stage.
-:param comment: str (optional)
-  User-provided comment on the action.
-
-:returns: :class:`ApproveTransitionRequestResponse`
-
+        
+        Approves a model version stage transition request.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param stage: :class:`Stage`
+          Target stage of the transition. Valid values are:
+          
+          * `None`: The initial stage of a model version.
+          
+          * `Staging`: Staging or pre-production stage.
+          
+          * `Production`: Production stage.
+          
+          * `Archived`: Archived stage.
+        :param archive_existing_versions: bool
+          Specifies whether to archive all current model versions in the target stage.
+        :param comment: str (optional)
+          User-provided comment on the action.
+        
+        :returns: :class:`ApproveTransitionRequestResponse`
+        
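Approving a transition moves the version into the target stage; with `archive_existing_versions=True`, versions already in that stage are archived. A sketch with placeholder model coordinates, assuming the `STAGING` member on the `Stage` enum:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()

    w.model_registry.approve_transition_request(
        name="my-model",  # placeholder model name
        version="1",
        stage=ml.Stage.STAGING,
        archive_existing_versions=False,
        comment="validated on holdout set",
    )
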
 
     .. py:method:: create_comment(name: str, version: str, comment: str) -> CreateCommentResponse
 
@@ -65,19 +65,19 @@ Approves a model version stage transition request.
             w.model_registry.delete_comment(id=created.comment.id)
 
         Post a comment.
-
-Posts a comment on a model version. A comment can be submitted either by a user or programmatically to
-display relevant information about the model. For example, test results or deployment errors.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param comment: str
-  User-provided comment on the action.
-
-:returns: :class:`CreateCommentResponse`
-
+        
+        Posts a comment on a model version. A comment can be submitted either by a user or programmatically to
+        display relevant information about the model. For example, test results or deployment errors.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param comment: str
+          User-provided comment on the action.
+        
+        :returns: :class:`CreateCommentResponse`
+        
 
     .. py:method:: create_model(name: str [, description: Optional[str], tags: Optional[List[ModelTag]]]) -> CreateModelResponse
 
@@ -95,20 +95,20 @@ display relevant information about the model. For example, test results or deplo
             model = w.model_registry.create_model(name=f'sdk-{time.time_ns()}')
 
         Create a model.
-
-Creates a new registered model with the name specified in the request body.
-
-Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
-
-:param name: str
-  Register models under this name
-:param description: str (optional)
-  Optional description for registered model.
-:param tags: List[:class:`ModelTag`] (optional)
-  Additional metadata for registered model.
-
-:returns: :class:`CreateModelResponse`
-
+        
+        Creates a new registered model with the name specified in the request body.
+        
+        Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exists.
+        
+        :param name: str
+          Register models under this name
+        :param description: str (optional)
+          Optional description for registered model.
+        :param tags: List[:class:`ModelTag`] (optional)
+          Additional metadata for registered model.
+        
+        :returns: :class:`CreateModelResponse`
+        
 
     .. py:method:: create_model_version(name: str, source: str [, description: Optional[str], run_id: Optional[str], run_link: Optional[str], tags: Optional[List[ModelVersionTag]]]) -> CreateModelVersionResponse
 
@@ -128,52 +128,52 @@ Throws `RESOURCE_ALREADY_EXISTS` if a registered model with the given name exist
             mv = w.model_registry.create_model_version(name=model.registered_model.name, source="dbfs:/tmp")
 
         Create a model version.
-
-Creates a model version.
-
-:param name: str
-  Register model under this name
-:param source: str
-  URI indicating the location of the model artifacts.
-:param description: str (optional)
-  Optional description for model version.
-:param run_id: str (optional)
-  MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking
-  server
-:param run_link: str (optional)
-  MLflow run link - this is the exact link of the run that generated this model version, potentially
-  hosted at another instance of MLflow.
-:param tags: List[:class:`ModelVersionTag`] (optional)
-  Additional metadata for model version.
-
-:returns: :class:`CreateModelVersionResponse`
-
+        
+        Creates a model version.
+        
+        :param name: str
+          Register model under this name
+        :param source: str
+          URI indicating the location of the model artifacts.
+        :param description: str (optional)
+          Optional description for model version.
+        :param run_id: str (optional)
+          MLflow run ID for correlation, if `source` was generated by an experiment run in MLflow tracking
+          server
+        :param run_link: str (optional)
+          MLflow run link - this is the exact link of the run that generated this model version, potentially
+          hosted at another instance of MLflow.
+        :param tags: List[:class:`ModelVersionTag`] (optional)
+          Additional metadata for model version.
+        
+        :returns: :class:`CreateModelVersionResponse`
+        
 
     .. py:method:: create_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> CreateTransitionRequestResponse
 
         Make a transition request.
-
-Creates a model version stage transition request.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param stage: :class:`Stage`
-  Target stage of the transition. Valid values are:
-  
-  * `None`: The initial stage of a model version.
-  
-  * `Staging`: Staging or pre-production stage.
-  
-  * `Production`: Production stage.
-  
-  * `Archived`: Archived stage.
-:param comment: str (optional)
-  User-provided comment on the action.
-
-:returns: :class:`CreateTransitionRequestResponse`
-
+        
+        Creates a model version stage transition request.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param stage: :class:`Stage`
+          Target stage of the transition. Valid values are:
+          
+          * `None`: The initial stage of a model version.
+          
+          * `Staging`: Staging or pre-production stage.
+          
+          * `Production`: Production stage.
+          
+          * `Archived`: Archived stage.
+        :param comment: str (optional)
+          User-provided comment on the action.
+        
+        :returns: :class:`CreateTransitionRequestResponse`
+        
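
As a sketch, opening a request (rather than performing the transition directly) might look like the following; the identifiers are placeholders and `ml.Stage.PRODUCTION` is an assumed enum member:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    # Open a transition request that a reviewer can later approve or reject.
    request = w.model_registry.create_transition_request(
        name="my-model",
        version="1",
        stage=ml.Stage.PRODUCTION,
        comment="please promote to production",
    )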
 
     .. py:method:: create_webhook(events: List[RegistryWebhookEvent] [, description: Optional[str], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], model_name: Optional[str], status: Optional[RegistryWebhookStatus]]) -> CreateWebhookResponse
 
@@ -197,183 +197,183 @@ Creates a model version stage transition request.
             w.model_registry.delete_webhook(id=created.webhook.id)
 
         Create a webhook.
-
-**NOTE**: This endpoint is in Public Preview.
-
-Creates a registry webhook.
-
-:param events: List[:class:`RegistryWebhookEvent`]
-  Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
-  created for the associated model.
-  
-  * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-  
-  * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-  
-  * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-  
-  * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
-  specified for a registry-wide webhook, which can be created by not specifying a model name in the
-  create request.
-  
-  * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-  
-  * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
-  staging.
-  
-  * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
-  production.
-  
-  * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
-:param description: str (optional)
-  User-specified description for the webhook.
-:param http_url_spec: :class:`HttpUrlSpec` (optional)
-:param job_spec: :class:`JobSpec` (optional)
-:param model_name: str (optional)
-  Name of the model whose events would trigger this webhook.
-:param status: :class:`RegistryWebhookStatus` (optional)
-  Enable or disable triggering the webhook, or put the webhook into test mode. The default is
-  `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-  
-  * `DISABLED`: Webhook is not triggered.
-  
-  * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
-  event.
-
-:returns: :class:`CreateWebhookResponse`
-
+        
+        **NOTE**: This endpoint is in Public Preview.
+        
+        Creates a registry webhook.
+        
+        :param events: List[:class:`RegistryWebhookEvent`]
+          Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
+          created for the associated model.
+          
+          * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
+          
+          * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
+          
+          * `COMMENT_CREATED`: A user wrote a comment on a registered model.
+          
+          * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
+          specified for a registry-wide webhook, which can be created by not specifying a model name in the
+          create request.
+          
+          * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
+          
+          * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
+          staging.
+          
+          * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
+          production.
+          
+          * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
+        :param description: str (optional)
+          User-specified description for the webhook.
+        :param http_url_spec: :class:`HttpUrlSpec` (optional)
+        :param job_spec: :class:`JobSpec` (optional)
+        :param model_name: str (optional)
+          Name of the model whose events would trigger this webhook.
+        :param status: :class:`RegistryWebhookStatus` (optional)
+          Enable or disable triggering the webhook, or put the webhook into test mode. The default is
+          `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
+          
+          * `DISABLED`: Webhook is not triggered.
+          
+          * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
+          event.
+        
+        :returns: :class:`CreateWebhookResponse`
+        
 
     .. py:method:: delete_comment(id: str)
 
         Delete a comment.
-
-Deletes a comment on a model version.
-
-:param id: str
-
-
-
+        
+        Deletes a comment on a model version.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: delete_model(name: str)
 
         Delete a model.
-
-Deletes a registered model.
-
-:param name: str
-  Registered model unique name identifier.
-
-
-
+        
+        Deletes a registered model.
+        
+        :param name: str
+          Registered model unique name identifier.
+        
+        
+        
 
     .. py:method:: delete_model_tag(name: str, key: str)
 
         Delete a model tag.
-
-Deletes the tag for a registered model.
-
-:param name: str
-  Name of the registered model that the tag was logged under.
-:param key: str
-  Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
-  is 250 bytes.
-
-
-
+        
+        Deletes the tag for a registered model.
+        
+        :param name: str
+          Name of the registered model that the tag was logged under.
+        :param key: str
+          Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
+          is 250 bytes.
+        
+        
+        
 
     .. py:method:: delete_model_version(name: str, version: str)
 
         Delete a model version.
-
-Deletes a model version.
-
-:param name: str
-  Name of the registered model
-:param version: str
-  Model version number
-
-
-
+        
+        Deletes a model version.
+        
+        :param name: str
+          Name of the registered model
+        :param version: str
+          Model version number
+        
+        
+        
 
     .. py:method:: delete_model_version_tag(name: str, version: str, key: str)
 
         Delete a model version tag.
-
-Deletes a model version tag.
-
-:param name: str
-  Name of the registered model that the tag was logged under.
-:param version: str
-  Model version number that the tag was logged under.
-:param key: str
-  Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
-  is 250 bytes.
-
-
-
+        
+        Deletes a model version tag.
+        
+        :param name: str
+          Name of the registered model that the tag was logged under.
+        :param version: str
+          Model version number that the tag was logged under.
+        :param key: str
+          Name of the tag. The name must be an exact match; wild-card deletion is not supported. Maximum size
+          is 250 bytes.
+        
+        
+        
 
     .. py:method:: delete_transition_request(name: str, version: str, stage: DeleteTransitionRequestStage, creator: str [, comment: Optional[str]])
 
         Delete a transition request.
-
-Cancels a model version stage transition request.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param stage: :class:`DeleteTransitionRequestStage`
-  Target stage of the transition request. Valid values are:
-  
-  * `None`: The initial stage of a model version.
-  
-  * `Staging`: Staging or pre-production stage.
-  
-  * `Production`: Production stage.
-  
-  * `Archived`: Archived stage.
-:param creator: str
-  Username of the user who created this request. Of the transition requests matching the specified
-  details, only the one transition created by this user will be deleted.
-:param comment: str (optional)
-  User-provided comment on the action.
-
-
-
+        
+        Cancels a model version stage transition request.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param stage: :class:`DeleteTransitionRequestStage`
+          Target stage of the transition request. Valid values are:
+          
+          * `None`: The initial stage of a model version.
+          
+          * `Staging`: Staging or pre-production stage.
+          
+          * `Production`: Production stage.
+          
+          * `Archived`: Archived stage.
+        :param creator: str
+          Username of the user who created this request. Of the transition requests matching the specified
+          details, only the one transition created by this user will be deleted.
+        :param comment: str (optional)
+          User-provided comment on the action.
+        
+        
+        
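
A sketch of cancelling an open request, assuming a placeholder model, the creator's username, and that the target stage is exposed as `ml.DeleteTransitionRequestStage.PRODUCTION`:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    # Cancel the open Production transition request created by this user.
    w.model_registry.delete_transition_request(
        name="my-model",
        version="1",
        stage=ml.DeleteTransitionRequestStage.PRODUCTION,
        creator="someone@example.com",
        comment="superseded by version 2",
    )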
 
     .. py:method:: delete_webhook( [, id: Optional[str]])
 
         Delete a webhook.
-
-**NOTE:** This endpoint is in Public Preview.
-
-Deletes a registry webhook.
-
-:param id: str (optional)
-  Webhook ID required to delete a registry webhook.
-
-
-
+        
+        **NOTE:** This endpoint is in Public Preview.
+        
+        Deletes a registry webhook.
+        
+        :param id: str (optional)
+          Webhook ID required to delete a registry webhook.
+        
+        
+        
 
     .. py:method:: get_latest_versions(name: str [, stages: Optional[List[str]]]) -> Iterator[ModelVersion]
 
         Get the latest version.
-
-Gets the latest version of a registered model.
-
-:param name: str
-  Registered model unique name identifier.
-:param stages: List[str] (optional)
-  List of stages.
-
-:returns: Iterator over :class:`ModelVersion`
-
+        
+        Gets the latest version of a registered model.
+        
+        :param name: str
+          Registered model unique name identifier.
+        :param stages: List[str] (optional)
+          List of stages.
+        
+        :returns: Iterator over :class:`ModelVersion`
+        
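
A short sketch of iterating the result for a placeholder model; the `current_stage` attribute on :class:`ModelVersion` is an assumption here:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Fetch the newest version per requested stage.
    for mv in w.model_registry.get_latest_versions(name="my-model",
                                                   stages=["Staging", "Production"]):
        print(mv.version, mv.current_stage)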
 
     .. py:method:: get_model(name: str) -> GetModelResponse
 
@@ -393,71 +393,71 @@ Gets the latest version of a registered model.
             model = w.model_registry.get_model(name=created.registered_model.name)
 
         Get model.
-
-Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also
-returns the model's Databricks workspace ID and the permission level of the requesting user on the
-model.
-
-[MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel
-
-:param name: str
-  Registered model unique name identifier.
-
-:returns: :class:`GetModelResponse`
-
+        
+        Get the details of a model. This is a Databricks workspace version of the [MLflow endpoint] that also
+        returns the model's Databricks workspace ID and the permission level of the requesting user on the
+        model.
+        
+        [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#get-registeredmodel
+        
+        :param name: str
+          Registered model unique name identifier.
+        
+        :returns: :class:`GetModelResponse`
+        
 
     .. py:method:: get_model_version(name: str, version: str) -> GetModelVersionResponse
 
         Get a model version.
-
-Get a model version.
-
-:param name: str
-  Name of the registered model
-:param version: str
-  Model version number
-
-:returns: :class:`GetModelVersionResponse`
-
+        
+        Get a model version.
+        
+        :param name: str
+          Name of the registered model
+        :param version: str
+          Model version number
+        
+        :returns: :class:`GetModelVersionResponse`
+        
 
     .. py:method:: get_model_version_download_uri(name: str, version: str) -> GetModelVersionDownloadUriResponse
 
         Get a model version URI.
-
-Gets a URI to download the model version.
-
-:param name: str
-  Name of the registered model
-:param version: str
-  Model version number
-
-:returns: :class:`GetModelVersionDownloadUriResponse`
-
+        
+        Gets a URI to download the model version.
+        
+        :param name: str
+          Name of the registered model
+        :param version: str
+          Model version number
+        
+        :returns: :class:`GetModelVersionDownloadUriResponse`
+        
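
For illustration, a minimal sketch with placeholder identifiers, assuming the response exposes the storage URI as `artifact_uri`:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.model_registry.get_model_version_download_uri(name="my-model", version="1")
    # Assumed field holding the downloadable location of the model artifacts.
    print(resp.artifact_uri)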
 
     .. py:method:: get_permission_levels(registered_model_id: str) -> GetRegisteredModelPermissionLevelsResponse
 
         Get registered model permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param registered_model_id: str
-  The registered model for which to get or manage permissions.
-
-:returns: :class:`GetRegisteredModelPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param registered_model_id: str
+          The registered model for which to get or manage permissions.
+        
+        :returns: :class:`GetRegisteredModelPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(registered_model_id: str) -> RegisteredModelPermissions
 
         Get registered model permissions.
-
-Gets the permissions of a registered model. Registered models can inherit permissions from their root
-object.
-
-:param registered_model_id: str
-  The registered model for which to get or manage permissions.
-
-:returns: :class:`RegisteredModelPermissions`
-
+        
+        Gets the permissions of a registered model. Registered models can inherit permissions from their root
+        object.
+        
+        :param registered_model_id: str
+          The registered model for which to get or manage permissions.
+        
+        :returns: :class:`RegisteredModelPermissions`
+        
 
     .. py:method:: list_models( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[Model]
 
@@ -474,30 +474,30 @@ object.
             all = w.model_registry.list_models(ml.ListModelsRequest())
 
         List models.
-
-Lists all available registered models, up to the limit specified in __max_results__.
-
-:param max_results: int (optional)
-  Maximum number of registered models desired. Max threshold is 1000.
-:param page_token: str (optional)
-  Pagination token to go to the next page based on a previous query.
-
-:returns: Iterator over :class:`Model`
-
+        
+        Lists all available registered models, up to the limit specified in __max_results__.
+        
+        :param max_results: int (optional)
+          Maximum number of registered models desired. Max threshold is 1000.
+        :param page_token: str (optional)
+          Pagination token to go to the next page based on a previous query.
+        
+        :returns: Iterator over :class:`Model`
+        
 
     .. py:method:: list_transition_requests(name: str, version: str) -> Iterator[Activity]
 
         List transition requests.
-
-Gets a list of all open stage transition requests for the model version.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-
-:returns: Iterator over :class:`Activity`
-
+        
+        Gets a list of all open stage transition requests for the model version.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        
+        :returns: Iterator over :class:`Activity`
+        
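
A minimal sketch of enumerating the open requests for one version, assuming a configured `WorkspaceClient` and placeholder identifiers:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Each item is an Activity describing one open transition request.
    for activity in w.model_registry.list_transition_requests(name="my-model", version="1"):
        print(activity)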
 
     .. py:method:: list_webhooks( [, events: Optional[List[RegistryWebhookEvent]], model_name: Optional[str], page_token: Optional[str]]) -> Iterator[RegistryWebhook]
 
@@ -514,207 +514,207 @@ Gets a list of all open stage transition requests for the model version.
             all = w.model_registry.list_webhooks(ml.ListWebhooksRequest())
 
         List registry webhooks.
-
-**NOTE:** This endpoint is in Public Preview.
-
-Lists all registry webhooks.
-
-:param events: List[:class:`RegistryWebhookEvent`] (optional)
-  If `events` is specified, any webhook with one or more of the specified trigger events is included
-  in the output. If `events` is not specified, webhooks of all event types are included in the output.
-:param model_name: str (optional)
-  If not specified, all webhooks associated with the specified events are listed, regardless of their
-  associated model.
-:param page_token: str (optional)
-  Token indicating the page of artifact results to fetch
-
-:returns: Iterator over :class:`RegistryWebhook`
-
+        
+        **NOTE:** This endpoint is in Public Preview.
+        
+        Lists all registry webhooks.
+        
+        :param events: List[:class:`RegistryWebhookEvent`] (optional)
+          If `events` is specified, any webhook with one or more of the specified trigger events is included
+          in the output. If `events` is not specified, webhooks of all event types are included in the output.
+        :param model_name: str (optional)
+          If not specified, all webhooks associated with the specified events are listed, regardless of their
+          associated model.
+        :param page_token: str (optional)
+          Token indicating the page of artifact results to fetch
+        
+        :returns: Iterator over :class:`RegistryWebhook`
+        
 
     .. py:method:: reject_transition_request(name: str, version: str, stage: Stage [, comment: Optional[str]]) -> RejectTransitionRequestResponse
 
         Reject a transition request.
-
-Rejects a model version stage transition request.
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param stage: :class:`Stage`
-  Target stage of the transition. Valid values are:
-  
-  * `None`: The initial stage of a model version.
-  
-  * `Staging`: Staging or pre-production stage.
-  
-  * `Production`: Production stage.
-  
-  * `Archived`: Archived stage.
-:param comment: str (optional)
-  User-provided comment on the action.
-
-:returns: :class:`RejectTransitionRequestResponse`
-
+        
+        Rejects a model version stage transition request.
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param stage: :class:`Stage`
+          Target stage of the transition. Valid values are:
+          
+          * `None`: The initial stage of a model version.
+          
+          * `Staging`: Staging or pre-production stage.
+          
+          * `Production`: Production stage.
+          
+          * `Archived`: Archived stage.
+        :param comment: str (optional)
+          User-provided comment on the action.
+        
+        :returns: :class:`RejectTransitionRequestResponse`
+        
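
A sketch of rejecting a pending promotion, under the same assumptions as the approve example above (placeholder identifiers, `ml.Stage.PRODUCTION` as the enum spelling):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    # Reject the pending request to move version 1 into Production.
    rejected = w.model_registry.reject_transition_request(
        name="my-model",
        version="1",
        stage=ml.Stage.PRODUCTION,
        comment="metrics regressed on the holdout set",
    )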
 
     .. py:method:: rename_model(name: str [, new_name: Optional[str]]) -> RenameModelResponse
 
         Rename a model.
-
-Renames a registered model.
-
-:param name: str
-  Registered model unique name identifier.
-:param new_name: str (optional)
-  If provided, updates the name for this `registered_model`.
-
-:returns: :class:`RenameModelResponse`
-
+        
+        Renames a registered model.
+        
+        :param name: str
+          Registered model unique name identifier.
+        :param new_name: str (optional)
+          If provided, updates the name for this `registered_model`.
+        
+        :returns: :class:`RenameModelResponse`
+        
 
     .. py:method:: search_model_versions( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[ModelVersion]
 
         Searches model versions.
-
-Searches for specific model versions based on the supplied __filter__.
-
-:param filter: str (optional)
-  String filter condition, like "name='my-model-name'". Must be a single boolean condition, with
-  string values wrapped in single quotes.
-:param max_results: int (optional)
-  Maximum number of models desired. Max threshold is 10K.
-:param order_by: List[str] (optional)
-  List of columns to be ordered by including model name, version, stage with an optional "DESC" or
-  "ASC" annotation, where "ASC" is the default. Tiebreaks are done by latest stage transition
-  timestamp, followed by name ASC, followed by version DESC.
-:param page_token: str (optional)
-  Pagination token to go to next page based on previous search query.
-
-:returns: Iterator over :class:`ModelVersion`
-
+        
+        Searches for specific model versions based on the supplied __filter__.
+        
+        :param filter: str (optional)
+          String filter condition, like "name='my-model-name'". Must be a single boolean condition, with
+          string values wrapped in single quotes.
+        :param max_results: int (optional)
+          Maximum number of models desired. Max threshold is 10K.
+        :param order_by: List[str] (optional)
+          List of columns to be ordered by including model name, version, stage with an optional "DESC" or
+          "ASC" annotation, where "ASC" is the default. Tiebreaks are done by latest stage transition
+          timestamp, followed by name ASC, followed by version DESC.
+        :param page_token: str (optional)
+          Pagination token to go to next page based on previous search query.
+        
+        :returns: Iterator over :class:`ModelVersion`
+        
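
A minimal sketch using the documented filter and ordering syntax; the model name is a placeholder:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # All versions of one model, newest version first.
    for mv in w.model_registry.search_model_versions(filter="name='my-model'",
                                                     order_by=["version DESC"],
                                                     max_results=100):
        print(mv.version)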
 
     .. py:method:: search_models( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[Model]
 
         Search models.
-
-Search for registered models based on the specified __filter__.
-
-:param filter: str (optional)
-  String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically
-  as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single
-  quotes.
-:param max_results: int (optional)
-  Maximum number of models desired. Default is 100. Max threshold is 1000.
-:param order_by: List[str] (optional)
-  List of columns for ordering search results, which can include model name and last updated timestamp
-  with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by model
-  name ASC.
-:param page_token: str (optional)
-  Pagination token to go to the next page based on a previous search query.
-
-:returns: Iterator over :class:`Model`
-
+        
+        Search for registered models based on the specified __filter__.
+        
+        :param filter: str (optional)
+          String filter condition, like "name LIKE 'my-model-name'". Interpreted in the backend automatically
+          as "name LIKE '%my-model-name%'". Single boolean condition, with string values wrapped in single
+          quotes.
+        :param max_results: int (optional)
+          Maximum number of models desired. Default is 100. Max threshold is 1000.
+        :param order_by: List[str] (optional)
+          List of columns for ordering search results, which can include model name and last updated timestamp
+          with an optional "DESC" or "ASC" annotation, where "ASC" is the default. Tiebreaks are done by model
+          name ASC.
+        :param page_token: str (optional)
+          Pagination token to go to the next page based on a previous search query.
+        
+        :returns: Iterator over :class:`Model`
+        
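
A short sketch relying on the backend's implicit wildcarding described above, so `'churn'` matches any model whose name contains that substring:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Interpreted server-side as "name LIKE '%churn%'".
    for model in w.model_registry.search_models(filter="name LIKE 'churn'", max_results=10):
        print(model.name)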
 
     .. py:method:: set_model_tag(name: str, key: str, value: str)
 
         Set a tag.
-
-Sets a tag on a registered model.
-
-:param name: str
-  Unique name of the model.
-:param key: str
-  Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
-  its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
-  to support key values up to 250 bytes in size.
-:param value: str
-  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-  are guaranteed to support key values up to 5000 bytes in size.
-
-
-
+        
+        Sets a tag on a registered model.
+        
+        :param name: str
+          Unique name of the model.
+        :param key: str
+          Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
+          its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
+          to support key values up to 250 bytes in size.
+        :param value: str
+          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+          are guaranteed to support key values up to 5000 bytes in size.
+        
+        
+        
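
A minimal sketch with placeholder values; per the semantics above, re-running with the same key simply overwrites the value:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # Attach (or overwrite) an "owner" tag on the registered model.
    w.model_registry.set_model_tag(name="my-model", key="owner", value="ml-platform-team")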
 
     .. py:method:: set_model_version_tag(name: str, version: str, key: str, value: str)
 
         Set a version tag.
-
-Sets a model version tag.
-
-:param name: str
-  Unique name of the model.
-:param version: str
-  Model version number.
-:param key: str
-  Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
-  its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
-  to support key values up to 250 bytes in size.
-:param value: str
-  String value of the tag being logged. Maximum size depends on storage backend. All storage backends
-  are guaranteed to support key values up to 5000 bytes in size.
-
-
-
+        
+        Sets a model version tag.
+        
+        :param name: str
+          Unique name of the model.
+        :param version: str
+          Model version number.
+        :param key: str
+          Name of the tag. Maximum size depends on storage backend. If a tag with this name already exists,
+          its preexisting value will be replaced by the specified `value`. All storage backends are guaranteed
+          to support key values up to 250 bytes in size.
+        :param value: str
+          String value of the tag being logged. Maximum size depends on storage backend. All storage backends
+          are guaranteed to support key values up to 5000 bytes in size.
+        
+        
+        
 
     .. py:method:: set_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions
 
         Set registered model permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param registered_model_id: str
-  The registered model for which to get or manage permissions.
-:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-
-:returns: :class:`RegisteredModelPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param registered_model_id: str
+          The registered model for which to get or manage permissions.
+        :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
+        
+        :returns: :class:`RegisteredModelPermissions`
+        
 
     .. py:method:: test_registry_webhook(id: str [, event: Optional[RegistryWebhookEvent]]) -> TestRegistryWebhookResponse
 
         Test a webhook.
-
-**NOTE:** This endpoint is in Public Preview.
-
-Tests a registry webhook.
-
-:param id: str
-  Webhook ID
-:param event: :class:`RegistryWebhookEvent` (optional)
-  If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the
-  test trigger uses a randomly chosen event associated with the webhook.
-
-:returns: :class:`TestRegistryWebhookResponse`
-
+        
+        **NOTE:** This endpoint is in Public Preview.
+        
+        Tests a registry webhook.
+        
+        :param id: str
+          Webhook ID
+        :param event: :class:`RegistryWebhookEvent` (optional)
+          If `event` is specified, the test trigger uses the specified event. If `event` is not specified, the
+          test trigger uses a randomly chosen event associated with the webhook.
+        
+        :returns: :class:`TestRegistryWebhookResponse`
+        
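
As a sketch, firing a synthetic event at a webhook under test might look like this; the webhook ID is a placeholder and `ml.RegistryWebhookEvent.MODEL_VERSION_CREATED` is the assumed enum spelling of the documented event:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    # Trigger the webhook with a specific event instead of a random one.
    resp = w.model_registry.test_registry_webhook(
        id="<webhook-id>",
        event=ml.RegistryWebhookEvent.MODEL_VERSION_CREATED,
    )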
 
     .. py:method:: transition_stage(name: str, version: str, stage: Stage, archive_existing_versions: bool [, comment: Optional[str]]) -> TransitionStageResponse
 
         Transition a stage.
-
-Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint]
-that also accepts a comment associated with the transition to be recorded.",
-
-[MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
-
-:param name: str
-  Name of the model.
-:param version: str
-  Version of the model.
-:param stage: :class:`Stage`
-  Target stage of the transition. Valid values are:
-  
-  * `None`: The initial stage of a model version.
-  
-  * `Staging`: Staging or pre-production stage.
-  
-  * `Production`: Production stage.
-  
-  * `Archived`: Archived stage.
-:param archive_existing_versions: bool
-  Specifies whether to archive all current model versions in the target stage.
-:param comment: str (optional)
-  User-provided comment on the action.
-
-:returns: :class:`TransitionStageResponse`
-
+        
+        Transition a model version's stage. This is a Databricks workspace version of the [MLflow endpoint]
+        that also accepts a comment associated with the transition to be recorded.
+        
+        [MLflow endpoint]: https://www.mlflow.org/docs/latest/rest-api.html#transition-modelversion-stage
+        
+        :param name: str
+          Name of the model.
+        :param version: str
+          Version of the model.
+        :param stage: :class:`Stage`
+          Target stage of the transition. Valid values are:
+          
+          * `None`: The initial stage of a model version.
+          
+          * `Staging`: Staging or pre-production stage.
+          
+          * `Production`: Production stage.
+          
+          * `Archived`: Archived stage.
+        :param archive_existing_versions: bool
+          Specifies whether to archive all current model versions in the target stage.
+        :param comment: str (optional)
+          User-provided comment on the action.
+        
+        :returns: :class:`TransitionStageResponse`
+        
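
A sketch of performing the transition directly (no approval flow), with placeholder identifiers and the assumed `ml.Stage.PRODUCTION` enum member:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import ml

    w = WorkspaceClient()
    # Move version 1 to Production, archiving whatever currently occupies that
    # stage, and record the reason in the audit comment.
    resp = w.model_registry.transition_stage(
        name="my-model",
        version="1",
        stage=ml.Stage.PRODUCTION,
        archive_existing_versions=True,
        comment="promoted after approval",
    )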
 
     .. py:method:: update_comment(id: str, comment: str) -> UpdateCommentResponse
 
@@ -743,16 +743,16 @@ that also accepts a comment associated with the transition to be recorded.",
             w.model_registry.delete_comment(id=created.comment.id)
 
         Update a comment.
-
-Post an edit to a comment on a model version.
-
-:param id: str
-  Unique identifier of an activity
-:param comment: str
-  User-provided comment on the action.
-
-:returns: :class:`UpdateCommentResponse`
-
+        
+        Post an edit to a comment on a model version.
+        
+        :param id: str
+          Unique identifier of an activity
+        :param comment: str
+          User-provided comment on the action.
+        
+        :returns: :class:`UpdateCommentResponse`
+        
 
     .. py:method:: update_model(name: str [, description: Optional[str]])
 
@@ -776,16 +776,16 @@ Post an edit to a comment on a model version.
                                                   version=created.model_version.version)
 
         Update model.
-
-Updates a registered model.
-
-:param name: str
-  Registered model unique name identifier.
-:param description: str (optional)
-  If provided, updates the description for this `registered_model`.
-
-
-
+        
+        Updates a registered model.
+        
+        :param name: str
+          Registered model unique name identifier.
+        :param description: str (optional)
+          If provided, updates the description for this `registered_model`.
+        
+        
+        
 
     .. py:method:: update_model_version(name: str, version: str [, description: Optional[str]])
 
@@ -809,32 +809,32 @@ Updates a registered model.
                                                   version=created.model_version.version)
 
         Update model version.
-
-Updates the model version.
-
-:param name: str
-  Name of the registered model
-:param version: str
-  Model version number
-:param description: str (optional)
-  If provided, updates the description for this `registered_model`.
-
-
-
+        
+        Updates the model version.
+        
+        :param name: str
+          Name of the registered model
+        :param version: str
+          Model version number
+        :param description: str (optional)
+          If provided, updates the description for this `registered_model`.
+        
+        
+        
 
     .. py:method:: update_permissions(registered_model_id: str [, access_control_list: Optional[List[RegisteredModelAccessControlRequest]]]) -> RegisteredModelPermissions
 
         Update registered model permissions.
-
-Updates the permissions on a registered model. Registered models can inherit permissions from their
-root object.
-
-:param registered_model_id: str
-  The registered model for which to get or manage permissions.
-:param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
-
-:returns: :class:`RegisteredModelPermissions`
-
+        
+        Updates the permissions on a registered model. Registered models can inherit permissions from their
+        root object.
+        
+        :param registered_model_id: str
+          The registered model for which to get or manage permissions.
+        :param access_control_list: List[:class:`RegisteredModelAccessControlRequest`] (optional)
+        
+        :returns: :class:`RegisteredModelPermissions`
+        
 
     .. py:method:: update_webhook(id: str [, description: Optional[str], events: Optional[List[RegistryWebhookEvent]], http_url_spec: Optional[HttpUrlSpec], job_spec: Optional[JobSpec], status: Optional[RegistryWebhookStatus]])
 
@@ -860,53 +860,54 @@ root object.
             w.model_registry.delete_webhook(id=created.webhook.id)
 
         Update a webhook.
-
-**NOTE:** This endpoint is in Public Preview.
-
-Updates a registry webhook.
-
-:param id: str
-  Webhook ID
-:param description: str (optional)
-  User-specified description for the webhook.
-:param events: List[:class:`RegistryWebhookEvent`] (optional)
-  Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
-  created for the associated model.
-  
-  * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
-  
-  * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
-  
-  * `COMMENT_CREATED`: A user wrote a comment on a registered model.
-  
-  * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
-  specified for a registry-wide webhook, which can be created by not specifying a model name in the
-  create request.
-  
-  * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
-  
-  * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
-  
-  * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
-  staging.
-  
-  * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
-  production.
-  
-  * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
-:param http_url_spec: :class:`HttpUrlSpec` (optional)
-:param job_spec: :class:`JobSpec` (optional)
-:param status: :class:`RegistryWebhookStatus` (optional)
-  Enable or disable triggering the webhook, or put the webhook into test mode. The default is
-  `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
-  
-  * `DISABLED`: Webhook is not triggered.
-  
-  * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
-  event.
-
-
+        
+        **NOTE:** This endpoint is in Public Preview.
+        
+        Updates a registry webhook.
+        
+        :param id: str
+          Webhook ID
+        :param description: str (optional)
+          User-specified description for the webhook.
+        :param events: List[:class:`RegistryWebhookEvent`] (optional)
+          Events that can trigger a registry webhook: * `MODEL_VERSION_CREATED`: A new model version was
+          created for the associated model.
+          
+          * `MODEL_VERSION_TRANSITIONED_STAGE`: A model version’s stage was changed.
+          
+          * `TRANSITION_REQUEST_CREATED`: A user requested a model version’s stage be transitioned.
+          
+          * `COMMENT_CREATED`: A user wrote a comment on a registered model.
+          
+          * `REGISTERED_MODEL_CREATED`: A new registered model was created. This event type can only be
+          specified for a registry-wide webhook, which can be created by not specifying a model name in the
+          create request.
+          
+          * `MODEL_VERSION_TAG_SET`: A user set a tag on the model version.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_STAGING`: A model version was transitioned to staging.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_PRODUCTION`: A model version was transitioned to production.
+          
+          * `MODEL_VERSION_TRANSITIONED_TO_ARCHIVED`: A model version was archived.
+          
+          * `TRANSITION_REQUEST_TO_STAGING_CREATED`: A user requested a model version be transitioned to
+          staging.
+          
+          * `TRANSITION_REQUEST_TO_PRODUCTION_CREATED`: A user requested a model version be transitioned to
+          production.
+          
+          * `TRANSITION_REQUEST_TO_ARCHIVED_CREATED`: A user requested a model version be archived.
+        :param http_url_spec: :class:`HttpUrlSpec` (optional)
+        :param job_spec: :class:`JobSpec` (optional)
+        :param status: :class:`RegistryWebhookStatus` (optional)
+          Enable or disable triggering the webhook, or put the webhook into test mode. The default is
+          `ACTIVE`: * `ACTIVE`: Webhook is triggered when an associated event happens.
+          
+          * `DISABLED`: Webhook is not triggered.
+          
+          * `TEST_MODE`: Webhook can be triggered through the test endpoint, but is not triggered on a real
+          event.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index 4d261b0b4..ec31991ef 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -5,15 +5,15 @@
 .. py:class:: PipelinesAPI
 
     The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines.
-
-Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
-pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
-orchestration, cluster management, monitoring, data quality, and error handling.
-
-Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
-manages how your data is transformed based on a target schema you define for each processing step. You can
-also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
-data quality and specify how to handle records that fail those expectations.
+    
+    Delta Live Tables is a framework for building reliable, maintainable, and testable data processing
+    pipelines. You define the transformations to perform on your data, and Delta Live Tables manages task
+    orchestration, cluster management, monitoring, data quality, and error handling.
+    
+    Instead of defining your data pipelines using a series of separate Apache Spark tasks, Delta Live Tables
+    manages how your data is transformed based on a target schema you define for each processing step. You can
+    also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
+    data quality and specify how to handle records that fail those expectations.
 
     .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
@@ -49,84 +49,84 @@ data quality and specify how to handle records that fail those expectations.
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Create a pipeline.
-
-Creates a new data processing pipeline based on the requested configuration. If successful, this
-method returns the ID of the new pipeline.
-
-:param allow_duplicate_names: bool (optional)
-  If false, deployment will fail if name conflicts with that of another pipeline.
-:param budget_policy_id: str (optional)
-  Budget policy of this pipeline.
-:param catalog: str (optional)
-  A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
-  in this pipeline are published to a `target` schema inside `catalog` (for example,
-  `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
-:param channel: str (optional)
-  DLT Release Channel that specifies which version to use.
-:param clusters: List[:class:`PipelineCluster`] (optional)
-  Cluster settings for this pipeline deployment.
-:param configuration: Dict[str,str] (optional)
-  String-String configuration for this pipeline execution.
-:param continuous: bool (optional)
-  Whether the pipeline is continuous or triggered. This replaces `trigger`.
-:param deployment: :class:`PipelineDeployment` (optional)
-  Deployment type of this pipeline.
-:param development: bool (optional)
-  Whether the pipeline is in Development mode. Defaults to false.
-:param dry_run: bool (optional)
-:param edition: str (optional)
-  Pipeline product edition.
-:param filters: :class:`Filters` (optional)
-  Filters on which Pipeline packages to include in the deployed graph.
-:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-  The definition of a gateway pipeline to support change data capture.
-:param id: str (optional)
-  Unique identifier for this pipeline.
-:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
-  The configuration for a managed ingestion pipeline. These settings cannot be used with the
-  'libraries', 'target' or 'catalog' settings.
-:param libraries: List[:class:`PipelineLibrary`] (optional)
-  Libraries or code needed by this deployment.
-:param name: str (optional)
-  Friendly identifier for this pipeline.
-:param notifications: List[:class:`Notifications`] (optional)
-  List of notification settings for this pipeline.
-:param photon: bool (optional)
-  Whether Photon is enabled for this pipeline.
-:param restart_window: :class:`RestartWindow` (optional)
-  Restart window of this pipeline.
-:param run_as: :class:`RunAs` (optional)
-  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-  
-  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-  thrown.
-:param schema: str (optional)
-  The default schema (database) where tables are read from or published to. The presence of this field
-  implies that the pipeline is in direct publishing mode.
-:param serverless: bool (optional)
-  Whether serverless compute is enabled for this pipeline.
-:param storage: str (optional)
-  DBFS root directory for storing checkpoints and tables.
-:param target: str (optional)
-  Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-  to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
-:param trigger: :class:`PipelineTrigger` (optional)
-  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-
-:returns: :class:`CreatePipelineResponse`
-
+        
+        Creates a new data processing pipeline based on the requested configuration. If successful, this
+        method returns the ID of the new pipeline.
+        
+        :param allow_duplicate_names: bool (optional)
+          If false, deployment will fail if name conflicts with that of another pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
+        :param catalog: str (optional)
+          A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
+          in this pipeline are published to a `target` schema inside `catalog` (for example,
+          `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
+        :param channel: str (optional)
+          DLT Release Channel that specifies which version to use.
+        :param clusters: List[:class:`PipelineCluster`] (optional)
+          Cluster settings for this pipeline deployment.
+        :param configuration: Dict[str,str] (optional)
+          String-String configuration for this pipeline execution.
+        :param continuous: bool (optional)
+          Whether the pipeline is continuous or triggered. This replaces `trigger`.
+        :param deployment: :class:`PipelineDeployment` (optional)
+          Deployment type of this pipeline.
+        :param development: bool (optional)
+          Whether the pipeline is in Development mode. Defaults to false.
+        :param dry_run: bool (optional)
+        :param edition: str (optional)
+          Pipeline product edition.
+        :param filters: :class:`Filters` (optional)
+          Filters on which Pipeline packages to include in the deployed graph.
+        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+          The definition of a gateway pipeline to support change data capture.
+        :param id: str (optional)
+          Unique identifier for this pipeline.
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
+          The configuration for a managed ingestion pipeline. These settings cannot be used with the
+          'libraries', 'target' or 'catalog' settings.
+        :param libraries: List[:class:`PipelineLibrary`] (optional)
+          Libraries or code needed by this deployment.
+        :param name: str (optional)
+          Friendly identifier for this pipeline.
+        :param notifications: List[:class:`Notifications`] (optional)
+          List of notification settings for this pipeline.
+        :param photon: bool (optional)
+          Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
+        :param serverless: bool (optional)
+          Whether serverless compute is enabled for this pipeline.
+        :param storage: str (optional)
+          DBFS root directory for storing checkpoints and tables.
+        :param target: str (optional)
+          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
+          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+        :param trigger: :class:`PipelineTrigger` (optional)
+          Which pipeline trigger to use. Deprecated: Use `continuous` instead.
+        
+        :returns: :class:`CreatePipelineResponse`
+        
 
     .. py:method:: delete(pipeline_id: str)
 
         Delete a pipeline.
-
-Deletes a pipeline.
-
-:param pipeline_id: str
-
-
-
+        
+        Deletes a pipeline.
+        
+        :param pipeline_id: str
+        
+        
+        
 
     .. py:method:: get(pipeline_id: str) -> GetPipelineResponse
 
@@ -164,49 +164,49 @@ Deletes a pipeline.
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Get a pipeline.
-
-:param pipeline_id: str
-
-:returns: :class:`GetPipelineResponse`
-
+        
+        :param pipeline_id: str
+        
+        :returns: :class:`GetPipelineResponse`
+        
 
     .. py:method:: get_permission_levels(pipeline_id: str) -> GetPipelinePermissionLevelsResponse
 
         Get pipeline permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param pipeline_id: str
-  The pipeline for which to get or manage permissions.
-
-:returns: :class:`GetPipelinePermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param pipeline_id: str
+          The pipeline for which to get or manage permissions.
+        
+        :returns: :class:`GetPipelinePermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(pipeline_id: str) -> PipelinePermissions
 
         Get pipeline permissions.
-
-Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
-
-:param pipeline_id: str
-  The pipeline for which to get or manage permissions.
-
-:returns: :class:`PipelinePermissions`
-
+        
+        Gets the permissions of a pipeline. Pipelines can inherit permissions from their root object.
+        
+        :param pipeline_id: str
+          The pipeline for which to get or manage permissions.
+        
+        :returns: :class:`PipelinePermissions`
+        
 
     .. py:method:: get_update(pipeline_id: str, update_id: str) -> GetUpdateResponse
 
         Get a pipeline update.
-
-Gets an update from an active pipeline.
-
-:param pipeline_id: str
-  The ID of the pipeline.
-:param update_id: str
-  The ID of the update.
-
-:returns: :class:`GetUpdateResponse`
-
+        
+        Gets an update from an active pipeline.
+        
+        :param pipeline_id: str
+          The ID of the pipeline.
+        :param update_id: str
+          The ID of the update.
+        
+        :returns: :class:`GetUpdateResponse`
+        
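
A minimal sketch of inspecting one update; both IDs are placeholders, and the nested `update.state` attribute on the response is an assumption:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    resp = w.pipelines.get_update(pipeline_id="<pipeline-id>", update_id="<update-id>")
    # Assumed shape: the response wraps an UpdateInfo with the update's state.
    print(resp.update.state)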
 
     .. py:method:: list_pipeline_events(pipeline_id: str [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[PipelineEvent]
 
@@ -244,31 +244,31 @@ Gets an update from an active pipeline.
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         List pipeline events.
-
-Retrieves events for a pipeline.
-
-:param pipeline_id: str
-:param filter: str (optional)
-  Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
-  are: 1. level='INFO' (or WARN or ERROR) 2. level in ('INFO', 'WARN') 3. id='[event-id]' 4. timestamp
-  > 'TIMESTAMP' (or >=,<,<=,=)
-  
-  Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp>
-  '2021-07-22T06:37:33.083Z'
-:param max_results: int (optional)
-  Max number of entries to return in a single page. The system may return fewer than max_results
-  events in a response, even if there are more events available.
-:param order_by: List[str] (optional)
-  A string indicating a sort order by timestamp for the results, for example, ["timestamp asc"]. The
-  sort order can be ascending or descending. By default, events are returned in descending order by
-  timestamp.
-:param page_token: str (optional)
-  Page token returned by previous call. This field is mutually exclusive with all fields in this
-  request except max_results. An error is returned if any fields other than max_results are set when
-  this field is set.
-
-:returns: Iterator over :class:`PipelineEvent`
-
+        
+        Retrieves events for a pipeline.
+        
+        :param pipeline_id: str
+        :param filter: str (optional)
+          Criteria to select a subset of results, expressed using a SQL-like syntax. The supported filters
+          are:
+          
+          1. level='INFO' (or WARN or ERROR)
+          2. level in ('INFO', 'WARN')
+          3. id='[event-id]'
+          4. timestamp > 'TIMESTAMP' (or >=, <, <=, =)
+          
+          Composite expressions are supported, for example: level in ('ERROR', 'WARN') AND timestamp>
+          '2021-07-22T06:37:33.083Z'
+        :param max_results: int (optional)
+          Max number of entries to return in a single page. The system may return fewer than max_results
+          events in a response, even if there are more events available.
+        :param order_by: List[str] (optional)
+          A string indicating a sort order by timestamp for the results, for example, ["timestamp asc"]. The
+          sort order can be ascending or descending. By default, events are returned in descending order by
+          timestamp.
+        :param page_token: str (optional)
+          Page token returned by previous call. This field is mutually exclusive with all fields in this
+          request except max_results. An error is returned if any fields other than max_results are set when
+          this field is set.
+        
+        :returns: Iterator over :class:`PipelineEvent`
+        
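+        A minimal usage sketch, assuming a configured `WorkspaceClient` and a placeholder pipeline ID; the
+        filter string follows the SQL-like syntax described above:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+        
+            w = WorkspaceClient()
+            # Iterate over ERROR-level events; the iterator handles paging transparently.
+            for event in w.pipelines.list_pipeline_events(pipeline_id="<your-pipeline-id>",
+                                                          filter="level='ERROR'"):
+                print(event.message)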
 
     .. py:method:: list_pipelines( [, filter: Optional[str], max_results: Optional[int], order_by: Optional[List[str]], page_token: Optional[str]]) -> Iterator[PipelineStateInfo]
 
@@ -285,102 +285,102 @@ Retrieves events for a pipeline.
             all = w.pipelines.list_pipelines(pipelines.ListPipelinesRequest())
 
         List pipelines.
-
-Lists pipelines defined in the Delta Live Tables system.
-
-:param filter: str (optional)
-  Select a subset of results based on the specified criteria. The supported filters are:
-  
-  * `notebook=''` to select pipelines that reference the provided notebook path. * `name LIKE
-  '[pattern]'` to select pipelines with a name that matches pattern. Wildcards are supported, for
-  example: `name LIKE '%shopping%'`
-  
-  Composite filters are not supported. This field is optional.
-:param max_results: int (optional)
-  The maximum number of entries to return in a single page. The system may return fewer than
-  max_results events in a response, even if there are more events available. This field is optional.
-  The default value is 25. The maximum value is 100. An error is returned if the value of max_results
-  is greater than 100.
-:param order_by: List[str] (optional)
-  A list of strings specifying the order of results. Supported order_by fields are id and name. The
-  default is id asc. This field is optional.
-:param page_token: str (optional)
-  Page token returned by previous call
-
-:returns: Iterator over :class:`PipelineStateInfo`
-
+        
+        Lists pipelines defined in the Delta Live Tables system.
+        
+        :param filter: str (optional)
+          Select a subset of results based on the specified criteria. The supported filters are:
+          
+          * `notebook=''` to select pipelines that reference the provided notebook path.
+          * `name LIKE '[pattern]'` to select pipelines with a name that matches the pattern. Wildcards are
+            supported, for example: `name LIKE '%shopping%'`
+          
+          Composite filters are not supported. This field is optional.
+        :param max_results: int (optional)
+          The maximum number of entries to return in a single page. The system may return fewer than
+          max_results events in a response, even if there are more events available. This field is optional.
+          The default value is 25. The maximum value is 100. An error is returned if the value of max_results
+          is greater than 100.
+        :param order_by: List[str] (optional)
+          A list of strings specifying the order of results. Supported order_by fields are id and name. The
+          default is id asc. This field is optional.
+        :param page_token: str (optional)
+          Page token returned by the previous call.
+        
+        :returns: Iterator over :class:`PipelineStateInfo`
+        
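+        A minimal sketch of the `name LIKE` filter described above, assuming a configured `WorkspaceClient`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+        
+            w = WorkspaceClient()
+            # Wildcard match on the pipeline name; max_results is capped at 100 per page.
+            for p in w.pipelines.list_pipelines(filter="name LIKE '%shopping%'", max_results=100):
+                print(p.pipeline_id, p.name)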
 
     .. py:method:: list_updates(pipeline_id: str [, max_results: Optional[int], page_token: Optional[str], until_update_id: Optional[str]]) -> ListUpdatesResponse
 
         List pipeline updates.
-
-List updates for an active pipeline.
-
-:param pipeline_id: str
-  The pipeline to return updates for.
-:param max_results: int (optional)
-  Max number of entries to return in a single page.
-:param page_token: str (optional)
-  Page token returned by previous call
-:param until_update_id: str (optional)
-  If present, returns updates until and including this update_id.
-
-:returns: :class:`ListUpdatesResponse`
-
+        
+        List updates for an active pipeline.
+        
+        :param pipeline_id: str
+          The pipeline to return updates for.
+        :param max_results: int (optional)
+          Max number of entries to return in a single page.
+        :param page_token: str (optional)
+          Page token returned by the previous call.
+        :param until_update_id: str (optional)
+          If present, returns updates up to and including this update_id.
+        
+        :returns: :class:`ListUpdatesResponse`
+        
 
     .. py:method:: set_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions
 
         Set pipeline permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param pipeline_id: str
-  The pipeline for which to get or manage permissions.
-:param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-
-:returns: :class:`PipelinePermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param pipeline_id: str
+          The pipeline for which to get or manage permissions.
+        :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
+        
+        :returns: :class:`PipelinePermissions`
+        
 
     .. py:method:: start_update(pipeline_id: str [, cause: Optional[StartUpdateCause], full_refresh: Optional[bool], full_refresh_selection: Optional[List[str]], refresh_selection: Optional[List[str]], validate_only: Optional[bool]]) -> StartUpdateResponse
 
         Start a pipeline.
-
-Starts a new update for the pipeline. If there is already an active update for the pipeline, the
-request will fail and the active update will remain running.
-
-:param pipeline_id: str
-:param cause: :class:`StartUpdateCause` (optional)
-:param full_refresh: bool (optional)
-  If true, this update will reset all tables before running.
-:param full_refresh_selection: List[str] (optional)
-  A list of tables to update with fullRefresh. If both refresh_selection and full_refresh_selection
-  are empty, this is a full graph update. Full Refresh on a table means that the states of the table
-  will be reset before the refresh.
-:param refresh_selection: List[str] (optional)
-  A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection
-  are empty, this is a full graph update. Full Refresh on a table means that the states of the table
-  will be reset before the refresh.
-:param validate_only: bool (optional)
-  If true, this update only validates the correctness of pipeline source code but does not materialize
-  or publish any datasets.
-
-:returns: :class:`StartUpdateResponse`
-
+        
+        Starts a new update for the pipeline. If there is already an active update for the pipeline, the
+        request will fail and the active update will remain running.
+        
+        :param pipeline_id: str
+        :param cause: :class:`StartUpdateCause` (optional)
+        :param full_refresh: bool (optional)
+          If true, this update will reset all tables before running.
+        :param full_refresh_selection: List[str] (optional)
+          A list of tables to update with fullRefresh. If both refresh_selection and full_refresh_selection
+          are empty, this is a full graph update. Full Refresh on a table means that the states of the table
+          will be reset before the refresh.
+        :param refresh_selection: List[str] (optional)
+          A list of tables to update without fullRefresh. If both refresh_selection and full_refresh_selection
+          are empty, this is a full graph update. Full Refresh on a table means that the states of the table
+          will be reset before the refresh.
+        :param validate_only: bool (optional)
+          If true, this update only validates the correctness of pipeline source code but does not materialize
+          or publish any datasets.
+        
+        :returns: :class:`StartUpdateResponse`
+        
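+        A minimal sketch, assuming a configured `WorkspaceClient` and a placeholder pipeline ID:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+        
+            w = WorkspaceClient()
+            # Validate the pipeline source code without materializing or publishing any datasets.
+            resp = w.pipelines.start_update(pipeline_id="<your-pipeline-id>", validate_only=True)
+            print(resp.update_id)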
 
     .. py:method:: stop(pipeline_id: str) -> Wait[GetPipelineResponse]
 
         Stop a pipeline.
-
-Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
-request is a no-op.
-
-:param pipeline_id: str
-
-:returns:
-  Long-running operation waiter for :class:`GetPipelineResponse`.
-  See :method:wait_get_pipeline_idle for more details.
-
+        
+        Stops the pipeline by canceling the active update. If there is no active update for the pipeline, this
+        request is a no-op.
+        
+        :param pipeline_id: str
+        
+        :returns:
+          Long-running operation waiter for :class:`GetPipelineResponse`.
+          See :method:wait_get_pipeline_idle for more details.
+        
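+        A minimal sketch using the blocking variant, assuming a configured `WorkspaceClient`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+        
+            w = WorkspaceClient()
+            # stop() returns a waiter; stop_and_wait() blocks until the pipeline is idle.
+            pipeline = w.pipelines.stop_and_wait(pipeline_id="<your-pipeline-id>")
+            print(pipeline.state)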
 
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
@@ -432,89 +432,89 @@ request is a no-op.
             w.pipelines.delete(pipeline_id=created.pipeline_id)
 
         Edit a pipeline.
-
-Updates a pipeline with the supplied configuration.
-
-:param pipeline_id: str
-  Unique identifier for this pipeline.
-:param allow_duplicate_names: bool (optional)
-  If false, deployment will fail if name has changed and conflicts the name of another pipeline.
-:param budget_policy_id: str (optional)
-  Budget policy of this pipeline.
-:param catalog: str (optional)
-  A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
-  in this pipeline are published to a `target` schema inside `catalog` (for example,
-  `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
-:param channel: str (optional)
-  DLT Release Channel that specifies which version to use.
-:param clusters: List[:class:`PipelineCluster`] (optional)
-  Cluster settings for this pipeline deployment.
-:param configuration: Dict[str,str] (optional)
-  String-String configuration for this pipeline execution.
-:param continuous: bool (optional)
-  Whether the pipeline is continuous or triggered. This replaces `trigger`.
-:param deployment: :class:`PipelineDeployment` (optional)
-  Deployment type of this pipeline.
-:param development: bool (optional)
-  Whether the pipeline is in Development mode. Defaults to false.
-:param edition: str (optional)
-  Pipeline product edition.
-:param expected_last_modified: int (optional)
-  If present, the last-modified time of the pipeline settings before the edit. If the settings were
-  modified after that time, then the request will fail with a conflict.
-:param filters: :class:`Filters` (optional)
-  Filters on which Pipeline packages to include in the deployed graph.
-:param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-  The definition of a gateway pipeline to support change data capture.
-:param id: str (optional)
-  Unique identifier for this pipeline.
-:param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
-  The configuration for a managed ingestion pipeline. These settings cannot be used with the
-  'libraries', 'target' or 'catalog' settings.
-:param libraries: List[:class:`PipelineLibrary`] (optional)
-  Libraries or code needed by this deployment.
-:param name: str (optional)
-  Friendly identifier for this pipeline.
-:param notifications: List[:class:`Notifications`] (optional)
-  List of notification settings for this pipeline.
-:param photon: bool (optional)
-  Whether Photon is enabled for this pipeline.
-:param restart_window: :class:`RestartWindow` (optional)
-  Restart window of this pipeline.
-:param run_as: :class:`RunAs` (optional)
-  Write-only setting, available only in Create/Update calls. Specifies the user or service principal
-  that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
-  
-  Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
-  thrown.
-:param schema: str (optional)
-  The default schema (database) where tables are read from or published to. The presence of this field
-  implies that the pipeline is in direct publishing mode.
-:param serverless: bool (optional)
-  Whether serverless compute is enabled for this pipeline.
-:param storage: str (optional)
-  DBFS root directory for storing checkpoints and tables.
-:param target: str (optional)
-  Target schema (database) to add tables in this pipeline to. If not specified, no data is published
-  to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
-:param trigger: :class:`PipelineTrigger` (optional)
-  Which pipeline trigger to use. Deprecated: Use `continuous` instead.
-
-
-
+        
+        Updates a pipeline with the supplied configuration.
+        
+        :param pipeline_id: str
+          Unique identifier for this pipeline.
+        :param allow_duplicate_names: bool (optional)
+          If false, deployment will fail if the name has changed and conflicts with the name of another
+          pipeline.
+        :param budget_policy_id: str (optional)
+          Budget policy of this pipeline.
+        :param catalog: str (optional)
+          A catalog in Unity Catalog to publish data from this pipeline to. If `target` is specified, tables
+          in this pipeline are published to a `target` schema inside `catalog` (for example,
+          `catalog`.`target`.`table`). If `target` is not specified, no data is published to Unity Catalog.
+        :param channel: str (optional)
+          DLT Release Channel that specifies which version to use.
+        :param clusters: List[:class:`PipelineCluster`] (optional)
+          Cluster settings for this pipeline deployment.
+        :param configuration: Dict[str,str] (optional)
+          String-String configuration for this pipeline execution.
+        :param continuous: bool (optional)
+          Whether the pipeline is continuous or triggered. This replaces `trigger`.
+        :param deployment: :class:`PipelineDeployment` (optional)
+          Deployment type of this pipeline.
+        :param development: bool (optional)
+          Whether the pipeline is in Development mode. Defaults to false.
+        :param edition: str (optional)
+          Pipeline product edition.
+        :param expected_last_modified: int (optional)
+          If present, the last-modified time of the pipeline settings before the edit. If the settings were
+          modified after that time, then the request will fail with a conflict.
+        :param filters: :class:`Filters` (optional)
+          Filters on which Pipeline packages to include in the deployed graph.
+        :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
+          The definition of a gateway pipeline to support change data capture.
+        :param id: str (optional)
+          Unique identifier for this pipeline.
+        :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
+          The configuration for a managed ingestion pipeline. These settings cannot be used with the
+          'libraries', 'target' or 'catalog' settings.
+        :param libraries: List[:class:`PipelineLibrary`] (optional)
+          Libraries or code needed by this deployment.
+        :param name: str (optional)
+          Friendly identifier for this pipeline.
+        :param notifications: List[:class:`Notifications`] (optional)
+          List of notification settings for this pipeline.
+        :param photon: bool (optional)
+          Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
+        :param schema: str (optional)
+          The default schema (database) where tables are read from or published to. The presence of this field
+          implies that the pipeline is in direct publishing mode.
+        :param serverless: bool (optional)
+          Whether serverless compute is enabled for this pipeline.
+        :param storage: str (optional)
+          DBFS root directory for storing checkpoints and tables.
+        :param target: str (optional)
+          Target schema (database) to add tables in this pipeline to. If not specified, no data is published
+          to the Hive metastore or Unity Catalog. To publish to Unity Catalog, also specify `catalog`.
+        :param trigger: :class:`PipelineTrigger` (optional)
+          Which pipeline trigger to use. Deprecated: Use `continuous` instead.
+        
+        
+        
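+        A minimal sketch that renames a pipeline, assuming a configured `WorkspaceClient`; reading the
+        current settings with `get()` first is a common pattern when editing:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+        
+            w = WorkspaceClient()
+            current = w.pipelines.get(pipeline_id="<your-pipeline-id>")
+            w.pipelines.update(pipeline_id=current.pipeline_id,
+                               name=f"{current.spec.name} (renamed)",
+                               libraries=current.spec.libraries)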
 
     .. py:method:: update_permissions(pipeline_id: str [, access_control_list: Optional[List[PipelineAccessControlRequest]]]) -> PipelinePermissions
 
         Update pipeline permissions.
-
-Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
-
-:param pipeline_id: str
-  The pipeline for which to get or manage permissions.
-:param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
-
-:returns: :class:`PipelinePermissions`
-
+        
+        Updates the permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        
+        :param pipeline_id: str
+          The pipeline for which to get or manage permissions.
+        :param access_control_list: List[:class:`PipelineAccessControlRequest`] (optional)
+        
+        :returns: :class:`PipelinePermissions`
+        
 
     .. py:method:: wait_get_pipeline_idle(pipeline_id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GetPipelineResponse], None]]) -> GetPipelineResponse
 
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index ce88af34b..687976f5d 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -5,54 +5,54 @@
 .. py:class:: ServingEndpointsExt
 
     The Serving Endpoints API allows you to create, update, and delete model serving endpoints.
-
-You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog.
-Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means
-the endpoints and associated compute resources are fully managed by Databricks and will not appear in your
-cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model
-Registry, called served entities. A serving endpoint can have at most ten served entities. You can
-configure traffic settings to define how requests should be routed to your served entities behind an
-endpoint. Additionally, you can configure the scale of resources that should be applied to each served
-entity.
+    
+    You can use a serving endpoint to serve models from the Databricks Model Registry or from Unity Catalog.
+    Endpoints expose the underlying models as scalable REST API endpoints using serverless compute. This means
+    the endpoints and associated compute resources are fully managed by Databricks and will not appear in your
+    cloud account. A serving endpoint can consist of one or more MLflow models from the Databricks Model
+    Registry, called served entities. A serving endpoint can have at most ten served entities. You can
+    configure traffic settings to define how requests should be routed to your served entities behind an
+    endpoint. Additionally, you can configure the scale of resources that should be applied to each served
+    entity.
 
     .. py:method:: build_logs(name: str, served_model_name: str) -> BuildLogsResponse
 
         Get build logs for a served model.
-
-Retrieves the build logs associated with the provided served model.
-
-:param name: str
-  The name of the serving endpoint that the served model belongs to. This field is required.
-:param served_model_name: str
-  The name of the served model that build logs will be retrieved for. This field is required.
-
-:returns: :class:`BuildLogsResponse`
-
+        
+        Retrieves the build logs associated with the provided served model.
+        
+        :param name: str
+          The name of the serving endpoint that the served model belongs to. This field is required.
+        :param served_model_name: str
+          The name of the served model that build logs will be retrieved for. This field is required.
+        
+        :returns: :class:`BuildLogsResponse`
+        
 
     .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
-
-:param name: str
-  The name of the serving endpoint. This field is required and must be unique across a Databricks
-  workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-:param ai_gateway: :class:`AiGatewayConfig` (optional)
-  The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
-  throughput endpoints are currently supported.
-:param config: :class:`EndpointCoreConfigInput` (optional)
-  The core config of the serving endpoint.
-:param rate_limits: List[:class:`RateLimit`] (optional)
-  Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
-  Gateway to manage rate limits.
-:param route_optimized: bool (optional)
-  Enable route optimization for the serving endpoint.
-:param tags: List[:class:`EndpointTag`] (optional)
-  Tags to be attached to the serving endpoint and automatically propagated to billing logs.
-
-:returns:
-  Long-running operation waiter for :class:`ServingEndpointDetailed`.
-  See :method:wait_get_serving_endpoint_not_updating for more details.
-
+        
+        :param name: str
+          The name of the serving endpoint. This field is required and must be unique across a Databricks
+          workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+          throughput endpoints are currently supported.
+        :param config: :class:`EndpointCoreConfigInput` (optional)
+          The core config of the serving endpoint.
+        :param rate_limits: List[:class:`RateLimit`] (optional)
+          Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
+          Gateway to manage rate limits.
+        :param route_optimized: bool (optional)
+          Enable route optimization for the serving endpoint.
+        :param tags: List[:class:`EndpointTag`] (optional)
+          Tags to be attached to the serving endpoint and automatically propagated to billing logs.
+        
+        :returns:
+          Long-running operation waiter for :class:`ServingEndpointDetailed`.
+          See :method:wait_get_serving_endpoint_not_updating for more details.
+        
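+        A minimal sketch, assuming a configured `WorkspaceClient`; the entity name and version are
+        placeholders:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import EndpointCoreConfigInput, ServedEntityInput
+        
+            w = WorkspaceClient()
+            # create() returns a waiter; create_and_wait() blocks until the endpoint is ready.
+            endpoint = w.serving_endpoints.create_and_wait(
+                name="my-endpoint",
+                config=EndpointCoreConfigInput(served_entities=[
+                    ServedEntityInput(entity_name="<catalog.schema.model>",
+                                      entity_version="1",
+                                      workload_size="Small",
+                                      scale_to_zero_enabled=True)
+                ]))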
 
     .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
@@ -60,36 +60,36 @@ Retrieves the build logs associated with the provided served model.
     .. py:method:: delete(name: str)
 
         Delete a serving endpoint.
-
-:param name: str
-
-
-
+        
+        :param name: str
+        
+        
+        
 
     .. py:method:: export_metrics(name: str) -> ExportMetricsResponse
 
         Get metrics of a serving endpoint.
-
-Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
-OpenMetrics exposition format.
-
-:param name: str
-  The name of the serving endpoint to retrieve metrics for. This field is required.
-
-:returns: :class:`ExportMetricsResponse`
-
+        
+        Retrieves the metrics associated with the provided serving endpoint in either Prometheus or
+        OpenMetrics exposition format.
+        
+        :param name: str
+          The name of the serving endpoint to retrieve metrics for. This field is required.
+        
+        :returns: :class:`ExportMetricsResponse`
+        
 
     .. py:method:: get(name: str) -> ServingEndpointDetailed
 
         Get a single serving endpoint.
-
-Retrieves the details for a single serving endpoint.
-
-:param name: str
-  The name of the serving endpoint. This field is required.
-
-:returns: :class:`ServingEndpointDetailed`
-
+        
+        Retrieves the details for a single serving endpoint.
+        
+        :param name: str
+          The name of the serving endpoint. This field is required.
+        
+        :returns: :class:`ServingEndpointDetailed`
+        
 
     .. py:method:: get_langchain_chat_open_ai_client(model)
 
@@ -100,228 +100,228 @@ Retrieves the details for a single serving endpoint.
     .. py:method:: get_open_api(name: str) -> GetOpenApiResponse
 
         Get the schema for a serving endpoint.
-
-Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
-the supported paths, input and output format and datatypes.
-
-:param name: str
-  The name of the serving endpoint that the served model belongs to. This field is required.
-
-:returns: :class:`GetOpenApiResponse`
-
+        
+        Get the query schema of the serving endpoint in OpenAPI format. The schema contains information
+        about the supported paths, input and output formats, and data types.
+        
+        :param name: str
+          The name of the serving endpoint that the served model belongs to. This field is required.
+        
+        :returns: :class:`GetOpenApiResponse`
+        
 
     .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse
 
         Get serving endpoint permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param serving_endpoint_id: str
-  The serving endpoint for which to get or manage permissions.
-
-:returns: :class:`GetServingEndpointPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param serving_endpoint_id: str
+          The serving endpoint for which to get or manage permissions.
+        
+        :returns: :class:`GetServingEndpointPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(serving_endpoint_id: str) -> ServingEndpointPermissions
 
         Get serving endpoint permissions.
-
-Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
-object.
-
-:param serving_endpoint_id: str
-  The serving endpoint for which to get or manage permissions.
-
-:returns: :class:`ServingEndpointPermissions`
-
+        
+        Gets the permissions of a serving endpoint. Serving endpoints can inherit permissions from their root
+        object.
+        
+        :param serving_endpoint_id: str
+          The serving endpoint for which to get or manage permissions.
+        
+        :returns: :class:`ServingEndpointPermissions`
+        
 
     .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> Response
 
         Make external service calls using the credentials stored in a UC Connection.
-**NOTE:** Experimental: This API may change or be removed in a future release without warning.
-:param conn: str
-  The connection name to use. This is required to identify the external connection.
-:param method: :class:`ExternalFunctionRequestHttpMethod`
-  The HTTP method to use (e.g., 'GET', 'POST'). This is required.
-:param path: str
-  The relative path for the API endpoint. This is required.
-:param headers: Dict[str,str] (optional)
-  Additional headers for the request. If not provided, only auth headers from connections would be
-  passed.
-:param json: Dict[str,str] (optional)
-  JSON payload for the request.
-:param params: Dict[str,str] (optional)
-  Query parameters for the request.
-:returns: :class:`Response`
-
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only the auth headers from the connection are
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+        :returns: :class:`Response`
+        
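+        A minimal sketch, assuming a configured `WorkspaceClient` and an existing UC connection named
+        `my_connection`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
+        
+            w = WorkspaceClient()
+            # Auth headers come from the UC connection; extra headers and params are optional.
+            resp = w.serving_endpoints.http_request(conn="my_connection",
+                                                    method=ExternalFunctionRequestHttpMethod.GET,
+                                                    path="/api/status",
+                                                    params={"verbose": "true"})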
 
     .. py:method:: list() -> Iterator[ServingEndpoint]
 
         Get all serving endpoints.
-
-:returns: Iterator over :class:`ServingEndpoint`
-
+        
+        :returns: Iterator over :class:`ServingEndpoint`
+        
 
     .. py:method:: logs(name: str, served_model_name: str) -> ServerLogsResponse
 
         Get the latest logs for a served model.
-
-Retrieves the service logs associated with the provided served model.
-
-:param name: str
-  The name of the serving endpoint that the served model belongs to. This field is required.
-:param served_model_name: str
-  The name of the served model that logs will be retrieved for. This field is required.
-
-:returns: :class:`ServerLogsResponse`
-
+        
+        Retrieves the service logs associated with the provided served model.
+        
+        :param name: str
+          The name of the serving endpoint that the served model belongs to. This field is required.
+        :param served_model_name: str
+          The name of the served model that logs will be retrieved for. This field is required.
+        
+        :returns: :class:`ServerLogsResponse`
+        
 
     .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags
 
         Update tags of a serving endpoint.
-
-Used to batch add and delete tags from a serving endpoint with a single API call.
-
-:param name: str
-  The name of the serving endpoint who's tags to patch. This field is required.
-:param add_tags: List[:class:`EndpointTag`] (optional)
-  List of endpoint tags to add
-:param delete_tags: List[str] (optional)
-  List of tag keys to delete
-
-:returns: :class:`EndpointTags`
-
+        
+        Used to batch add and delete tags from a serving endpoint with a single API call.
+        
+        :param name: str
+          The name of the serving endpoint whose tags to patch. This field is required.
+        :param add_tags: List[:class:`EndpointTag`] (optional)
+          List of endpoint tags to add.
+        :param delete_tags: List[str] (optional)
+          List of tag keys to delete.
+        
+        :returns: :class:`EndpointTags`
+        
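+        A minimal sketch, assuming a configured `WorkspaceClient` and placeholder tag names:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import EndpointTag
+        
+            w = WorkspaceClient()
+            # Add one tag and delete another in a single call.
+            tags = w.serving_endpoints.patch(name="my-endpoint",
+                                             add_tags=[EndpointTag(key="team", value="data")],
+                                             delete_tags=["deprecated-tag"])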
 
     .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse
 
         Update rate limits of a serving endpoint.
-
-Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
-currently supported. For external models, use AI Gateway to manage rate limits.
-
-:param name: str
-  The name of the serving endpoint whose rate limits are being updated. This field is required.
-:param rate_limits: List[:class:`RateLimit`] (optional)
-  The list of endpoint rate limits.
-
-:returns: :class:`PutResponse`
-
+        
+        Used to update the rate limits of a serving endpoint. NOTE: Only foundation model endpoints are
+        currently supported. For external models, use AI Gateway to manage rate limits.
+        
+        :param name: str
+          The name of the serving endpoint whose rate limits are being updated. This field is required.
+        :param rate_limits: List[:class:`RateLimit`] (optional)
+          The list of endpoint rate limits.
+        
+        :returns: :class:`PutResponse`
+        
 
     .. py:method:: put_ai_gateway(name: str [, guardrails: Optional[AiGatewayGuardrails], inference_table_config: Optional[AiGatewayInferenceTableConfig], rate_limits: Optional[List[AiGatewayRateLimit]], usage_tracking_config: Optional[AiGatewayUsageTrackingConfig]]) -> PutAiGatewayResponse
 
         Update AI Gateway of a serving endpoint.
-
-Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
-throughput endpoints are currently supported.
-
-:param name: str
-  The name of the serving endpoint whose AI Gateway is being updated. This field is required.
-:param guardrails: :class:`AiGatewayGuardrails` (optional)
-  Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
-:param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
-  Configuration for payload logging using inference tables. Use these tables to monitor and audit data
-  being sent to and received from model APIs and to improve model quality.
-:param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
-  Configuration for rate limits which can be set to limit endpoint traffic.
-:param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
-  Configuration to enable usage tracking using system tables. These tables allow you to monitor
-  operational usage on endpoints and their associated costs.
-
-:returns: :class:`PutAiGatewayResponse`
-
+        
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+        throughput endpoints are currently supported.
+        
+        :param name: str
+          The name of the serving endpoint whose AI Gateway is being updated. This field is required.
+        :param guardrails: :class:`AiGatewayGuardrails` (optional)
+          Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and responses.
+        :param inference_table_config: :class:`AiGatewayInferenceTableConfig` (optional)
+          Configuration for payload logging using inference tables. Use these tables to monitor and audit data
+          being sent to and received from model APIs and to improve model quality.
+        :param rate_limits: List[:class:`AiGatewayRateLimit`] (optional)
+          Configuration for rate limits which can be set to limit endpoint traffic.
+        :param usage_tracking_config: :class:`AiGatewayUsageTrackingConfig` (optional)
+          Configuration to enable usage tracking using system tables. These tables allow you to monitor
+          operational usage on endpoints and their associated costs.
+        
+        :returns: :class:`PutAiGatewayResponse`
+        
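+        A minimal sketch that sets a per-endpoint rate limit, assuming a configured `WorkspaceClient`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import (AiGatewayRateLimit, AiGatewayRateLimitKey,
+                                                        AiGatewayRateLimitRenewalPeriod)
+        
+            w = WorkspaceClient()
+            # Allow at most 100 calls per minute across the whole endpoint.
+            w.serving_endpoints.put_ai_gateway(
+                name="my-endpoint",
+                rate_limits=[AiGatewayRateLimit(calls=100,
+                                                key=AiGatewayRateLimitKey.ENDPOINT,
+                                                renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE)])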
 
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
-
-:param name: str
-  The name of the serving endpoint. This field is required.
-:param dataframe_records: List[Any] (optional)
-  Pandas Dataframe input in the records orientation.
-:param dataframe_split: :class:`DataframeSplitInput` (optional)
-  Pandas Dataframe input in the split orientation.
-:param extra_params: Dict[str,str] (optional)
-  The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
-  foundation model__ serving endpoints. This is a map of strings and should only be used with other
-  external/foundation model query fields.
-:param input: Any (optional)
-  The input string (or array of strings) field used ONLY for __embeddings external & foundation
-  model__ serving endpoints and is the only field (along with extra_params if needed) used by
-  embeddings queries.
-:param inputs: Any (optional)
-  Tensor-based input in columnar format.
-:param instances: List[Any] (optional)
-  Tensor-based input in row format.
-:param max_tokens: int (optional)
-  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is an integer and should only be used with other chat/completions query fields.
-:param messages: List[:class:`ChatMessage`] (optional)
-  The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
-  map of strings and should only be used with other chat query fields.
-:param n: int (optional)
-  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
-  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
-  used with other chat/completions query fields.
-:param prompt: Any (optional)
-  The prompt string (or array of strings) field used ONLY for __completions external & foundation
-  model__ serving endpoints and should only be used with other completions query fields.
-:param stop: List[str] (optional)
-  The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
-  serving endpoints. This is a list of strings and should only be used with other chat/completions
-  query fields.
-:param stream: bool (optional)
-  The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
-  query fields.
-:param temperature: float (optional)
-  The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
-  other chat/completions query fields.
-
-:returns: :class:`QueryEndpointResponse`
-
+        
+        :param name: str
+          The name of the serving endpoint. This field is required.
+        :param dataframe_records: List[Any] (optional)
+          Pandas Dataframe input in the records orientation.
+        :param dataframe_split: :class:`DataframeSplitInput` (optional)
+          Pandas Dataframe input in the split orientation.
+        :param extra_params: Dict[str,str] (optional)
+          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+          foundation model__ serving endpoints. This is a map of strings and should only be used with other
+          external/foundation model query fields.
+        :param input: Any (optional)
+          The input string (or array of strings) field used ONLY for __embeddings external & foundation
+          model__ serving endpoints and is the only field (along with extra_params if needed) used by
+          embeddings queries.
+        :param inputs: Any (optional)
+          Tensor-based input in columnar format.
+        :param instances: List[Any] (optional)
+          Tensor-based input in row format.
+        :param max_tokens: int (optional)
+          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is an integer and should only be used with other chat/completions query fields.
+        :param messages: List[:class:`ChatMessage`] (optional)
+          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+          map of strings and should only be used with other chat query fields.
+        :param n: int (optional)
+          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+          used with other chat/completions query fields.
+        :param prompt: Any (optional)
+          The prompt string (or array of strings) field used ONLY for __completions external & foundation
+          model__ serving endpoints and should only be used with other completions query fields.
+        :param stop: List[str] (optional)
+          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+          serving endpoints. This is a list of strings and should only be used with other chat/completions
+          query fields.
+        :param stream: bool (optional)
+          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+          query fields.
+        :param temperature: float (optional)
+          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+          other chat/completions query fields.
+        
+        :returns: :class:`QueryEndpointResponse`
+        
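+        A minimal chat-style sketch, assuming a configured `WorkspaceClient` and a chat-capable endpoint:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import ChatMessage, ChatMessageRole
+        
+            w = WorkspaceClient()
+            response = w.serving_endpoints.query(
+                name="my-chat-endpoint",
+                messages=[ChatMessage(role=ChatMessageRole.USER, content="What is Delta Live Tables?")],
+                max_tokens=128)
+            # For chat endpoints, the reply is in choices[0].message.content.
+            print(response.choices[0].message.content)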
 
     .. py:method:: set_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions
 
         Set serving endpoint permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param serving_endpoint_id: str
-  The serving endpoint for which to get or manage permissions.
-:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-
-:returns: :class:`ServingEndpointPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param serving_endpoint_id: str
+          The serving endpoint for which to get or manage permissions.
+        :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
+        
+        :returns: :class:`ServingEndpointPermissions`
+        
 
     .. py:method:: update_config(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig]]) -> Wait[ServingEndpointDetailed]
 
         Update config of a serving endpoint.
-
-Updates any combination of the serving endpoint's served entities, the compute configuration of those
-served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
-can not be updated until the current update completes or fails.
-
-:param name: str
-  The name of the serving endpoint to update. This field is required.
-:param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
-  Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
-  Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
-  existing provisioned throughput endpoints that never have inference table configured; in these cases
-  please use AI Gateway to manage inference tables.
-:param served_entities: List[:class:`ServedEntityInput`] (optional)
-  The list of served entities under the serving endpoint config.
-:param served_models: List[:class:`ServedModelInput`] (optional)
-  (Deprecated, use served_entities instead) The list of served models under the serving endpoint
-  config.
-:param traffic_config: :class:`TrafficConfig` (optional)
-  The traffic configuration associated with the serving endpoint config.
-
-:returns:
-  Long-running operation waiter for :class:`ServingEndpointDetailed`.
-  See :method:wait_get_serving_endpoint_not_updating for more details.
-
+        
+        Updates any combination of the serving endpoint's served entities, the compute configuration of those
+        served entities, and the endpoint's traffic config. An endpoint that already has an update in progress
+        cannot be updated until the current update completes or fails.
+        
+        :param name: str
+          The name of the serving endpoint to update. This field is required.
+        :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
+          Configuration for Inference Tables, which automatically log requests and responses to Unity
+          Catalog. Note: this field is deprecated for creating new provisioned throughput endpoints, or for
+          updating existing provisioned throughput endpoints that have never had an inference table
+          configured; in these cases, please use AI Gateway to manage inference tables.
+        :param served_entities: List[:class:`ServedEntityInput`] (optional)
+          The list of served entities under the serving endpoint config.
+        :param served_models: List[:class:`ServedModelInput`] (optional)
+          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+          config.
+        :param traffic_config: :class:`TrafficConfig` (optional)
+          The traffic configuration associated with the serving endpoint config.
+        
+        :returns:
+          Long-running operation waiter for :class:`ServingEndpointDetailed`.
+          See :method:wait_get_serving_endpoint_not_updating for more details.
+        
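+        A minimal sketch that swaps in a new served entity, assuming a configured `WorkspaceClient`:
+        
+        .. code-block:: python
+        
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import ServedEntityInput
+        
+            w = WorkspaceClient()
+            # update_config() returns a waiter; update_config_and_wait() blocks until the update finishes.
+            endpoint = w.serving_endpoints.update_config_and_wait(
+                name="my-endpoint",
+                served_entities=[ServedEntityInput(entity_name="<catalog.schema.model>",
+                                                   entity_version="2",
+                                                   workload_size="Small",
+                                                   scale_to_zero_enabled=True)])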
 
     .. py:method:: update_config_and_wait(name: str [, auto_capture_config: Optional[AutoCaptureConfigInput], served_entities: Optional[List[ServedEntityInput]], served_models: Optional[List[ServedModelInput]], traffic_config: Optional[TrafficConfig], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
@@ -329,15 +329,15 @@ can not be updated until the current update completes or fails.
     .. py:method:: update_permissions(serving_endpoint_id: str [, access_control_list: Optional[List[ServingEndpointAccessControlRequest]]]) -> ServingEndpointPermissions
 
         Update serving endpoint permissions.
-
-Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their
-root object.
-
-:param serving_endpoint_id: str
-  The serving endpoint for which to get or manage permissions.
-:param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
-
-:returns: :class:`ServingEndpointPermissions`
-
+        
+        Updates the permissions on a serving endpoint. Serving endpoints can inherit permissions from their
+        root object.
+        
+        :param serving_endpoint_id: str
+          The serving endpoint for which to get or manage permissions.
+        :param access_control_list: List[:class:`ServingEndpointAccessControlRequest`] (optional)
+        
+        :returns: :class:`ServingEndpointPermissions`
+        
 
     .. py:method:: wait_get_serving_endpoint_not_updating(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[ServingEndpointDetailed], None]]) -> ServingEndpointDetailed
diff --git a/docs/workspace/serving/serving_endpoints_data_plane.rst b/docs/workspace/serving/serving_endpoints_data_plane.rst
index b3501b121..8fb09e7ff 100644
--- a/docs/workspace/serving/serving_endpoints_data_plane.rst
+++ b/docs/workspace/serving/serving_endpoints_data_plane.rst
@@ -5,54 +5,55 @@
 .. py:class:: ServingEndpointsDataPlaneAPI
 
     Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving
-endpoints service.
+    endpoints service.
 
     .. py:method:: query(name: str [, dataframe_records: Optional[List[Any]], dataframe_split: Optional[DataframeSplitInput], extra_params: Optional[Dict[str, str]], input: Optional[Any], inputs: Optional[Any], instances: Optional[List[Any]], max_tokens: Optional[int], messages: Optional[List[ChatMessage]], n: Optional[int], prompt: Optional[Any], stop: Optional[List[str]], stream: Optional[bool], temperature: Optional[float]]) -> QueryEndpointResponse
 
         Query a serving endpoint.
-
-:param name: str
-  The name of the serving endpoint. This field is required.
-:param dataframe_records: List[Any] (optional)
-  Pandas Dataframe input in the records orientation.
-:param dataframe_split: :class:`DataframeSplitInput` (optional)
-  Pandas Dataframe input in the split orientation.
-:param extra_params: Dict[str,str] (optional)
-  The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
-  foundation model__ serving endpoints. This is a map of strings and should only be used with other
-  external/foundation model query fields.
-:param input: Any (optional)
-  The input string (or array of strings) field used ONLY for __embeddings external & foundation
-  model__ serving endpoints and is the only field (along with extra_params if needed) used by
-  embeddings queries.
-:param inputs: Any (optional)
-  Tensor-based input in columnar format.
-:param instances: List[Any] (optional)
-  Tensor-based input in row format.
-:param max_tokens: int (optional)
-  The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is an integer and should only be used with other chat/completions query fields.
-:param messages: List[:class:`ChatMessage`] (optional)
-  The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
-  map of strings and should only be used with other chat query fields.
-:param n: int (optional)
-  The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
-  model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
-  used with other chat/completions query fields.
-:param prompt: Any (optional)
-  The prompt string (or array of strings) field used ONLY for __completions external & foundation
-  model__ serving endpoints and should only be used with other completions query fields.
-:param stop: List[str] (optional)
-  The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
-  serving endpoints. This is a list of strings and should only be used with other chat/completions
-  query fields.
-:param stream: bool (optional)
-  The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
-  query fields.
-:param temperature: float (optional)
-  The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
-  endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
-  other chat/completions query fields.
-
-:returns: :class:`QueryEndpointResponse`
+        
+        :param name: str
+          The name of the serving endpoint. This field is required.
+        :param dataframe_records: List[Any] (optional)
+          Pandas Dataframe input in the records orientation.
+        :param dataframe_split: :class:`DataframeSplitInput` (optional)
+          Pandas Dataframe input in the split orientation.
+        :param extra_params: Dict[str,str] (optional)
+          The extra parameters field used ONLY for __completions, chat,__ and __embeddings external &
+          foundation model__ serving endpoints. This is a map of strings and should only be used with other
+          external/foundation model query fields.
+        :param input: Any (optional)
+          The input string (or array of strings) field used ONLY for __embeddings external & foundation
+          model__ serving endpoints and is the only field (along with extra_params if needed) used by
+          embeddings queries.
+        :param inputs: Any (optional)
+          Tensor-based input in columnar format.
+        :param instances: List[Any] (optional)
+          Tensor-based input in row format.
+        :param max_tokens: int (optional)
+          The max tokens field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is an integer and should only be used with other chat/completions query fields.
+        :param messages: List[:class:`ChatMessage`] (optional)
+          The messages field used ONLY for __chat external & foundation model__ serving endpoints. This is a
+          map of strings and should only be used with other chat query fields.
+        :param n: int (optional)
+          The n (number of candidates) field used ONLY for __completions__ and __chat external & foundation
+          model__ serving endpoints. This is an integer between 1 and 5 with a default of 1 and should only be
+          used with other chat/completions query fields.
+        :param prompt: Any (optional)
+          The prompt string (or array of strings) field used ONLY for __completions external & foundation
+          model__ serving endpoints and should only be used with other completions query fields.
+        :param stop: List[str] (optional)
+          The stop sequences field used ONLY for __completions__ and __chat external & foundation model__
+          serving endpoints. This is a list of strings and should only be used with other chat/completions
+          query fields.
+        :param stream: bool (optional)
+          The stream field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a boolean defaulting to false and should only be used with other chat/completions
+          query fields.
+        :param temperature: float (optional)
+          The temperature field used ONLY for __completions__ and __chat external & foundation model__ serving
+          endpoints. This is a float between 0.0 and 2.0 with a default of 1.0 and should only be used with
+          other chat/completions query fields.
+        
+        :returns: :class:`QueryEndpointResponse`
+        
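+        A minimal chat-style query sketch (the endpoint name ``my-chat-endpoint`` is
+        hypothetical, and reading the reply via ``choices`` assumes a chat response):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import ChatMessage, ChatMessageRole
+
+            w = WorkspaceClient()
+
+            # messages/max_tokens/temperature are the chat fields described above;
+            # do not mix them with tensor-based fields such as inputs or instances.
+            response = w.serving_endpoints.query(
+                name="my-chat-endpoint",
+                messages=[ChatMessage(role=ChatMessageRole.USER, content="Hello!")],
+                max_tokens=128,
+                temperature=0.5,
+            )
+            print(response.choices[0].message.content)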
\ No newline at end of file
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
index 587b94d11..66c621997 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -5,59 +5,60 @@
 .. py:class:: AibiDashboardEmbeddingAccessPolicyAPI
 
     Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
-workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
 
         Delete the AI/BI dashboard embedding access policy.
-
-Delete the AI/BI dashboard embedding access policy, reverting back to the default.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
-
+        
+        Delete the AI/BI dashboard embedding access policy, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting
 
         Retrieve the AI/BI dashboard embedding access policy.
-
-Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
-permitting AI/BI dashboards to be embedded on approved domains.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
-
+        
+        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+        permitting AI/BI dashboards to be embedded on approved domains.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting
 
         Update the AI/BI dashboard embedding access policy.
-
-Updates the AI/BI dashboard embedding access policy at the workspace level.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
+        Updates the AI/BI dashboard embedding access policy at the workspace level.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
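+        A read-then-update sketch (the nested ``access_policy_type`` field path and enum
+        name are assumptions about this setting's generated model):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import settings
+
+            w = WorkspaceClient()
+
+            # Read the current setting, change one field, and send back only that
+            # field via field_mask, per the update contract described above.
+            setting = w.settings.aibi_dashboard_embedding_access_policy.get()
+            setting.aibi_dashboard_embedding_access_policy.access_policy_type = (
+                settings.AibiDashboardEmbeddingAccessPolicyAccessPolicyType.DENY)
+            w.settings.aibi_dashboard_embedding_access_policy.update(
+                allow_missing=True,
+                setting=setting,
+                field_mask="aibi_dashboard_embedding_access_policy.access_policy_type",
+            )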
\ No newline at end of file
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
index 53e9cdcca..0c9294130 100644
--- a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -5,60 +5,61 @@
 .. py:class:: AibiDashboardEmbeddingApprovedDomainsAPI
 
     Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
-can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
 
         Delete AI/BI dashboard embedding approved domains.
-
-Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
-empty list.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
-
+        
+        Delete the list of domains approved to host embedded AI/BI dashboards, reverting back to the default
+        empty list.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting
 
         Retrieve the list of domains approved to host embedded AI/BI dashboards.
-
-Retrieves the list of domains approved to host embedded AI/BI dashboards.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
-
+        
+        Retrieves the list of domains approved to host embedded AI/BI dashboards.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting
 
         Update the list of domains approved to host embedded AI/BI dashboards.
-
-Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
-current workspace access policy is not ALLOW_APPROVED_DOMAINS.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
+        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
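+        A sketch of appending one domain (the nested ``approved_domains`` field path is an
+        assumption; the call fails unless the access policy is ALLOW_APPROVED_DOMAINS):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            setting = w.settings.aibi_dashboard_embedding_approved_domains.get()
+            setting.aibi_dashboard_embedding_approved_domains.approved_domains.append("example.com")
+            w.settings.aibi_dashboard_embedding_approved_domains.update(
+                allow_missing=True,
+                setting=setting,
+                field_mask="aibi_dashboard_embedding_approved_domains.approved_domains",
+            )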
\ No newline at end of file
diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst
index 94d110154..350e0e713 100644
--- a/docs/workspace/settings/automatic_cluster_update.rst
+++ b/docs/workspace/settings/automatic_cluster_update.rst
@@ -5,45 +5,46 @@
 .. py:class:: AutomaticClusterUpdateAPI
 
     Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
-off.
+    off.
 
     .. py:method:: get( [, etag: Optional[str]]) -> AutomaticClusterUpdateSetting
 
         Get the automatic cluster update setting.
-
-Gets the automatic cluster update setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`AutomaticClusterUpdateSetting`
-
+        
+        Gets the automatic cluster update setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AutomaticClusterUpdateSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: AutomaticClusterUpdateSetting, field_mask: str) -> AutomaticClusterUpdateSetting
 
         Update the automatic cluster update setting.
-
-Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in
-`PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request
-before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
-request must be retried by using the fresh etag in the 409 response.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`AutomaticClusterUpdateSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`AutomaticClusterUpdateSetting`
+        
+        Updates the automatic cluster update setting for the workspace. A fresh etag needs to be provided in
+        `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET` request
+        before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
+        request must be retried by using the fresh etag in the 409 response.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AutomaticClusterUpdateSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AutomaticClusterUpdateSetting`
+        
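+        A sketch of the fresh-etag retry loop that the 409 behavior above implies
+        (``ResourceConflict`` is the SDK error for HTTP 409; the nested field name is an
+        assumption):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.errors import ResourceConflict
+
+            w = WorkspaceClient()
+
+            while True:
+                # GET returns the setting together with a fresh etag.
+                setting = w.settings.automatic_cluster_update.get()
+                setting.automatic_cluster_update_workspace.enabled = True
+                try:
+                    w.settings.automatic_cluster_update.update(
+                        allow_missing=True,
+                        setting=setting,
+                        field_mask="automatic_cluster_update_workspace.enabled",
+                    )
+                    break
+                except ResourceConflict:
+                    # A concurrent writer won; re-read to pick up the new etag.
+                    continue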
\ No newline at end of file
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
index 3d6a4704c..855451b82 100644
--- a/docs/workspace/settings/compliance_security_profile.rst
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -5,47 +5,48 @@
 .. py:class:: ComplianceSecurityProfileAPI
 
     Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
-workspace is permanent. By default, it is turned off.
-
-This settings can NOT be disabled once it is enabled.
+    workspace is permanent. By default, it is turned off.
+    
+    This setting can NOT be disabled once it is enabled.
 
     .. py:method:: get( [, etag: Optional[str]]) -> ComplianceSecurityProfileSetting
 
         Get the compliance security profile setting.
-
-Gets the compliance security profile setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`ComplianceSecurityProfileSetting`
-
+        
+        Gets the compliance security profile setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`ComplianceSecurityProfileSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: ComplianceSecurityProfileSetting, field_mask: str) -> ComplianceSecurityProfileSetting
 
         Update the compliance security profile setting.
-
-Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
-in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
-request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
-the request must be retried by using the fresh etag in the 409 response.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`ComplianceSecurityProfileSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`ComplianceSecurityProfileSetting`
+        
+        Updates the compliance security profile setting for the workspace. A fresh etag needs to be provided
+        in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
+        request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
+        the request must be retried by using the fresh etag in the 409 response.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`ComplianceSecurityProfileSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`ComplianceSecurityProfileSetting`
+        
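+        Because enabling is permanent, inspect the current value before updating (a
+        minimal sketch):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Review the returned payload before issuing an irreversible update.
+            setting = w.settings.compliance_security_profile.get()
+            print(setting)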
\ No newline at end of file
diff --git a/docs/workspace/settings/credentials_manager.rst b/docs/workspace/settings/credentials_manager.rst
index eacb47eec..c8bfa4f30 100644
--- a/docs/workspace/settings/credentials_manager.rst
+++ b/docs/workspace/settings/credentials_manager.rst
@@ -5,20 +5,21 @@
 .. py:class:: CredentialsManagerAPI
 
     Credentials manager interacts with Identity Providers to perform token exchanges using stored
-credentials and refresh tokens.
+    credentials and refresh tokens.
 
     .. py:method:: exchange_token(partition_id: PartitionId, token_type: List[TokenType], scopes: List[str]) -> ExchangeTokenResponse
 
         Exchange token.
-
-Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
-determine token permissions.
-
-:param partition_id: :class:`PartitionId`
-  The partition of Credentials store
-:param token_type: List[:class:`TokenType`]
-  A list of token types being requested
-:param scopes: List[str]
-  Array of scopes for the token request.
-
-:returns: :class:`ExchangeTokenResponse`
+        
+        Exchange tokens with an Identity Provider to get a new access token. It allows specifying scopes to
+        determine token permissions.
+        
+        :param partition_id: :class:`PartitionId`
+          The partition of Credentials store
+        :param token_type: List[:class:`TokenType`]
+          A list of token types being requested
+        :param scopes: List[str]
+          Array of scopes for the token request.
+        
+        :returns: :class:`ExchangeTokenResponse`
+        
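+        A minimal sketch (the token type and scope values are illustrative only):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.settings import PartitionId, TokenType
+
+            w = WorkspaceClient()
+
+            resp = w.credentials_manager.exchange_token(
+                partition_id=PartitionId(workspace_id=w.get_workspace_id()),
+                token_type=[TokenType.AZURE_ACTIVE_DIRECTORY_TOKEN],
+                scopes=["clusters"],
+            )
+            print(resp.values)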
\ No newline at end of file
diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst
index 082011af0..960949930 100644
--- a/docs/workspace/settings/default_namespace.rst
+++ b/docs/workspace/settings/default_namespace.rst
@@ -5,81 +5,82 @@
 .. py:class:: DefaultNamespaceAPI
 
     The default namespace setting API allows users to configure the default namespace for a Databricks
-workspace.
-
-Through this API, users can retrieve, set, or modify the default namespace used when queries do not
-reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
-default catalog, then a query 'SELECT * FROM myTable' would reference the object
-'retail_prod.default.myTable' (the schema 'default' is always assumed).
-
-This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
-namespace only applies when using Unity Catalog-enabled compute.
+    workspace.
+    
+    Through this API, users can retrieve, set, or modify the default namespace used when queries do not
+    reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
+    default catalog, then a query 'SELECT * FROM myTable' would reference the object
+    'retail_prod.default.myTable' (the schema 'default' is always assumed).
+    
+    This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
+    namespace only applies when using Unity Catalog-enabled compute.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDefaultNamespaceSettingResponse
 
         Delete the default namespace setting.
-
-Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE`
-requests (as a query parameter). The etag can be retrieved by making a `GET` request before the
-`DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the
-request must be retried by using the fresh etag in the 409 response.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteDefaultNamespaceSettingResponse`
-
+        
+        Deletes the default namespace setting for the workspace. A fresh etag needs to be provided in `DELETE`
+        requests (as a query parameter). The etag can be retrieved by making a `GET` request before the
+        `DELETE` request. If the setting is updated/deleted concurrently, `DELETE` fails with 409 and the
+        request must be retried by using the fresh etag in the 409 response.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDefaultNamespaceSettingResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> DefaultNamespaceSetting
 
         Get the default namespace setting.
-
-Gets the default namespace setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DefaultNamespaceSetting`
-
+        
+        Gets the default namespace setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DefaultNamespaceSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: DefaultNamespaceSetting, field_mask: str) -> DefaultNamespaceSetting
 
         Update the default namespace setting.
-
-Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH`
-requests (as part of the setting field). The etag can be retrieved by making a `GET` request before
-the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the
-etag is present in the error response, which should be set in the `PATCH` request. If the setting is
-updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag
-in the 409 response.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`DefaultNamespaceSetting`
-  This represents the setting configuration for the default namespace in the Databricks workspace.
-  Setting the default catalog for the workspace determines the catalog that is used when queries do
-  not reference a fully qualified 3 level name. For example, if the default catalog is set to
-  'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
-  'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
-  restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
-  applies when using Unity Catalog-enabled compute.
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`DefaultNamespaceSetting`
+        
+        Updates the default namespace setting for the workspace. A fresh etag needs to be provided in `PATCH`
+        requests (as part of the setting field). The etag can be retrieved by making a `GET` request before
+        the `PATCH` request. Note that if the setting does not exist, `GET` returns a NOT_FOUND error and the
+        etag is present in the error response, which should be set in the `PATCH` request. If the setting is
+        updated concurrently, `PATCH` fails with 409 and the request must be retried by using the fresh etag
+        in the 409 response.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DefaultNamespaceSetting`
+          This represents the setting configuration for the default namespace in the Databricks workspace.
+          Setting the default catalog for the workspace determines the catalog that is used when queries do
+          not reference a fully qualified 3 level name. For example, if the default catalog is set to
+          'retail_prod' then a query 'SELECT * FROM myTable' would reference the object
+          'retail_prod.default.myTable' (the schema 'default' is always assumed). This setting requires a
+          restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
+          applies when using Unity Catalog-enabled compute.
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`DefaultNamespaceSetting`
+        
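+        A sketch setting 'retail_prod' as in the description above (``StringMessage``
+        wraps the plain string value in the generated model):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.settings import DefaultNamespaceSetting, StringMessage
+
+            w = WorkspaceClient()
+
+            w.settings.default_namespace.update(
+                allow_missing=True,
+                setting=DefaultNamespaceSetting(namespace=StringMessage(value="retail_prod")),
+                field_mask="namespace.value",
+            )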
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
index 4e44a7891..a015e777f 100644
--- a/docs/workspace/settings/disable_legacy_access.rst
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -5,62 +5,63 @@
 .. py:class:: DisableLegacyAccessAPI
 
     'Disabling legacy access' has the following impacts:
-
-1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
-Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
-Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
-Unity Catalog access on all path based access.
+    
+    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore
+       through HMS Federation.
+    2. Disables Fallback Mode (docs link) on any External Location access from the workspace.
+    3. Alters DBFS path access to use External Location permissions in place of legacy credentials.
+    4. Enforces Unity Catalog access on all path based access.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyAccessResponse
 
         Delete Legacy Access Disablement Status.
-
-Deletes legacy access disablement status.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteDisableLegacyAccessResponse`
-
+        
+        Deletes legacy access disablement status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyAccessResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyAccess
 
         Retrieve Legacy Access Disablement Status.
-
-Retrieves legacy access disablement Status.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DisableLegacyAccess`
-
+        
+        Retrieves legacy access disablement Status.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyAccess`
+        
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyAccess, field_mask: str) -> DisableLegacyAccess
 
         Update Legacy Access Disablement Status.
-
-Updates legacy access disablement status.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`DisableLegacyAccess`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`DisableLegacyAccess`
+        
+        Updates legacy access disablement status.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyAccess`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`DisableLegacyAccess`
+        
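+        A sketch of the read -> delete pattern recommended above:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            # Fetch a fresh etag first, then pass it to delete to avoid racing a
+            # concurrent writer.
+            current = w.settings.disable_legacy_access.get()
+            w.settings.disable_legacy_access.delete(etag=current.etag)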
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
index 8368bcc71..502111fe4 100644
--- a/docs/workspace/settings/disable_legacy_dbfs.rst
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -5,58 +5,59 @@
 .. py:class:: DisableLegacyDbfsAPI
 
     When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
-mounts). When the setting is off, all DBFS functionality is enabled
+    mounts). When the setting is off, all DBFS functionality is enabled.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteDisableLegacyDbfsResponse
 
         Delete the disable legacy DBFS setting.
-
-Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteDisableLegacyDbfsResponse`
-
+        
+        Deletes the disable legacy DBFS setting for a workspace, reverting back to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteDisableLegacyDbfsResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> DisableLegacyDbfs
 
         Get the disable legacy DBFS setting.
-
-Gets the disable legacy DBFS setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DisableLegacyDbfs`
-
+        
+        Gets the disable legacy DBFS setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
 
     .. py:method:: update(allow_missing: bool, setting: DisableLegacyDbfs, field_mask: str) -> DisableLegacyDbfs
 
         Update the disable legacy DBFS setting.
-
-Updates the disable legacy DBFS setting for the workspace.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`DisableLegacyDbfs`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`DisableLegacyDbfs`
+        
+        Updates the disable legacy DBFS setting for the workspace.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`DisableLegacyDbfs`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`DisableLegacyDbfs`
+        
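+        A minimal status check (the nested boolean field name is an assumption based on
+        the other boolean settings):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            setting = w.settings.disable_legacy_dbfs.get()
+            print(setting.disable_legacy_dbfs.value)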
\ No newline at end of file
diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst
index 2b1e6b5f5..c9dfb547d 100644
--- a/docs/workspace/settings/enhanced_security_monitoring.rst
+++ b/docs/workspace/settings/enhanced_security_monitoring.rst
@@ -5,49 +5,50 @@
 .. py:class:: EnhancedSecurityMonitoringAPI
 
     Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
-security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
-compliance security profile is enabled, this is automatically enabled.
-
-If the compliance security profile is disabled, you can enable or disable this setting and it is not
-permanent.
+    security profile is enabled, this is automatically enabled. By default, it is disabled.
+    
+    If the compliance security profile is disabled, you can enable or disable this setting and it is not
+    permanent.
 
     .. py:method:: get( [, etag: Optional[str]]) -> EnhancedSecurityMonitoringSetting
 
         Get the enhanced security monitoring setting.
-
-Gets the enhanced security monitoring setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`EnhancedSecurityMonitoringSetting`
-
+        
+        Gets the enhanced security monitoring setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`EnhancedSecurityMonitoringSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: EnhancedSecurityMonitoringSetting, field_mask: str) -> EnhancedSecurityMonitoringSetting
 
         Update the enhanced security monitoring setting.
-
-Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided
-in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
-request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
-the request must be retried by using the fresh etag in the 409 response.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`EnhancedSecurityMonitoringSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`EnhancedSecurityMonitoringSetting`
+        
+        Updates the enhanced security monitoring setting for the workspace. A fresh etag needs to be provided
+        in `PATCH` requests (as part of the setting field). The etag can be retrieved by making a `GET`
+        request before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and
+        the request must be retried by using the fresh etag in the 409 response.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`EnhancedSecurityMonitoringSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`EnhancedSecurityMonitoringSetting`
+        
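+        A read-then-update sketch (the nested ``is_enabled`` field path is an assumption):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            setting = w.settings.enhanced_security_monitoring.get()
+            setting.enhanced_security_monitoring_workspace.is_enabled = True
+            w.settings.enhanced_security_monitoring.update(
+                allow_missing=True,
+                setting=setting,
+                field_mask="enhanced_security_monitoring_workspace.is_enabled",
+            )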
\ No newline at end of file
diff --git a/docs/workspace/settings/ip_access_lists.rst b/docs/workspace/settings/ip_access_lists.rst
index 162009162..a265c5943 100644
--- a/docs/workspace/settings/ip_access_lists.rst
+++ b/docs/workspace/settings/ip_access_lists.rst
@@ -5,22 +5,22 @@
 .. py:class:: IpAccessListsAPI
 
     IP Access List enables admins to configure IP access lists.
-
-IP access lists affect web application access and REST API access to this workspace only. If the feature
-is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists
-(inclusion) and block lists (exclusion).
-
-When a connection is attempted: 1. **First, all block lists are checked.** If the connection IP address
-matches any block list, the connection is rejected. 2. **If the connection was not rejected by block
-lists**, the IP address is compared with the allow lists.
-
-If there is at least one allow list for the workspace, the connection is allowed only if the IP address
-matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed.
-
-For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values,
-where one CIDR counts as a single value.
-
-After changes to the IP access list feature, it can take a few minutes for changes to take effect.
+    
+    IP access lists affect web application access and REST API access to this workspace only. If the feature
+    is disabled for a workspace, all access is allowed for this workspace. There is support for allow lists
+    (inclusion) and block lists (exclusion).
+    
+    When a connection is attempted:
+    
+    1. **First, all block lists are checked.** If the connection IP address matches any block list, the
+       connection is rejected.
+    2. **If the connection was not rejected by block lists**, the IP address is compared with the allow
+       lists.
+    
+    If there is at least one allow list for the workspace, the connection is allowed only if the IP address
+    matches an allow list. If there are no allow lists for the workspace, all IP addresses are allowed.
+    
+    For all allow lists and block lists combined, the workspace supports a maximum of 1000 IP/CIDR values,
+    where one CIDR counts as a single value.
+    
+    After changes to the IP access list feature, it can take a few minutes for changes to take effect.
 
     .. py:method:: create(label: str, list_type: ListType [, ip_addresses: Optional[List[str]]]) -> CreateIpAccessListResponse
 
@@ -44,45 +44,45 @@ After changes to the IP access list feature, it can take a few minutes for chang
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Create access list.
-
-Creates an IP access list for this workspace.
-
-A list can be an allow list or a block list. See the top of this file for a description of how the
-server treats allow lists and block lists at runtime.
-
-When creating or updating an IP access list:
-
-* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-`error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
-error 400 is returned with `error_code` value `INVALID_STATE`.
-
-It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no
-effect until you enable the feature. See :method:workspaceconf/setStatus
-
-:param label: str
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType`
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-:param ip_addresses: List[str] (optional)
-
-:returns: :class:`CreateIpAccessListResponse`
-
+        
+        Creates an IP access list for this workspace.
+        
+        A list can be an allow list or a block list. See the top of this file for a description of how the
+        server treats allow lists and block lists at runtime.
+        
+        When creating or updating an IP access list:
+        
+        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+        `error_code` value `QUOTA_EXCEEDED`. * If the new list would block the calling user's current IP,
+        error 400 is returned with `error_code` value `INVALID_STATE`.
+        
+        It can take a few minutes for the changes to take effect. **Note**: Your new IP access list has no
+        effect until you enable the feature. See :method:workspaceconf/setStatus
+        
+        :param label: str
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType`
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        :param ip_addresses: List[str] (optional)
+        
+        :returns: :class:`CreateIpAccessListResponse`
+        
 
     .. py:method:: delete(ip_access_list_id: str)
 
         Delete access list.
-
-Deletes an IP access list, specified by its list ID.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-
-
-
+        
+        Deletes an IP access list, specified by its list ID.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        
+        
+        
 
     .. py:method:: get(ip_access_list_id: str) -> FetchIpAccessListResponse
 
@@ -108,14 +108,14 @@ Deletes an IP access list, specified by its list ID.
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Get access list.
-
-Gets an IP access list, specified by its list ID.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-
-:returns: :class:`FetchIpAccessListResponse`
-
+        
+        Gets an IP access list, specified by its list ID.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        
+        :returns: :class:`FetchIpAccessListResponse`
+        
 
     .. py:method:: list() -> Iterator[IpAccessListInfo]
 
@@ -131,11 +131,11 @@ Gets an IP access list, specified by its list ID.
             all = w.ip_access_lists.list()
 
         Get access lists.
-
-Gets all IP access lists for the specified workspace.
-
-:returns: Iterator over :class:`IpAccessListInfo`
-
+        
+        Gets all IP access lists for the specified workspace.
+        
+        :returns: Iterator over :class:`IpAccessListInfo`
+        
 
     .. py:method:: replace(ip_access_list_id: str, label: str, list_type: ListType, enabled: bool [, ip_addresses: Optional[List[str]]])
 
@@ -165,64 +165,65 @@ Gets all IP access lists for the specified workspace.
             w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)
 
         Replace access list.
-
-Replaces an IP access list, specified by its ID.
-
-A list can include allow lists and block lists. See the top of this file for a description of how the
-server treats allow lists and block lists at run time. When replacing an IP access list: * For all
-allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
-CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
-`QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
-returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
-effect. Note that your resulting IP access list has no effect until you enable the feature. See
-:method:workspaceconf/setStatus.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-:param label: str
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType`
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-:param enabled: bool
-  Specifies whether this IP access list is enabled.
-:param ip_addresses: List[str] (optional)
-
-
-
+        
+        Replaces an IP access list, specified by its ID.
+        
+        A list can include allow lists and block lists. See the top of this file for a description of how the
+        server treats allow lists and block lists at run time. When replacing an IP access list: * For all
+        allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values, where one
+        CIDR counts as a single value. Attempts to exceed that number return error 400 with `error_code` value
+        `QUOTA_EXCEEDED`. * If the resulting list would block the calling user's current IP, error 400 is
+        returned with `error_code` value `INVALID_STATE`. It can take a few minutes for the changes to take
+        effect. Note that your resulting IP access list has no effect until you enable the feature. See
+        :method:workspaceconf/setStatus.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        :param label: str
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType`
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        :param enabled: bool
+          Specifies whether this IP access list is enabled.
+        :param ip_addresses: List[str] (optional)
+        
+        
+        
 
     .. py:method:: update(ip_access_list_id: str [, enabled: Optional[bool], ip_addresses: Optional[List[str]], label: Optional[str], list_type: Optional[ListType]])
 
         Update access list.
-
-Updates an existing IP access list, specified by its ID.
-
-A list can include allow lists and block lists. See the top of this file for a description of how the
-server treats allow lists and block lists at run time.
-
-When updating an IP access list:
-
-* For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
-where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
-`error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
-error 400 is returned with `error_code` value `INVALID_STATE`.
-
-It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
-no effect until you enable the feature. See :method:workspaceconf/setStatus.
-
-:param ip_access_list_id: str
-  The ID for the corresponding IP access list
-:param enabled: bool (optional)
-  Specifies whether this IP access list is enabled.
-:param ip_addresses: List[str] (optional)
-:param label: str (optional)
-  Label for the IP access list. This **cannot** be empty.
-:param list_type: :class:`ListType` (optional)
-  Type of IP access list. Valid values are as follows and are case-sensitive:
-  
-  * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
-  range. IP addresses in the block list are excluded even if they are included in an allow list.
-
-
+        
+        Updates an existing IP access list, specified by its ID.
+        
+        A list can include allow lists and block lists. See the top of this file for a description of how the
+        server treats allow lists and block lists at run time.
+        
+        When updating an IP access list:
+        
+        * For all allow lists and block lists combined, the API supports a maximum of 1000 IP/CIDR values,
+        where one CIDR counts as a single value. Attempts to exceed that number return error 400 with
+        `error_code` value `QUOTA_EXCEEDED`. * If the updated list would block the calling user's current IP,
+        error 400 is returned with `error_code` value `INVALID_STATE`.
+        
+        It can take a few minutes for the changes to take effect. Note that your resulting IP access list has
+        no effect until you enable the feature. See :method:workspaceconf/setStatus.
+        
+        :param ip_access_list_id: str
+          The ID for the corresponding IP access list
+        :param enabled: bool (optional)
+          Specifies whether this IP access list is enabled.
+        :param ip_addresses: List[str] (optional)
+        :param label: str (optional)
+          Label for the IP access list. This **cannot** be empty.
+        :param list_type: :class:`ListType` (optional)
+          Type of IP access list. Valid values are as follows and are case-sensitive:
+          
+          * `ALLOW`: An allow list. Include this IP or range. * `BLOCK`: A block list. Exclude this IP or
+          range. IP addresses in the block list are excluded even if they are included in an allow list.
+        
+        
+        
\ No newline at end of file
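For reference, the create/get/delete docstrings above compose into a minimal lifecycle sketch with the Python SDK (assumes default workspace authentication; `ListType` comes from `databricks.sdk.service.settings`, as in the embedded examples):

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # Create a block list; each CIDR range counts as one value toward the 1000-value quota.
    created = w.ip_access_lists.create(label=f"sdk-{time.time_ns()}",
                                       ip_addresses=["1.0.0.0/16"],
                                       list_type=settings.ListType.BLOCK)

    # Fetch it back by ID, then clean up. The list has no effect until the feature
    # itself is enabled (see :method:workspaceconf/setStatus).
    fetched = w.ip_access_lists.get(ip_access_list_id=created.ip_access_list.list_id)
    w.ip_access_lists.delete(ip_access_list_id=created.ip_access_list.list_id)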
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
index e43383704..8fb2d0c3c 100644
--- a/docs/workspace/settings/notification_destinations.rst
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -5,70 +5,71 @@
 .. py:class:: NotificationDestinationsAPI
 
     The notification destinations API lets you programmatically manage a workspace's notification
-destinations. Notification destinations are used to send notifications for query alerts and jobs to
-destinations outside of Databricks. Only workspace admins can create, update, and delete notification
-destinations.
+    destinations. Notification destinations are used to send notifications for query alerts and jobs to
+    destinations outside of Databricks. Only workspace admins can create, update, and delete notification
+    destinations.
 
     .. py:method:: create( [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
 
         Create a notification destination.
-
-Creates a notification destination. Requires workspace admin permissions.
-
-:param config: :class:`Config` (optional)
-  The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
-:param display_name: str (optional)
-  The display name for the notification destination.
-
-:returns: :class:`NotificationDestination`
-
+        
+        Creates a notification destination. Requires workspace admin permissions.
+        
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a notification destination.
-
-Deletes a notification destination. Requires workspace admin permissions.
-
-:param id: str
-
-
-
+        
+        Deletes a notification destination. Requires workspace admin permissions.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> NotificationDestination
 
         Get a notification destination.
-
-Gets a notification destination.
-
-:param id: str
-
-:returns: :class:`NotificationDestination`
-
+        
+        Gets a notification destination.
+        
+        :param id: str
+        
+        :returns: :class:`NotificationDestination`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListNotificationDestinationsResult]
 
         List notification destinations.
-
-Lists notification destinations.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ListNotificationDestinationsResult`
-
+        
+        Lists notification destinations.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListNotificationDestinationsResult`
+        
 
     .. py:method:: update(id: str [, config: Optional[Config], display_name: Optional[str]]) -> NotificationDestination
 
         Update a notification destination.
-
-Updates a notification destination. Requires workspace admin permissions. At least one field is
-required in the request body.
-
-:param id: str
-  UUID identifying notification destination.
-:param config: :class:`Config` (optional)
-  The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
-:param display_name: str (optional)
-  The display name for the notification destination.
-
-:returns: :class:`NotificationDestination`
+        
+        Updates a notification destination. Requires workspace admin permissions. At least one field is
+        required in the request body.
+        
+        :param id: str
+          UUID identifying notification destination.
+        :param config: :class:`Config` (optional)
+          The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
+        :param display_name: str (optional)
+          The display name for the notification destination.
+        
+        :returns: :class:`NotificationDestination`
+        
\ No newline at end of file
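A hedged sketch of the create/update/delete flow documented above. The `Config` and `EmailConfig` classes are assumed to live in `databricks.sdk.service.settings` per the generated service module; verify the names against your SDK version:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # config must wrap EXACTLY one nested destination config -- here, email.
    dest = w.notification_destinations.create(
        display_name="oncall-email",
        config=settings.Config(email=settings.EmailConfig(addresses=["oncall@example.com"])))

    # update requires at least one field in the request body; delete is by UUID.
    w.notification_destinations.update(id=dest.id, display_name="oncall-email-v2")
    w.notification_destinations.delete(id=dest.id)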
diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst
index 9d44b6189..b025112cc 100644
--- a/docs/workspace/settings/restrict_workspace_admins.rst
+++ b/docs/workspace/settings/restrict_workspace_admins.rst
@@ -5,71 +5,72 @@
 .. py:class:: RestrictWorkspaceAdminsAPI
 
     The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the
-setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
-behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
-user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
-service principal on which they have the Service Principal User role. With the setting status set to
-RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
-service principals they have the Service Principal User role on. They can also only change a job owner to
-themselves. And they can change the job run_as setting to themselves or to a service principal on which
-they have the Service Principal User role.
+    setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
+    behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
+    user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
+    service principal on which they have the Service Principal User role. With the setting status set to
+    RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
+    service principals they have the Service Principal User role on. They can also only change a job owner to
+    themselves. And they can change the job run_as setting to themselves or to a service principal on which
+    they have the Service Principal User role.
 
     .. py:method:: delete( [, etag: Optional[str]]) -> DeleteRestrictWorkspaceAdminsSettingResponse
 
         Delete the restrict workspace admins setting.
-
-Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be
-provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET`
-request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with
-409 and the request must be retried by using the fresh etag in the 409 response.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse`
-
+        
+        Reverts the restrict workspace admins setting status for the workspace. A fresh etag needs to be
+        provided in `DELETE` requests (as a query parameter). The etag can be retrieved by making a `GET`
+        request before the DELETE request. If the setting is updated/deleted concurrently, `DELETE` fails with
+        409 and the request must be retried by using the fresh etag in the 409 response.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteRestrictWorkspaceAdminsSettingResponse`
+        
 
     .. py:method:: get( [, etag: Optional[str]]) -> RestrictWorkspaceAdminsSetting
 
         Get the restrict workspace admins setting.
-
-Gets the restrict workspace admins setting.
-
-:param etag: str (optional)
-  etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
-  optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
-  each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
-  to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
-  request, and pass it with the DELETE request to identify the rule set version you are deleting.
-
-:returns: :class:`RestrictWorkspaceAdminsSetting`
-
+        
+        Gets the restrict workspace admins setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`RestrictWorkspaceAdminsSetting`
+        
 
     .. py:method:: update(allow_missing: bool, setting: RestrictWorkspaceAdminsSetting, field_mask: str) -> RestrictWorkspaceAdminsSetting
 
         Update the restrict workspace admins setting.
-
-Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in
-`PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request
-before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
-request must be retried by using the fresh etag in the 409 response.
-
-:param allow_missing: bool
-  This should always be set to true for Settings API. Added for AIP compliance.
-:param setting: :class:`RestrictWorkspaceAdminsSetting`
-:param field_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-
-:returns: :class:`RestrictWorkspaceAdminsSetting`
+        
+        Updates the restrict workspace admins setting for the workspace. A fresh etag needs to be provided in
+        `PATCH` requests (as part of the setting field). The etag can be retrieved by making a GET request
+        before the `PATCH` request. If the setting is updated concurrently, `PATCH` fails with 409 and the
+        request must be retried by using the fresh etag in the 409 response.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`RestrictWorkspaceAdminsSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`RestrictWorkspaceAdminsSetting`
+        
\ No newline at end of file
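The etag flow above (read, then write with the fresh etag, retry on 409) can be sketched as follows; `RestrictWorkspaceAdminsMessage`, its status enum, and the field mask value are assumed names from the generated `settings` module:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # Read first to obtain a fresh etag for optimistic concurrency control.
    current = w.settings.restrict_workspace_admins.get()

    # On a concurrent update the PATCH fails with 409; re-run the get/update pair
    # using the etag from the 409 response.
    w.settings.restrict_workspace_admins.update(
        allow_missing=True,
        field_mask="restrict_workspace_admins.status",
        setting=settings.RestrictWorkspaceAdminsSetting(
            etag=current.etag,
            restrict_workspace_admins=settings.RestrictWorkspaceAdminsMessage(
                status=settings.RestrictWorkspaceAdminsMessageStatus.RESTRICT_TOKENS_AND_JOB_RUN_AS)))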
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index 8338866ec..aa806280e 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -10,77 +10,77 @@
         :type: AibiDashboardEmbeddingAccessPolicyAPI
 
         Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
-    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+        workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
 
     .. py:property:: aibi_dashboard_embedding_approved_domains
         :type: AibiDashboardEmbeddingApprovedDomainsAPI
 
         Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
-    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+        can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
 
     .. py:property:: automatic_cluster_update
         :type: AutomaticClusterUpdateAPI
 
         Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
-    off.
+        off.
 
     .. py:property:: compliance_security_profile
         :type: ComplianceSecurityProfileAPI
 
         Controls whether to enable the compliance security profile for the current workspace. Enabling it on a
-    workspace is permanent. By default, it is turned off.
-    
-    This settings can NOT be disabled once it is enabled.
+        workspace is permanent. By default, it is turned off.
+        
+        This setting can NOT be disabled once it is enabled.
 
     .. py:property:: default_namespace
         :type: DefaultNamespaceAPI
 
         The default namespace setting API allows users to configure the default namespace for a Databricks
-    workspace.
-    
-    Through this API, users can retrieve, set, or modify the default namespace used when queries do not
-    reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
-    default catalog, then a query 'SELECT * FROM myTable' would reference the object
-    'retail_prod.default.myTable' (the schema 'default' is always assumed).
-    
-    This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
-    namespace only applies when using Unity Catalog-enabled compute.
+        workspace.
+        
+        Through this API, users can retrieve, set, or modify the default namespace used when queries do not
+        reference a fully qualified three-level name. For example, if you use the API to set 'retail_prod' as the
+        default catalog, then a query 'SELECT * FROM myTable' would reference the object
+        'retail_prod.default.myTable' (the schema 'default' is always assumed).
+        
+        This setting requires a restart of clusters and SQL warehouses to take effect. Additionally, the default
+        namespace only applies when using Unity Catalog-enabled compute.
 
     .. py:property:: disable_legacy_access
         :type: DisableLegacyAccessAPI
 
         'Disabling legacy access' has the following impacts:
-    
-    1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
-    Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
-    Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
-    Unity Catalog access on all path based access.
+        
+        1. Disables direct access to the Hive Metastore. However, you can still access Hive Metastore through HMS
+        Federation. 2. Disables Fallback Mode (docs link) on any External Location access from the workspace. 3.
+        Alters DBFS path access to use External Location permissions in place of legacy credentials. 4. Enforces
+        Unity Catalog access on all path based access.
 
     .. py:property:: disable_legacy_dbfs
         :type: DisableLegacyDbfsAPI
 
         When this setting is on, access to DBFS root and DBFS mounts is disallowed (as well as creation of new
-    mounts). When the setting is off, all DBFS functionality is enabled
+        mounts). When the setting is off, all DBFS functionality is enabled
 
     .. py:property:: enhanced_security_monitoring
         :type: EnhancedSecurityMonitoringAPI
 
         Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
-    security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
-    compliance security profile is enabled, this is automatically enabled.
-    
-    If the compliance security profile is disabled, you can enable or disable this setting and it is not
-    permanent.
+        security profile is enabled, this is automatically enabled. By default, it is disabled.
+        
+        If the compliance security profile is disabled, you can enable or disable this setting and it is not
+        permanent.
 
     .. py:property:: restrict_workspace_admins
         :type: RestrictWorkspaceAdminsAPI
 
         The Restrict Workspace Admins setting lets you control the capabilities of workspace admins. With the
-    setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
-    behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
-    user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
-    service principal on which they have the Service Principal User role. With the setting status set to
-    RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
-    service principals they have the Service Principal User role on. They can also only change a job owner to
-    themselves. And they can change the job run_as setting to themselves or to a service principal on which
-    they have the Service Principal User role.
\ No newline at end of file
+        setting status set to ALLOW_ALL, workspace admins can create service principal personal access tokens on
+        behalf of any service principal in their workspace. Workspace admins can also change a job owner to any
+        user in their workspace. And they can change the job run_as setting to any user in their workspace or to a
+        service principal on which they have the Service Principal User role. With the setting status set to
+        RESTRICT_TOKENS_AND_JOB_RUN_AS, workspace admins can only create personal access tokens on behalf of
+        service principals they have the Service Principal User role on. They can also only change a job owner to
+        themselves. And they can change the job run_as setting to themselves or to a service principal on which
+        they have the Service Principal User role.
\ No newline at end of file
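As a sketch of the default namespace behavior described above (assumed class names `DefaultNamespaceSetting` and `StringMessage`, and an illustrative field mask; check the generated `settings` module):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import settings

    w = WorkspaceClient()

    # After this update (and a cluster/warehouse restart), `SELECT * FROM myTable`
    # resolves to retail_prod.default.myTable on Unity Catalog-enabled compute.
    w.settings.default_namespace.update(
        allow_missing=True,
        field_mask="namespace.value",
        setting=settings.DefaultNamespaceSetting(
            namespace=settings.StringMessage(value="retail_prod")))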
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index a2fe7ddea..50dbe1328 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -5,7 +5,7 @@
 .. py:class:: TokenManagementAPI
 
     Enables administrators to get all tokens and delete tokens for other users. Admins can either get every
-token, get a specific token by ID, or get all tokens for a particular user.
+    token, get a specific token by ID, or get all tokens for a particular user.
 
     .. py:method:: create_obo_token(application_id: str [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateOboTokenResponse
 
@@ -33,30 +33,30 @@ token, get a specific token by ID, or get all tokens for a particular user.
             w.token_management.delete(token_id=obo.token_info.token_id)
 
         Create on-behalf token.
-
-Creates a token on behalf of a service principal.
-
-:param application_id: str
-  Application ID of the service principal.
-:param comment: str (optional)
-  Comment that describes the purpose of the token.
-:param lifetime_seconds: int (optional)
-  The number of seconds before the token expires.
-
-:returns: :class:`CreateOboTokenResponse`
-
+        
+        Creates a token on behalf of a service principal.
+        
+        :param application_id: str
+          Application ID of the service principal.
+        :param comment: str (optional)
+          Comment that describes the purpose of the token.
+        :param lifetime_seconds: int (optional)
+          The number of seconds before the token expires.
+        
+        :returns: :class:`CreateOboTokenResponse`
+        
 
     .. py:method:: delete(token_id: str)
 
         Delete a token.
-
-Deletes a token, specified by its ID.
-
-:param token_id: str
-  The ID of the token to revoke.
-
-
-
+        
+        Deletes a token, specified by its ID.
+        
+        :param token_id: str
+          The ID of the token to revoke.
+        
+        
+        
 
     .. py:method:: get(token_id: str) -> GetTokenResponse
 
@@ -86,32 +86,32 @@ Deletes a token, specified by its ID.
             w.token_management.delete(token_id=obo.token_info.token_id)
 
         Get token info.
-
-Gets information about a token, specified by its ID.
-
-:param token_id: str
-  The ID of the token to get.
-
-:returns: :class:`GetTokenResponse`
-
+        
+        Gets information about a token, specified by its ID.
+        
+        :param token_id: str
+          The ID of the token to get.
+        
+        :returns: :class:`GetTokenResponse`
+        
 
     .. py:method:: get_permission_levels() -> GetTokenPermissionLevelsResponse
 
         Get token permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:returns: :class:`GetTokenPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :returns: :class:`GetTokenPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions() -> TokenPermissions
 
         Get token permissions.
-
-Gets the permissions of all tokens. Tokens can inherit permissions from their root object.
-
-:returns: :class:`TokenPermissions`
-
+        
+        Gets the permissions of all tokens. Tokens can inherit permissions from their root object.
+        
+        :returns: :class:`TokenPermissions`
+        
 
     .. py:method:: list( [, created_by_id: Optional[int], created_by_username: Optional[str]]) -> Iterator[TokenInfo]
 
@@ -128,35 +128,36 @@ Gets the permissions of all tokens. Tokens can inherit permissions from their ro
             all = w.token_management.list(settings.ListTokenManagementRequest())
 
         List all tokens.
-
-Lists all tokens associated with the specified workspace or user.
-
-:param created_by_id: int (optional)
-  User ID of the user that created the token.
-:param created_by_username: str (optional)
-  Username of the user that created the token.
-
-:returns: Iterator over :class:`TokenInfo`
-
+        
+        Lists all tokens associated with the specified workspace or user.
+        
+        :param created_by_id: int (optional)
+          User ID of the user that created the token.
+        :param created_by_username: str (optional)
+          Username of the user that created the token.
+        
+        :returns: Iterator over :class:`TokenInfo`
+        
 
     .. py:method:: set_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions
 
         Set token permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-
-:returns: :class:`TokenPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
+        
+        :returns: :class:`TokenPermissions`
+        
 
     .. py:method:: update_permissions( [, access_control_list: Optional[List[TokenAccessControlRequest]]]) -> TokenPermissions
 
         Update token permissions.
-
-Updates the permissions on all tokens. Tokens can inherit permissions from their root object.
-
-:param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
-
-:returns: :class:`TokenPermissions`
+        
+        Updates the permissions on all tokens. Tokens can inherit permissions from their root object.
+        
+        :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
+        
+        :returns: :class:`TokenPermissions`
+        
\ No newline at end of file
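A minimal sketch of the on-behalf-of token flow documented above; the application ID is a placeholder for an existing service principal:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # application_id below is hypothetical; use a real service principal's ID.
    obo = w.token_management.create_obo_token(
        application_id="00000000-0000-0000-0000-000000000000",
        comment="automation token",
        lifetime_seconds=3600)

    # Admins can inspect and revoke any token by ID.
    info = w.token_management.get(token_id=obo.token_info.token_id)
    w.token_management.delete(token_id=obo.token_info.token_id)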
diff --git a/docs/workspace/settings/tokens.rst b/docs/workspace/settings/tokens.rst
index 273909746..899db00d1 100644
--- a/docs/workspace/settings/tokens.rst
+++ b/docs/workspace/settings/tokens.rst
@@ -5,7 +5,7 @@
 .. py:class:: TokensAPI
 
     The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access
-Databricks REST APIs.
+    Databricks REST APIs.
 
     .. py:method:: create( [, comment: Optional[str], lifetime_seconds: Optional[int]]) -> CreateTokenResponse
 
@@ -26,34 +26,34 @@ Databricks REST APIs.
             w.tokens.delete(token_id=token.token_info.token_id)
 
         Create a user token.
-
-Creates and returns a token for a user. If this call is made through token authentication, it creates
-a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
-this call returns an error **QUOTA_EXCEEDED**.
-
-:param comment: str (optional)
-  Optional description to attach to the token.
-:param lifetime_seconds: int (optional)
-  The lifetime of the token, in seconds.
-  
-  If the lifetime is not specified, this token remains valid indefinitely.
-
-:returns: :class:`CreateTokenResponse`
-
+        
+        Creates and returns a token for a user. If this call is made through token authentication, it creates
+        a token with the same client ID as the authenticated token. If the user's token quota is exceeded,
+        this call returns an error **QUOTA_EXCEEDED**.
+        
+        :param comment: str (optional)
+          Optional description to attach to the token.
+        :param lifetime_seconds: int (optional)
+          The lifetime of the token, in seconds.
+          
+          If the lifetime is not specified, this token remains valid indefinitely.
+        
+        :returns: :class:`CreateTokenResponse`
+        
 
     .. py:method:: delete(token_id: str)
 
         Revoke token.
-
-Revokes an access token.
-
-If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.
-
-:param token_id: str
-  The ID of the token to be revoked.
-
-
-
+        
+        Revokes an access token.
+        
+        If a token with the specified ID is not valid, this call returns an error **RESOURCE_DOES_NOT_EXIST**.
+        
+        :param token_id: str
+          The ID of the token to be revoked.
+        
+        
+        
 
     .. py:method:: list() -> Iterator[PublicTokenInfo]
 
@@ -69,7 +69,8 @@ If a token with the specified ID is not valid, this call returns an error **RESO
             all = w.tokens.list()
 
         List tokens.
-
-Lists all the valid tokens for a user-workspace pair.
-
-:returns: Iterator over :class:`PublicTokenInfo`
+        
+        Lists all the valid tokens for a user-workspace pair.
+        
+        :returns: Iterator over :class:`PublicTokenInfo`
+        
\ No newline at end of file
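The create/delete docstrings above compose into a short self-service sketch (assumes token-capable authentication):

    import time

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # Without lifetime_seconds the token never expires; prefer an explicit lifetime.
    token = w.tokens.create(comment=f"sdk-{time.time_ns()}", lifetime_seconds=300)

    # Revoking an unknown or already-revoked ID returns RESOURCE_DOES_NOT_EXIST.
    w.tokens.delete(token_id=token.token_info.token_id)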
diff --git a/docs/workspace/settings/workspace_conf.rst b/docs/workspace/settings/workspace_conf.rst
index a3533f564..3759de043 100644
--- a/docs/workspace/settings/workspace_conf.rst
+++ b/docs/workspace/settings/workspace_conf.rst
@@ -20,19 +20,20 @@
             conf = w.workspace_conf.get_status(keys="enableWorkspaceFilesystem")
 
         Check configuration status.
-
-Gets the configuration status for a workspace.
-
-:param keys: str
-
-:returns: Dict[str,str]
-
+        
+        Gets the configuration status for a workspace.
+        
+        :param keys: str
+        
+        :returns: Dict[str,str]
+        
 
     .. py:method:: set_status(contents: Dict[str, str])
 
         Enable/disable features.
-
-Sets the configuration status for a workspace, including enabling or disabling it.
-
-
-
+        
+        Sets the configuration status for a workspace, including enabling or disabling it.
+        
+        
+        
+        
\ No newline at end of file
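A sketch of the status round-trip described above; `enableIpAccessLists` is one example key (the IP access list docstrings point here for enabling that feature):

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # get_status returns the current value(s) for the given key as Dict[str, str].
    conf = w.workspace_conf.get_status(keys="enableIpAccessLists")

    # set_status enables/disables features; values are the strings "true"/"false".
    w.workspace_conf.set_status(contents={"enableIpAccessLists": "true"})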
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 13d7e037b..7d27acc3d 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -5,7 +5,7 @@
 .. py:class:: ProvidersAPI
 
     A data provider is an object representing the organization in the real world who shares the data. A
-provider contains shares which further contain the shared data.
+    provider contains shares which further contain the shared data.
 
     .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], recipient_profile_str: Optional[str]]) -> ProviderInfo
 
@@ -33,35 +33,35 @@ provider contains shares which further contain the shared data.
             w.providers.delete(name=created.name)
 
         Create an auth provider.
-
-Creates a new authentication provider minimally based on a name and authentication type. The caller
-must be an admin on the metastore.
-
-:param name: str
-  The name of the Provider.
-:param authentication_type: :class:`AuthenticationType`
-  The delta sharing authentication type.
-:param comment: str (optional)
-  Description about the provider.
-:param recipient_profile_str: str (optional)
-  This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
-  or not provided.
-
-:returns: :class:`ProviderInfo`
-
+        
+        Creates a new authentication provider minimally based on a name and authentication type. The caller
+        must be an admin on the metastore.
+        
+        :param name: str
+          The name of the Provider.
+        :param authentication_type: :class:`AuthenticationType`
+          The delta sharing authentication type.
+        :param comment: str (optional)
+          Description about the provider.
+        :param recipient_profile_str: str (optional)
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
+        
+        :returns: :class:`ProviderInfo`
+        
 
     .. py:method:: delete(name: str)
 
         Delete a provider.
-
-Deletes an authentication provider, if the caller is a metastore admin or is the owner of the
-provider.
-
-:param name: str
-  Name of the provider.
-
-
-
+        
+        Deletes an authentication provider, if the caller is a metastore admin or is the owner of the
+        provider.
+        
+        :param name: str
+          Name of the provider.
+        
+        
+        
 
     .. py:method:: get(name: str) -> ProviderInfo
 
@@ -91,15 +91,15 @@ provider.
             w.providers.delete(name=created.name)
 
         Get a provider.
-
-Gets a specific authentication provider. The caller must supply the name of the provider, and must
-either be a metastore admin or the owner of the provider.
-
-:param name: str
-  Name of the provider.
-
-:returns: :class:`ProviderInfo`
-
+        
+        Gets a specific authentication provider. The caller must supply the name of the provider, and must
+        either be a metastore admin or the owner of the provider.
+        
+        :param name: str
+          Name of the provider.
+        
+        :returns: :class:`ProviderInfo`
+        
 
     .. py:method:: list( [, data_provider_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderInfo]
 
@@ -116,27 +116,27 @@ either be a metastore admin or the owner of the provider.
             all = w.providers.list(sharing.ListProvidersRequest())
 
         List providers.
-
-Gets an array of available authentication providers. The caller must either be a metastore admin or
-the owner of the providers. Providers not owned by the caller are not included in the response. There
-is no guarantee of a specific ordering of the elements in the array.
-
-:param data_provider_global_metastore_id: str (optional)
-  If not provided, all providers will be returned. If no providers exist with this ID, no results will
-  be returned.
-:param max_results: int (optional)
-  Maximum number of providers to return. - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
-  number of returned providers might be less than the specified max_results size, even zero. The only
-  definitive indication that no further providers can be fetched is when the next_page_token is unset
-  from the response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ProviderInfo`
-
+        
+        Gets an array of available authentication providers. The caller must either be a metastore admin or
+        the owner of the providers. Providers not owned by the caller are not included in the response. There
+        is no guarantee of a specific ordering of the elements in the array.
+        
+        :param data_provider_global_metastore_id: str (optional)
+          If not provided, all providers will be returned. If no providers exist with this ID, no results will
+          be returned.
+        :param max_results: int (optional)
+          Maximum number of providers to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid providers are returned (not recommended). - Note: The
+          number of returned providers might be less than the specified max_results size, even zero. The only
+          definitive indication that no further providers can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ProviderInfo`
+        
 
     .. py:method:: list_shares(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ProviderShare]
 
@@ -166,26 +166,26 @@ is no guarantee of a specific ordering of the elements in the array.
             w.providers.delete(name=created.name)
 
         List shares by Provider.
-
-Gets an array of a specified provider's shares within the metastore where:
-
-* the caller is a metastore admin, or * the caller is the owner.
-
-:param name: str
-  Name of the provider in which to list shares.
-:param max_results: int (optional)
-  Maximum number of shares to return. - when set to 0, the page length is set to a server configured
-  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-  value and a server configured value; - when set to a value less than 0, an invalid parameter error
-  is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
-  returned shares might be less than the specified max_results size, even zero. The only definitive
-  indication that no further shares can be fetched is when the next_page_token is unset from the
-  response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ProviderShare`
-
+        
+        Gets an array of a specified provider's shares within the metastore where:
+        
+        * the caller is a metastore admin, or * the caller is the owner.
+        
+        :param name: str
+          Name of the provider in which to list shares.
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ProviderShare`
+        
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], recipient_profile_str: Optional[str]]) -> ProviderInfo
 
@@ -215,21 +215,22 @@ Gets an array of a specified provider's shares within the metastore where:
             w.providers.delete(name=created.name)
 
         Update a provider.
-
-Updates the information for an authentication provider, if the caller is a metastore admin or is the
-owner of the provider. If the update changes the provider name, the caller must be both a metastore
-admin and the owner of the provider.
-
-:param name: str
-  Name of the provider.
-:param comment: str (optional)
-  Description about the provider.
-:param new_name: str (optional)
-  New name for the provider.
-:param owner: str (optional)
-  Username of Provider owner.
-:param recipient_profile_str: str (optional)
-  This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
-  or not provided.
-
-:returns: :class:`ProviderInfo`
+        
+        Updates the information for an authentication provider, if the caller is a metastore admin or is the
+        owner of the provider. If the update changes the provider name, the caller must be both a metastore
+        admin and the owner of the provider.
+        
+        :param name: str
+          Name of the provider.
+        :param comment: str (optional)
+          Description about the provider.
+        :param new_name: str (optional)
+          New name for the provider.
+        :param owner: str (optional)
+          Username of Provider owner.
+        :param recipient_profile_str: str (optional)
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
+        
+        :returns: :class:`ProviderInfo`
+        
\ No newline at end of file
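A sketch of the provider lifecycle documented above; the Delta Sharing profile JSON is a placeholder, and `AuthenticationType` comes from `databricks.sdk.service.sharing`:

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    # recipient_profile_str is required for TOKEN authentication; this profile is fake.
    profile = """{
        "shareCredentialsVersion": 1,
        "bearerToken": "<redacted>",
        "endpoint": "https://sharing.example.com/delta-sharing/"
    }"""

    created = w.providers.create(name=f"sdk-{time.time_ns()}",
                                 authentication_type=sharing.AuthenticationType.TOKEN,
                                 recipient_profile_str=profile)

    # Paginated listing of the provider's shares, then cleanup.
    for share in w.providers.list_shares(name=created.name):
        print(share.name)
    w.providers.delete(name=created.name)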
diff --git a/docs/workspace/sharing/recipient_activation.rst b/docs/workspace/sharing/recipient_activation.rst
index 4ac315098..2c214d9c0 100644
--- a/docs/workspace/sharing/recipient_activation.rst
+++ b/docs/workspace/sharing/recipient_activation.rst
@@ -5,32 +5,33 @@
 .. py:class:: RecipientActivationAPI
 
     The Recipient Activation API is only applicable in the open sharing model where the recipient object has
-the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data
-provider to download the credential file that includes the access token. The recipient will then use the
-credential file to establish a secure connection with the provider to receive the shared data.
-
-Note that you can download the credential file only once. Recipients should treat the downloaded
-credential as a secret and must not share it outside of their organization.
+    the authentication type of `TOKEN`. The data recipient follows the activation link shared by the data
+    provider to download the credential file that includes the access token. The recipient will then use the
+    credential file to establish a secure connection with the provider to receive the shared data.
+    
+    Note that you can download the credential file only once. Recipients should treat the downloaded
+    credential as a secret and must not share it outside of their organization.
 
     .. py:method:: get_activation_url_info(activation_url: str)
 
         Get a share activation URL.
-
-Gets an activation URL for a share.
-
-:param activation_url: str
-  The one time activation url. It also accepts activation token.
-
-
-
+        
+        Gets an activation URL for a share.
+        
+        :param activation_url: str
+          The one-time activation URL. It also accepts an activation token.
+        
+        
+        
 
     .. py:method:: retrieve_token(activation_url: str) -> RetrieveTokenResponse
 
         Get an access token.
-
-Retrieve access token with an activation url. This is a public API without any authentication.
-
-:param activation_url: str
-  The one time activation url. It also accepts activation token.
-
-:returns: :class:`RetrieveTokenResponse`
+        
+        Retrieves an access token with an activation URL. This is a public API that requires no authentication.
+        
+        :param activation_url: str
+          The one-time activation URL. It also accepts an activation token.
+        
+        :returns: :class:`RetrieveTokenResponse`
+        
\ No newline at end of file
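A sketch of open-sharing activation from the recipient's side; the activation URL is a placeholder supplied by the data provider, and the credential behind it can be downloaded only once:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    activation_url = "https://example.com/delta_sharing/retrieve_config.html?<token>"  # placeholder

    # Inspect the activation link, then retrieve the token (no authentication needed).
    w.recipient_activation.get_activation_url_info(activation_url=activation_url)
    resp = w.recipient_activation.retrieve_token(activation_url=activation_url)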
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index b98291571..76e1da171 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -5,18 +5,18 @@
 .. py:class:: RecipientsAPI
 
     A recipient is an object you create using :method:recipients/create to represent an organization which you
-want to allow access shares. The way how sharing works differs depending on whether or not your recipient
-has access to a Databricks workspace that is enabled for Unity Catalog:
-
-- For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a
-recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier
-is the key identifier that enables the secure connection. This sharing mode is called
-**Databricks-to-Databricks sharing**.
-
-- For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you
-create a recipient object, Databricks generates an activation link you can send to the recipient. The
-recipient follows the activation link to download the credential file, and then uses the credential file
-to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
+    want to allow access to shares. How sharing works differs depending on whether or not your recipient
+    has access to a Databricks workspace that is enabled for Unity Catalog:
+    
+    - For recipients with access to a Databricks workspace that is enabled for Unity Catalog, you can create a
+    recipient object along with a unique sharing identifier you get from the recipient. The sharing identifier
+    is the key identifier that enables the secure connection. This sharing mode is called
+    **Databricks-to-Databricks sharing**.
+    
+    - For recipients without access to a Databricks workspace that is enabled for Unity Catalog, when you
+    create a recipient object, Databricks generates an activation link you can send to the recipient. The
+    recipient follows the activation link to download the credential file, and then uses the credential file
+    to establish a secure connection to receive the shared data. This sharing mode is called **open sharing**.
 
     .. py:method:: create(name: str, authentication_type: AuthenticationType [, comment: Optional[str], data_recipient_global_metastore_id: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs], sharing_code: Optional[str]]) -> RecipientInfo
 
@@ -37,48 +37,48 @@ to establish a secure connection to receive the shared data. This sharing mode i
             w.recipients.delete(name=created.name)
 
         Create a share recipient.
-
-Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
-
-:param name: str
-  Name of Recipient.
-:param authentication_type: :class:`AuthenticationType`
-  The delta sharing authentication type.
-:param comment: str (optional)
-  Description about the recipient.
-:param data_recipient_global_metastore_id: str (optional)
-  The global Unity Catalog metastore id provided by the data recipient. This field is only present
-  when the __authentication_type__ is **DATABRICKS**. The identifier is of format
-  __cloud__:__region__:__metastore-uuid__.
-:param expiration_time: int (optional)
-  Expiration timestamp of the token, in epoch milliseconds.
-:param ip_access_list: :class:`IpAccessList` (optional)
-  IP Access List
-:param owner: str (optional)
-  Username of the recipient owner.
-:param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-  Recipient properties as map of string key-value pairs. When provided in update request, the
-  specified properties will override the existing properties. To add and remove properties, one would
-  need to perform a read-modify-write.
-:param sharing_code: str (optional)
-  The one-time sharing code provided by the data recipient. This field is only present when the
-  __authentication_type__ is **DATABRICKS**.
-
-:returns: :class:`RecipientInfo`
-
+        
+        Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
+        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
+        
+        :param name: str
+          Name of Recipient.
+        :param authentication_type: :class:`AuthenticationType`
+          The delta sharing authentication type.
+        :param comment: str (optional)
+          Description about the recipient.
+        :param data_recipient_global_metastore_id: str (optional)
+          The global Unity Catalog metastore id provided by the data recipient. This field is only present
+          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
+          __cloud__:__region__:__metastore-uuid__.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
+        :param ip_access_list: :class:`IpAccessList` (optional)
+          IP Access List
+        :param owner: str (optional)
+          Username of the recipient owner.
+        :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
+          Recipient properties as a map of string key-value pairs. When provided in an update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
+        :param sharing_code: str (optional)
+          The one-time sharing code provided by the data recipient. This field is only present when the
+          __authentication_type__ is **DATABRICKS**.
+        
+        :returns: :class:`RecipientInfo`
+        
 
     .. py:method:: delete(name: str)
 
         Delete a share recipient.
-
-Deletes the specified recipient from the metastore. The caller must be the owner of the recipient.
-
-:param name: str
-  Name of the recipient.
-
-
-
+        
+        Deletes the specified recipient from the metastore. The caller must be the owner of the recipient.
+        
+        :param name: str
+          Name of the recipient.
+        
+        
+        
 
     .. py:method:: get(name: str) -> RecipientInfo
 
@@ -101,16 +101,16 @@ Deletes the specified recipient from the metastore. The caller must be the owner
             w.recipients.delete(name=created.name)
 
         Get a share recipient.
-
-Gets a share recipient from the metastore if:
-
-* the caller is the owner of the share recipient, or: * is a metastore admin
-
-:param name: str
-  Name of the recipient.
-
-:returns: :class:`RecipientInfo`
-
+        
+        Gets a share recipient from the metastore if:
+        
+        * the caller is the owner of the share recipient, or * the caller is a metastore admin
+        
+        :param name: str
+          Name of the recipient.
+        
+        :returns: :class:`RecipientInfo`
+        
 
     .. py:method:: list( [, data_recipient_global_metastore_id: Optional[str], max_results: Optional[int], page_token: Optional[str]]) -> Iterator[RecipientInfo]
 
@@ -127,28 +127,28 @@ Gets a share recipient from the metastore if:
             all = w.recipients.list(sharing.ListRecipientsRequest())
 
         List share recipients.
-
-Gets an array of all share recipients within the current metastore where:
-
-* the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific
-ordering of the elements in the array.
-
-:param data_recipient_global_metastore_id: str (optional)
-  If not provided, all recipients will be returned. If no recipients exist with this ID, no results
-  will be returned.
-:param max_results: int (optional)
-  Maximum number of recipients to return. - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
-  number of returned recipients might be less than the specified max_results size, even zero. The only
-  definitive indication that no further recipients can be fetched is when the next_page_token is unset
-  from the response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`RecipientInfo`
-
+        
+        Gets an array of all share recipients within the current metastore where:
+        
+        * the caller is a metastore admin, or * the caller is the owner. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param data_recipient_global_metastore_id: str (optional)
+          If not provided, all recipients will be returned. If no recipients exist with this ID, no results
+          will be returned.
+        :param max_results: int (optional)
+          Maximum number of recipients to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid recipients are returned (not recommended). - Note: The
+          number of returned recipients might be less than the specified max_results size, even zero. The only
+          definitive indication that no further recipients can be fetched is when the next_page_token is unset
+          from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`RecipientInfo`
+        
 
     .. py:method:: rotate_token(name: str, existing_token_expire_in_seconds: int) -> RecipientInfo
 
@@ -171,19 +171,19 @@ ordering of the elements in the array.
             w.recipients.delete(name=created.name)
 
         Rotate a token.
-
-Refreshes the specified recipient's delta sharing authentication token with the provided token info.
-The caller must be the owner of the recipient.
-
-:param name: str
-  The name of the Recipient.
-:param existing_token_expire_in_seconds: int
-  The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of
-  existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire
-  the existing token immediately, negative number will return an error.
-
-:returns: :class:`RecipientInfo`
-
+        
+        Refreshes the specified recipient's delta sharing authentication token with the provided token info.
+        The caller must be the owner of the recipient.
+        
+        :param name: str
+          The name of the Recipient.
+        :param existing_token_expire_in_seconds: int
+          The expiration time of the bearer token, in seconds. This will set the expiration_time of the
+          existing token only to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire
+          the existing token immediately; a negative number will return an error.
+        
+        :returns: :class:`RecipientInfo`
+        
 
     .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> GetRecipientSharePermissionsResponse
 
@@ -206,25 +206,25 @@ The caller must be the owner of the recipient.
             w.recipients.delete(name=created.name)
 
         Get recipient share permissions.
-
-Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
-owner of the Recipient.
-
-:param name: str
-  The name of the Recipient.
-:param max_results: int (optional)
-  Maximum number of permissions to return. - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-  number of returned permissions might be less than the specified max_results size, even zero. The
-  only definitive indication that no further permissions can be fetched is when the next_page_token is
-  unset from the response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: :class:`GetRecipientSharePermissionsResponse`
-
+        
+        Gets the share permissions for the specified Recipient. The caller must be a metastore admin or the
+        owner of the Recipient.
+        
+        :param name: str
+          The name of the Recipient.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: :class:`GetRecipientSharePermissionsResponse`
+        
 
     .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) -> RecipientInfo
 
@@ -247,26 +247,27 @@ owner of the Recipient.
             w.recipients.delete(name=created.name)
 
         Update a share recipient.
-
-Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
-the recipient. If the recipient name will be updated, the user must be both a metastore admin and the
-owner of the recipient.
-
-:param name: str
-  Name of the recipient.
-:param comment: str (optional)
-  Description about the recipient.
-:param expiration_time: int (optional)
-  Expiration timestamp of the token, in epoch milliseconds.
-:param ip_access_list: :class:`IpAccessList` (optional)
-  IP Access List
-:param new_name: str (optional)
-  New name for the recipient. .
-:param owner: str (optional)
-  Username of the recipient owner.
-:param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-  Recipient properties as map of string key-value pairs. When provided in update request, the
-  specified properties will override the existing properties. To add and remove properties, one would
-  need to perform a read-modify-write.
-
-:returns: :class:`RecipientInfo`
+        
+        Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
+        the recipient. If the recipient name is updated, the user must be both a metastore admin and the
+        owner of the recipient.
+        
+        :param name: str
+          Name of the recipient.
+        :param comment: str (optional)
+          Description about the recipient.
+        :param expiration_time: int (optional)
+          Expiration timestamp of the token, in epoch milliseconds.
+        :param ip_access_list: :class:`IpAccessList` (optional)
+          IP Access List
+        :param new_name: str (optional)
+          New name for the recipient.
+        :param owner: str (optional)
+          Username of the recipient owner.
+        :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
+          Recipient properties as a map of string key-value pairs. When provided in an update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
+        
+        :returns: :class:`RecipientInfo`
+        
\ No newline at end of file
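A sketch that ties the create/rotate_token docstrings above together, assuming open sharing (`AuthenticationType.TOKEN`); the recipient name is illustrative::

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    # Open-sharing recipient: Databricks generates an activation link for it.
    created = w.recipients.create(name=f"sdk-{time.time_ns()}",
                                  authentication_type=sharing.AuthenticationType.TOKEN)

    # Expire the current token immediately and issue a fresh one; per the
    # docstring, the expiration can only ever be moved earlier, never later.
    rotated = w.recipients.rotate_token(name=created.name,
                                        existing_token_expire_in_seconds=0)

    w.recipients.delete(name=created.name)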
diff --git a/docs/workspace/sharing/shares.rst b/docs/workspace/sharing/shares.rst
index f1c10ebeb..4d14b811d 100644
--- a/docs/workspace/sharing/shares.rst
+++ b/docs/workspace/sharing/shares.rst
@@ -5,9 +5,9 @@
 .. py:class:: SharesAPI
 
     A share is a container instantiated with :method:shares/create. Once created you can iteratively register
-a collection of existing data assets defined within the metastore using :method:shares/update. You can
-register data assets under their original name, qualified by their original schema, or provide alternate
-exposed names.
+    a collection of existing data assets defined within the metastore using :method:shares/update. You can
+    register data assets under their original name, qualified by their original schema, or provide alternate
+    exposed names.
 
     .. py:method:: create(name: str [, comment: Optional[str], storage_root: Optional[str]]) -> ShareInfo
 
@@ -28,31 +28,31 @@ exposed names.
             w.shares.delete(name=created_share.name)
 
         Create a share.
-
-Creates a new share for data objects. Data objects can be added after creation with **update**. The
-caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.
-
-:param name: str
-  Name of the share.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param storage_root: str (optional)
-  Storage root URL for the share.
-
-:returns: :class:`ShareInfo`
-
+        
+        Creates a new share for data objects. Data objects can be added after creation with **update**. The
+        caller must be a metastore admin or have the **CREATE_SHARE** privilege on the metastore.
+        
+        :param name: str
+          Name of the share.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param storage_root: str (optional)
+          Storage root URL for the share.
+        
+        :returns: :class:`ShareInfo`
+        
 
     .. py:method:: delete(name: str)
 
         Delete a share.
-
-Deletes a data object share from the metastore. The caller must be an owner of the share.
-
-:param name: str
-  The name of the share.
-
-
-
+        
+        Deletes a data object share from the metastore. The caller must be an owner of the share.
+        
+        :param name: str
+          The name of the share.
+        
+        
+        
 
     .. py:method:: get(name: str [, include_shared_data: Optional[bool]]) -> ShareInfo
 
@@ -75,17 +75,17 @@ Deletes a data object share from the metastore. The caller must be an owner of t
             w.shares.delete(name=created_share.name)
 
         Get a share.
-
-Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the
-share.
-
-:param name: str
-  The name of the share.
-:param include_shared_data: bool (optional)
-  Query for data to include in the share.
-
-:returns: :class:`ShareInfo`
-
+        
+        Gets a data object share from the metastore. The caller must be a metastore admin or the owner of the
+        share.
+        
+        :param name: str
+          The name of the share.
+        :param include_shared_data: bool (optional)
+          Query for data to include in the share.
+        
+        :returns: :class:`ShareInfo`
+        
 
     .. py:method:: list( [, max_results: Optional[int], page_token: Optional[str]]) -> Iterator[ShareInfo]
 
@@ -102,46 +102,46 @@ share.
             all = w.shares.list(sharing.ListSharesRequest())
 
         List shares.
-
-Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
-owner of the share. There is no guarantee of a specific ordering of the elements in the array.
-
-:param max_results: int (optional)
-  Maximum number of shares to return. - when set to 0, the page length is set to a server configured
-  value (recommended); - when set to a value greater than 0, the page length is the minimum of this
-  value and a server configured value; - when set to a value less than 0, an invalid parameter error
-  is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
-  returned shares might be less than the specified max_results size, even zero. The only definitive
-  indication that no further shares can be fetched is when the next_page_token is unset from the
-  response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: Iterator over :class:`ShareInfo`
-
+        
+        Gets an array of data object shares from the metastore. The caller must be a metastore admin or the
+        owner of the share. There is no guarantee of a specific ordering of the elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of shares to return. - when set to 0, the page length is set to a server configured
+          value (recommended); - when set to a value greater than 0, the page length is the minimum of this
+          value and a server configured value; - when set to a value less than 0, an invalid parameter error
+          is returned; - If not set, all valid shares are returned (not recommended). - Note: The number of
+          returned shares might be less than the specified max_results size, even zero. The only definitive
+          indication that no further shares can be fetched is when the next_page_token is unset from the
+          response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`ShareInfo`
+        
 
     .. py:method:: share_permissions(name: str [, max_results: Optional[int], page_token: Optional[str]]) -> catalog.PermissionsList
 
         Get permissions.
-
-Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
-owner of the share.
-
-:param name: str
-  The name of the share.
-:param max_results: int (optional)
-  Maximum number of permissions to return. - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-  number of returned permissions might be less than the specified max_results size, even zero. The
-  only definitive indication that no further permissions can be fetched is when the next_page_token is
-  unset from the response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-:returns: :class:`PermissionsList`
-
+        
+        Gets the permissions for a data share from the metastore. The caller must be a metastore admin or the
+        owner of the share.
+        
+        :param name: str
+          The name of the share.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: :class:`PermissionsList`
+        
 
     .. py:method:: update(name: str [, comment: Optional[str], new_name: Optional[str], owner: Optional[str], storage_root: Optional[str], updates: Optional[List[SharedDataObjectUpdate]]]) -> ShareInfo
 
@@ -189,62 +189,63 @@ owner of the share.
             w.shares.delete(name=created_share.name)
 
         Update a share.
-
-Updates the share with the changes and data objects in the request. The caller must be the owner of
-the share or a metastore admin.
-
-When the caller is a metastore admin, only the __owner__ field can be updated.
-
-In the case that the share name is changed, **updateShare** requires that the caller is both the share
-owner and a metastore admin.
-
-If there are notebook files in the share, the __storage_root__ field cannot be updated.
-
-For each table that is added through this method, the share owner must also have **SELECT** privilege
-on the table. This privilege must be maintained indefinitely for recipients to be able to access the
-table. Typically, you should use a group as the share owner.
-
-Table removals through **update** do not require additional privileges.
-
-:param name: str
-  The name of the share.
-:param comment: str (optional)
-  User-provided free-form text description.
-:param new_name: str (optional)
-  New name for the share.
-:param owner: str (optional)
-  Username of current owner of share.
-:param storage_root: str (optional)
-  Storage root URL for the share.
-:param updates: List[:class:`SharedDataObjectUpdate`] (optional)
-  Array of shared data object updates.
-
-:returns: :class:`ShareInfo`
-
+        
+        Updates the share with the changes and data objects in the request. The caller must be the owner of
+        the share or a metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        If the share name is changed, **updateShare** requires that the caller is both the share
+        owner and a metastore admin.
+        
+        If there are notebook files in the share, the __storage_root__ field cannot be updated.
+        
+        For each table that is added through this method, the share owner must also have **SELECT** privilege
+        on the table. This privilege must be maintained indefinitely for recipients to be able to access the
+        table. Typically, you should use a group as the share owner.
+        
+        Table removals through **update** do not require additional privileges.
+        
+        :param name: str
+          The name of the share.
+        :param comment: str (optional)
+          User-provided free-form text description.
+        :param new_name: str (optional)
+          New name for the share.
+        :param owner: str (optional)
+          Username of current owner of share.
+        :param storage_root: str (optional)
+          Storage root URL for the share.
+        :param updates: List[:class:`SharedDataObjectUpdate`] (optional)
+          Array of shared data object updates.
+        
+        :returns: :class:`ShareInfo`
+        
 
     .. py:method:: update_permissions(name: str [, changes: Optional[List[catalog.PermissionsChange]], max_results: Optional[int], page_token: Optional[str]])
 
         Update permissions.
-
-Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
-owner of the share.
-
-For new recipient grants, the user must also be the owner of the recipients. recipient revocations do
-not require additional privileges.
-
-:param name: str
-  The name of the share.
-:param changes: List[:class:`PermissionsChange`] (optional)
-  Array of permission changes.
-:param max_results: int (optional)
-  Maximum number of permissions to return. - when set to 0, the page length is set to a server
-  configured value (recommended); - when set to a value greater than 0, the page length is the minimum
-  of this value and a server configured value; - when set to a value less than 0, an invalid parameter
-  error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
-  number of returned permissions might be less than the specified max_results size, even zero. The
-  only definitive indication that no further permissions can be fetched is when the next_page_token is
-  unset from the response.
-:param page_token: str (optional)
-  Opaque pagination token to go to next page based on previous query.
-
-
+        
+        Updates the permissions for a data share in the metastore. The caller must be a metastore admin or an
+        owner of the share.
+        
+        For new recipient grants, the user must also be the owner of the recipients. Recipient revocations do
+        not require additional privileges.
+        
+        :param name: str
+          The name of the share.
+        :param changes: List[:class:`PermissionsChange`] (optional)
+          Array of permission changes.
+        :param max_results: int (optional)
+          Maximum number of permissions to return. - when set to 0, the page length is set to a server
+          configured value (recommended); - when set to a value greater than 0, the page length is the minimum
+          of this value and a server configured value; - when set to a value less than 0, an invalid parameter
+          error is returned; - If not set, all valid permissions are returned (not recommended). - Note: The
+          number of returned permissions might be less than the specified max_results size, even zero. The
+          only definitive indication that no further permissions can be fetched is when the next_page_token is
+          unset from the response.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        
+        
\ No newline at end of file
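To make the **update** semantics above concrete, a sketch that adds one table to a share; the fully qualified table name is a placeholder, and the caller is assumed to hold **SELECT** on it. The `data_object_type="TABLE"` shape follows the update example earlier on this page::

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sharing

    w = WorkspaceClient()

    share = w.shares.create(name=f"sdk-{time.time_ns()}")

    # Register an existing table under the share. Removals use action=REMOVE
    # and, per the docstring, need no extra privileges.
    w.shares.update(name=share.name,
                    updates=[
                        sharing.SharedDataObjectUpdate(
                            action=sharing.SharedDataObjectUpdateAction.ADD,
                            data_object=sharing.SharedDataObject(
                                name="main.default.my_table",  # placeholder
                                data_object_type="TABLE")),
                    ])

    w.shares.delete(name=share.name)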
diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst
index b209b1047..c8d9c31ab 100644
--- a/docs/workspace/sql/alerts.rst
+++ b/docs/workspace/sql/alerts.rst
@@ -5,9 +5,9 @@
 .. py:class:: AlertsAPI
 
     The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
-periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
-notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
-the Jobs API, e.g. :method:jobs/create.
+    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+    the Jobs API, e.g. :method:jobs/create.
 
     .. py:method:: create( [, alert: Optional[CreateAlertRequestAlert]]) -> Alert
 
@@ -45,26 +45,26 @@ the Jobs API, e.g. :method:jobs/create.
             w.alerts.delete(id=alert.id)
 
         Create an alert.
-
-Creates an alert.
-
-:param alert: :class:`CreateAlertRequestAlert` (optional)
-
-:returns: :class:`Alert`
-
+        
+        Creates an alert.
+        
+        :param alert: :class:`CreateAlertRequestAlert` (optional)
+        
+        :returns: :class:`Alert`
+        
 
     .. py:method:: delete(id: str)
 
         Delete an alert.
-
-Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
-can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
-deleted after 30 days.
-
-:param id: str
-
-
-
+        
+        Moves an alert to the trash. Trashed alerts immediately disappear from searches and list views, and
+        can no longer trigger. You can restore a trashed alert through the UI. A trashed alert is permanently
+        deleted after 30 days.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> Alert
 
@@ -104,13 +104,13 @@ deleted after 30 days.
             w.alerts.delete(id=alert.id)
 
         Get an alert.
-
-Gets an alert.
-
-:param id: str
-
-:returns: :class:`Alert`
-
+        
+        Gets an alert.
+        
+        :param id: str
+        
+        :returns: :class:`Alert`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListAlertsResponseAlert]
 
@@ -127,15 +127,15 @@ Gets an alert.
             all = w.alerts.list(sql.ListAlertsRequest())
 
         List alerts.
-
-Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
-concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ListAlertsResponseAlert`
-
+        
+        Gets a list of alerts accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListAlertsResponseAlert`
+        
 
     .. py:method:: update(id: str, update_mask: str [, alert: Optional[UpdateAlertRequestAlert]]) -> Alert
 
@@ -177,20 +177,21 @@ concurrently 10 or more times could result in throttling, service degradation, o
             w.alerts.delete(id=alert.id)
 
         Update an alert.
-
-Updates an alert.
-
-:param id: str
-:param update_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-:param alert: :class:`UpdateAlertRequestAlert` (optional)
-
-:returns: :class:`Alert`
+        
+        Updates an alert.
+        
+        :param id: str
+        :param update_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        :param alert: :class:`UpdateAlertRequestAlert` (optional)
+        
+        :returns: :class:`Alert`
+        
\ No newline at end of file
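The `update_mask` contract above is easy to get wrong; a sketch of a rename-only update, where only the masked field is replaced (the alert id is a placeholder)::

    import time

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # Only fields named in update_mask are replaced; everything else on the
    # alert is left untouched. Avoid the `*` wildcard for the reasons given
    # in the docstring.
    alert = w.alerts.update(id="<alert-id>",  # placeholder
                            update_mask="display_name",
                            alert=sql.UpdateAlertRequestAlert(
                                display_name=f"sdk-{time.time_ns()}"))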
diff --git a/docs/workspace/sql/alerts_legacy.rst b/docs/workspace/sql/alerts_legacy.rst
index e5f11673e..6dfd96128 100644
--- a/docs/workspace/sql/alerts_legacy.rst
+++ b/docs/workspace/sql/alerts_legacy.rst
@@ -5,109 +5,110 @@
 .. py:class:: AlertsLegacyAPI
 
     The alerts API can be used to perform CRUD operations on alerts. An alert is a Databricks SQL object that
-periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
-notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
-the Jobs API, e.g. :method:jobs/create.
-
-**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+    periodically runs a query, evaluates a condition of its result, and notifies one or more users and/or
+    notification destinations if the condition was met. Alerts can be scheduled using the `sql_task` type of
+    the Jobs API, e.g. :method:jobs/create.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create(name: str, options: AlertOptions, query_id: str [, parent: Optional[str], rearm: Optional[int]]) -> LegacyAlert
 
         Create an alert.
-
-Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
-condition of its result, and notifies users or notification destinations if the condition was met.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param name: str
-  Name of the alert.
-:param options: :class:`AlertOptions`
-  Alert configuration options.
-:param query_id: str
-  Query ID.
-:param parent: str (optional)
-  The identifier of the workspace folder containing the object.
-:param rearm: int (optional)
-  Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-  If `null`, alert will never be triggered again.
-
-:returns: :class:`LegacyAlert`
-
+        
+        Creates an alert. An alert is a Databricks SQL object that periodically runs a query, evaluates a
+        condition of its result, and notifies users or notification destinations if the condition was met.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/create
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param name: str
+          Name of the alert.
+        :param options: :class:`AlertOptions`
+          Alert configuration options.
+        :param query_id: str
+          Query ID.
+        :param parent: str (optional)
+          The identifier of the workspace folder containing the object.
+        :param rearm: int (optional)
+          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+          If `null`, alert will never be triggered again.
+        
+        :returns: :class:`LegacyAlert`
+        
 
     .. py:method:: delete(alert_id: str)
 
         Delete an alert.
-
-Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
-queries and dashboards, alerts cannot be moved to the trash.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param alert_id: str
-
-
-
+        
+        Deletes an alert. Deleted alerts are no longer accessible and cannot be restored. **Note**: Unlike
+        queries and dashboards, alerts cannot be moved to the trash.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/delete
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        
+        
+        
 
     .. py:method:: get(alert_id: str) -> LegacyAlert
 
         Get an alert.
-
-Gets an alert.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param alert_id: str
-
-:returns: :class:`LegacyAlert`
-
+        
+        Gets an alert.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/get
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        
+        :returns: :class:`LegacyAlert`
+        
 
     .. py:method:: list() -> Iterator[LegacyAlert]
 
         Get alerts.
-
-Gets a list of alerts.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:returns: Iterator over :class:`LegacyAlert`
-
+        
+        Gets a list of alerts.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/list
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :returns: Iterator over :class:`LegacyAlert`
+        
 
     .. py:method:: update(alert_id: str, name: str, options: AlertOptions, query_id: str [, rearm: Optional[int]])
 
         Update an alert.
-
-Updates an alert.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param alert_id: str
-:param name: str
-  Name of the alert.
-:param options: :class:`AlertOptions`
-  Alert configuration options.
-:param query_id: str
-  Query ID.
-:param rearm: int (optional)
-  Number of seconds after being triggered before the alert rearms itself and can be triggered again.
-  If `null`, alert will never be triggered again.
-
-
+        
+        Updates an alert.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:alerts/update
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param alert_id: str
+        :param name: str
+          Name of the alert.
+        :param options: :class:`AlertOptions`
+          Alert configuration options.
+        :param query_id: str
+          Query ID.
+        :param rearm: int (optional)
+          Number of seconds after being triggered before the alert rearms itself and can be triggered again.
+          If `null`, alert will never be triggered again.
+        
+        
+        
\ No newline at end of file
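For completeness, a sketch of the legacy `rearm` behaviour described above; as every docstring here notes, new code should prefer :method:alerts/create, so treat this purely as a reading aid. The query id is a placeholder and the :class:`AlertOptions` field values are assumptions::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # rearm=600: once triggered, the alert stays quiet for ten minutes before
    # it can fire again; leaving rearm unset means it triggers only once.
    legacy = w.alerts_legacy.create(name="cpu-high",
                                    options=sql.AlertOptions(column="value",
                                                             op=">",
                                                             value="90"),
                                    query_id="<query-id>",  # placeholder
                                    rearm=600)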
diff --git a/docs/workspace/sql/dashboard_widgets.rst b/docs/workspace/sql/dashboard_widgets.rst
index df6a37f35..d4bbcde1d 100644
--- a/docs/workspace/sql/dashboard_widgets.rst
+++ b/docs/workspace/sql/dashboard_widgets.rst
@@ -5,51 +5,52 @@
 .. py:class:: DashboardWidgetsAPI
 
     This is an evolving API that facilitates the addition and removal of widgets from existing dashboards
-within the Databricks Workspace. Data structures may change over time.
+    within the Databricks Workspace. Data structures may change over time.
 
     .. py:method:: create(dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
 
         Add widget to a dashboard.
-
-:param dashboard_id: str
-  Dashboard ID returned by :method:dashboards/create.
-:param options: :class:`WidgetOptions`
-:param width: int
-  Width of a widget
-:param text: str (optional)
-  If this is a textbox widget, the application displays this text. This field is ignored if the widget
-  contains a visualization in the `visualization` field.
-:param visualization_id: str (optional)
-  Query Vizualization ID returned by :method:queryvisualizations/create.
-
-:returns: :class:`Widget`
-
+        
+        :param dashboard_id: str
+          Dashboard ID returned by :method:dashboards/create.
+        :param options: :class:`WidgetOptions`
+        :param width: int
+          Width of a widget
+        :param text: str (optional)
+          If this is a textbox widget, the application displays this text. This field is ignored if the widget
+          contains a visualization in the `visualization` field.
+        :param visualization_id: str (optional)
+          Query Visualization ID returned by :method:queryvisualizations/create.
+        
+        :returns: :class:`Widget`
+        
 
     .. py:method:: delete(id: str)
 
         Remove widget.
-
-:param id: str
-  Widget ID returned by :method:dashboardwidgets/create
-
-
-
+        
+        :param id: str
+          Widget ID returned by :method:dashboardwidgets/create
+        
+        
+        
 
     .. py:method:: update(id: str, dashboard_id: str, options: WidgetOptions, width: int [, text: Optional[str], visualization_id: Optional[str]]) -> Widget
 
         Update existing widget.
-
-:param id: str
-  Widget ID returned by :method:dashboardwidgets/create
-:param dashboard_id: str
-  Dashboard ID returned by :method:dashboards/create.
-:param options: :class:`WidgetOptions`
-:param width: int
-  Width of a widget
-:param text: str (optional)
-  If this is a textbox widget, the application displays this text. This field is ignored if the widget
-  contains a visualization in the `visualization` field.
-:param visualization_id: str (optional)
-  Query Vizualization ID returned by :method:queryvisualizations/create.
-
-:returns: :class:`Widget`
+        
+        :param id: str
+          Widget ID returned by :method:dashboardwidgets/create
+        :param dashboard_id: str
+          Dashboard ID returned by :method:dashboards/create.
+        :param options: :class:`WidgetOptions`
+        :param width: int
+          Width of a widget
+        :param text: str (optional)
+          If this is a textbox widget, the application displays this text. This field is ignored if the widget
+          contains a visualization in the `visualization` field.
+        :param visualization_id: str (optional)
+          Query Visualization ID returned by :method:queryvisualizations/create.
+        
+        :returns: :class:`Widget`
+        
\ No newline at end of file
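A sketch of the widget lifecycle documented above, using a textbox widget so no visualization is needed; the dashboard id is a placeholder::

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import sql

    w = WorkspaceClient()

    # A textbox widget only needs `text`; a chart widget would instead set
    # `visualization_id` from :method:queryvisualizations/create.
    widget = w.dashboard_widgets.create(dashboard_id="<dashboard-id>",  # placeholder
                                        options=sql.WidgetOptions(),
                                        width=1,
                                        text="Hello from the SDK")

    w.dashboard_widgets.delete(id=widget.id)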
diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst
index 2b44a1edd..f22c7c96b 100644
--- a/docs/workspace/sql/dashboards.rst
+++ b/docs/workspace/sql/dashboards.rst
@@ -5,10 +5,10 @@
 .. py:class:: DashboardsAPI
 
     In general, there is little need to modify dashboards using the API. However, it can be useful to use
-dashboard objects to look-up a collection of related query IDs. The API can also be used to duplicate
-multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it
-to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g.
-:method:jobs/create.
+    dashboard objects to look up a collection of related query IDs. The API can also be used to duplicate
+    multiple dashboards at once since you can get a dashboard definition with a GET request and then POST it
+    to create a new one. Dashboards can be scheduled using the `sql_task` type of the Jobs API, e.g.
+    :method:jobs/create.
 
     .. py:method:: create(name: str [, dashboard_filters_enabled: Optional[bool], is_favorite: Optional[bool], parent: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard
 
@@ -29,22 +29,22 @@ to create a new one. Dashboards can be scheduled using the `sql_task` type of th
             w.dashboards.delete(dashboard_id=created.id)
 
         Create a dashboard object.
-
-:param name: str
-  The title of this dashboard that appears in list views and at the top of the dashboard page.
-:param dashboard_filters_enabled: bool (optional)
-  Indicates whether the dashboard filters are enabled
-:param is_favorite: bool (optional)
-  Indicates whether this dashboard object should appear in the current user's favorites list.
-:param parent: str (optional)
-  The identifier of the workspace folder containing the object.
-:param run_as_role: :class:`RunAsRole` (optional)
-  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-:param tags: List[str] (optional)
-
-:returns: :class:`Dashboard`
-
+        
+        :param name: str
+          The title of this dashboard that appears in list views and at the top of the dashboard page.
+        :param dashboard_filters_enabled: bool (optional)
+          Indicates whether the dashboard filters are enabled
+        :param is_favorite: bool (optional)
+          Indicates whether this dashboard object should appear in the current user's favorites list.
+        :param parent: str (optional)
+          The identifier of the workspace folder containing the object.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: delete(dashboard_id: str)
 
@@ -67,14 +67,14 @@ to create a new one. Dashboards can be scheduled using the `sql_task` type of th
             w.dashboards.delete(dashboard_id=created.id)
 
         Remove a dashboard.
-
-Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot
-be shared.
-
-:param dashboard_id: str
-
-
-
+        
+        Moves a dashboard to the trash. Trashed dashboards do not appear in list views or searches, and cannot
+        be shared.
+        
+        :param dashboard_id: str
+        
+        
+        
 
     .. py:method:: get(dashboard_id: str) -> Dashboard
 
@@ -97,13 +97,13 @@ be shared.
             w.dashboards.delete(dashboard_id=created.id)
 
         Retrieve a definition.
-
-Returns a JSON representation of a dashboard object, including its visualization and query objects.
-
-:param dashboard_id: str
-
-:returns: :class:`Dashboard`
-
+        
+        Returns a JSON representation of a dashboard object, including its visualization and query objects.
+        
+        :param dashboard_id: str
+        
+        :returns: :class:`Dashboard`
+        
 
     .. py:method:: list( [, order: Optional[ListOrder], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[Dashboard]
 
@@ -120,23 +120,23 @@ Returns a JSON representation of a dashboard object, including its visualization
             all = w.dashboards.list(sql.ListDashboardsRequest())
 
         Get dashboard objects.
-
-Fetch a paginated list of dashboard objects.
-
-**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
-degradation, or a temporary ban.
-
-:param order: :class:`ListOrder` (optional)
-  Name of dashboard attribute to order by.
-:param page: int (optional)
-  Page number to retrieve.
-:param page_size: int (optional)
-  Number of dashboards to return per page.
-:param q: str (optional)
-  Full text search term.
-
-:returns: Iterator over :class:`Dashboard`
-
+        
+        Fetch a paginated list of dashboard objects.
+        
+        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+        degradation, or a temporary ban.
+        
+        :param order: :class:`ListOrder` (optional)
+          Name of dashboard attribute to order by.
+        :param page: int (optional)
+          Page number to retrieve.
+        :param page_size: int (optional)
+          Number of dashboards to return per page.
+        :param q: str (optional)
+          Full text search term.
+        
+        :returns: Iterator over :class:`Dashboard`
+        
 
     .. py:method:: restore(dashboard_id: str)
 
@@ -159,29 +159,30 @@ degradation, or a temporary ban.
             w.dashboards.delete(dashboard_id=created.id)
 
         Restore a dashboard.
-
-A restored dashboard appears in list views and searches and can be shared.
-
-:param dashboard_id: str
-
-
-
+        
+        A restored dashboard appears in list views and searches and can be shared.
+        
+        :param dashboard_id: str
+        
+        
+        
 
     .. py:method:: update(dashboard_id: str [, name: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> Dashboard
 
         Change a dashboard definition.
-
-Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
-does not add, modify, or remove widgets.
-
-**Note**: You cannot undo this operation.
-
-:param dashboard_id: str
-:param name: str (optional)
-  The title of this dashboard that appears in list views and at the top of the dashboard page.
-:param run_as_role: :class:`RunAsRole` (optional)
-  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-:param tags: List[str] (optional)
-
-:returns: :class:`Dashboard`
+        
+        Modify this dashboard definition. This operation only affects attributes of the dashboard object. It
+        does not add, modify, or remove widgets.
+        
+        **Note**: You cannot undo this operation.
+        
+        :param dashboard_id: str
+        :param name: str (optional)
+          The title of this dashboard that appears in list views and at the top of the dashboard page.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`Dashboard`
+        
\ No newline at end of file
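The class docstring above mentions duplicating dashboards by reading a definition back and posting it again; a rough sketch of that round-trip, copying only attributes :method:dashboards/create accepts (the source id is a placeholder, and widgets would have to be re-created separately)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    src = w.dashboards.get(dashboard_id="<dashboard-id>")  # placeholder

    # create() takes only top-level attributes, so this clones the shell of
    # the dashboard, not its widgets.
    copy = w.dashboards.create(name=f"{src.name} (copy)", tags=src.tags)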
diff --git a/docs/workspace/sql/data_sources.rst b/docs/workspace/sql/data_sources.rst
index 4b05ce137..8f7321fa0 100644
--- a/docs/workspace/sql/data_sources.rst
+++ b/docs/workspace/sql/data_sources.rst
@@ -5,16 +5,16 @@
 .. py:class:: DataSourcesAPI
 
     This API is provided to assist you in making new query objects. When creating a query object, you may
-optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't
-already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
-
-This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
-advise you to use any text editor, REST client, or `grep` to search the response from this API for the
-name of your SQL warehouse as it appears in Databricks SQL.
-
-**Note**: A new version of the Databricks SQL API is now available. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+    optionally specify a `data_source_id` for the SQL warehouse against which it will run. If you don't
+    already know the `data_source_id` for your desired SQL warehouse, this API will help you find it.
+    
+    This API does not support searches. It returns the full list of SQL warehouses in your workspace. We
+    advise you to use any text editor, REST client, or `grep` to search the response from this API for the
+    name of your SQL warehouse as it appears in Databricks SQL.
+    
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: list() -> Iterator[DataSource]
 
@@ -30,14 +30,15 @@ name of your SQL warehouse as it appears in Databricks SQL.
             srcs = w.data_sources.list()
 
         Get a list of SQL warehouses.
-
-Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
-API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
-queries against it.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:returns: Iterator over :class:`DataSource`
+        
+        Retrieves a full list of SQL warehouses available in this workspace. All fields that appear in this
+        API response are enumerated for clarity. However, you need only a SQL warehouse's `id` to create new
+        queries against it.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:warehouses/list
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :returns: Iterator over :class:`DataSource`
+        
\ No newline at end of file
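Because this API returns the full, unfiltered warehouse list, the grep-style lookup suggested above is a one-liner in Python; a sketch mapping warehouse names to `data_source_id` values (the warehouse name is illustrative)::

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()

    # `id` is the value to pass as `data_source_id` when creating legacy
    # query objects against that warehouse.
    by_name = {d.name: d.id for d in w.data_sources.list()}
    print(by_name.get("Starter Warehouse"))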
diff --git a/docs/workspace/sql/dbsql_permissions.rst b/docs/workspace/sql/dbsql_permissions.rst
index 18da30ff0..7f9e5d19c 100644
--- a/docs/workspace/sql/dbsql_permissions.rst
+++ b/docs/workspace/sql/dbsql_permissions.rst
@@ -5,77 +5,78 @@
 .. py:class:: DbsqlPermissionsAPI
 
     The SQL Permissions API is similar to the endpoints of the :method:permissions/set. However, this exposes
-only one endpoint, which gets the Access Control List for a given object. You cannot modify any
-permissions using this API.
-
-There are three levels of permission:
-
-- `CAN_VIEW`: Allows read-only access
-
-- `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
-
-- `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
-
-**Note**: A new version of the Databricks SQL API is now available. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+    only one endpoint, which gets the Access Control List for a given object. You cannot modify any
+    permissions using this API.
+    
+    There are three levels of permission:
+    
+    - `CAN_VIEW`: Allows read-only access
+    
+    - `CAN_RUN`: Allows read access and run access (superset of `CAN_VIEW`)
+    
+    - `CAN_MANAGE`: Allows all actions: read, run, edit, delete, modify permissions (superset of `CAN_RUN`)
+    
+    **Note**: A new version of the Databricks SQL API is now available. [Learn more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: get(object_type: ObjectTypePlural, object_id: str) -> GetResponse
 
         Get object ACL.
-
-Gets a JSON representation of the access control list (ACL) for a specified object.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use
-:method:workspace/getpermissions instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param object_type: :class:`ObjectTypePlural`
-  The type of object permissions to check.
-:param object_id: str
-  Object ID. An ACL is returned for the object with this UUID.
-
-:returns: :class:`GetResponse`
-
+        
+        Gets a JSON representation of the access control list (ACL) for a specified object.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/getpermissions instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param object_type: :class:`ObjectTypePlural`
+          The type of object permissions to check.
+        :param object_id: str
+          Object ID. An ACL is returned for the object with this UUID.
+        
+        :returns: :class:`GetResponse`
+        
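A minimal sketch of fetching an ACL per the docstring above (the enum member `QUERIES` and the `access_control_list` response field are assumptions based on the generated models; the UUID is a placeholder):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

acl = w.dbsql_permissions.get(object_type=sql.ObjectTypePlural.QUERIES,
                              object_id="<query-uuid>")
# Each entry carries one of CAN_VIEW / CAN_RUN / CAN_MANAGE.
for entry in acl.access_control_list:
    print(entry.user_name, entry.permission_level)
```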
 
     .. py:method:: set(object_type: ObjectTypePlural, object_id: str [, access_control_list: Optional[List[AccessControl]]]) -> SetResponse
 
         Set object ACL.
-
-Sets the access control list (ACL) for a specified object. This operation will complete rewrite the
-ACL.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use
-:method:workspace/setpermissions instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param object_type: :class:`ObjectTypePlural`
-  The type of object permission to set.
-:param object_id: str
-  Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
-:param access_control_list: List[:class:`AccessControl`] (optional)
-
-:returns: :class:`SetResponse`
-
+        
+        Sets the access control list (ACL) for a specified object. This operation will completely rewrite the
+        ACL.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:workspace/setpermissions instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param object_type: :class:`ObjectTypePlural`
+          The type of object permission to set.
+        :param object_id: str
+          Object ID. The ACL for the object with this UUID is overwritten by this request's POST content.
+        :param access_control_list: List[:class:`AccessControl`] (optional)
+        
+        :returns: :class:`SetResponse`
+        
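Because `set` completely rewrites the ACL, a sketch would pass the full desired list rather than a delta (the dataclass and enum names are assumptions from the generated `sql` module):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Full rewrite: include every entry that should remain after the call.
w.dbsql_permissions.set(
    object_type=sql.ObjectTypePlural.QUERIES,
    object_id="<query-uuid>",
    access_control_list=[
        sql.AccessControl(user_name="someone@example.com",
                          permission_level=sql.PermissionLevel.CAN_RUN),
    ],
)
```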
 
     .. py:method:: transfer_ownership(object_type: OwnableObjectType, object_id: TransferOwnershipObjectId [, new_owner: Optional[str]]) -> Success
 
         Transfer object ownership.
-
-Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
-
-**Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
-:method:queries/update and :method:alerts/update respectively instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param object_type: :class:`OwnableObjectType`
-  The type of object on which to change ownership.
-:param object_id: :class:`TransferOwnershipObjectId`
-  The ID of the object on which to change ownership.
-:param new_owner: str (optional)
-  Email address for the new owner, who must exist in the workspace.
-
-:returns: :class:`Success`
+        
+        Transfers ownership of a dashboard, query, or alert to an active user. Requires an admin API key.
+        
+        **Note**: A new version of the Databricks SQL API is now available. For queries and alerts, please use
+        :method:queries/update and :method:alerts/update respectively instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param object_type: :class:`OwnableObjectType`
+          The type of object on which to change ownership.
+        :param object_id: :class:`TransferOwnershipObjectId`
+          The ID of the object on which to change ownership.
+        :param new_owner: str (optional)
+          Email address for the new owner, who must exist in the workspace.
+        
+        :returns: :class:`Success`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index 8c7b356e2..959552850 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -5,8 +5,8 @@
 .. py:class:: QueriesAPI
 
     The queries API can be used to perform CRUD operations on queries. A query is a Databricks SQL object that
-includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
-scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+    includes the target SQL warehouse, query text, name, description, tags, and parameters. Queries can be
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
 
     .. py:method:: create( [, query: Optional[CreateQueryRequestQuery]]) -> Query
 
@@ -33,26 +33,26 @@ scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
             w.queries.delete(id=query.id)
 
         Create a query.
-
-Creates a query.
-
-:param query: :class:`CreateQueryRequestQuery` (optional)
-
-:returns: :class:`Query`
-
+        
+        Creates a query.
+        
+        :param query: :class:`CreateQueryRequestQuery` (optional)
+        
+        :returns: :class:`Query`
+        
 
     .. py:method:: delete(id: str)
 
         Delete a query.
-
-Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
-cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
-permanently deleted after 30 days.
-
-:param id: str
-
-
-
+        
+        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+        cannot be used for alerts. You can restore a trashed query through the UI. A trashed query is
+        permanently deleted after 30 days.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: get(id: str) -> Query
 
@@ -81,39 +81,39 @@ permanently deleted after 30 days.
             w.queries.delete(id=query.id)
 
         Get a query.
-
-Gets a query.
-
-:param id: str
-
-:returns: :class:`Query`
-
+        
+        Gets a query.
+        
+        :param id: str
+        
+        :returns: :class:`Query`
+        
 
     .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[ListQueryObjectsResponseQuery]
 
         List queries.
-
-Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
-concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
-
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`ListQueryObjectsResponseQuery`
-
+        
+        Gets a list of queries accessible to the user, ordered by creation time. **Warning:** Calling this API
+        concurrently 10 or more times could result in throttling, service degradation, or a temporary ban.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`ListQueryObjectsResponseQuery`
+        
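Given the throttling warning, a sketch that issues one logical listing and lets the SDK iterator drive `page_token` continuation (the `display_name` attribute is an assumption):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# One listing call; the iterator fetches subsequent pages lazily.
for q in w.queries.list(page_size=50):
    print(q.id, q.display_name)
```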
 
     .. py:method:: list_visualizations(id: str [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[Visualization]
 
         List visualizations on a query.
-
-Gets a list of visualizations on a query.
-
-:param id: str
-:param page_size: int (optional)
-:param page_token: str (optional)
-
-:returns: Iterator over :class:`Visualization`
-
+        
+        Gets a list of visualizations on a query.
+        
+        :param id: str
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`Visualization`
+        
 
     .. py:method:: update(id: str, update_mask: str [, query: Optional[UpdateQueryRequestQuery]]) -> Query
 
@@ -146,20 +146,21 @@ Gets a list of visualizations on a query.
             w.queries.delete(id=query.id)
 
         Update a query.
-
-Updates a query.
-
-:param id: str
-:param update_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-:param query: :class:`UpdateQueryRequestQuery` (optional)
-
-:returns: :class:`Query`
+        
+        Updates a query.
+        
+        :param id: str
+        :param update_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        :param query: :class:`UpdateQueryRequestQuery` (optional)
+        
+        :returns: :class:`Query`
+        
\ No newline at end of file
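A sketch of an explicit field mask, per the recommendation above to avoid `*` (the field names on `UpdateQueryRequestQuery` are assumptions):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Only the fields named in update_mask are modified; everything else is kept.
updated = w.queries.update(
    id="<query-uuid>",
    update_mask="display_name,description",
    query=sql.UpdateQueryRequestQuery(display_name="Nightly revenue",
                                      description="Rolled-up daily totals"),
)
```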
diff --git a/docs/workspace/sql/queries_legacy.rst b/docs/workspace/sql/queries_legacy.rst
index 694be0946..a7ab56836 100644
--- a/docs/workspace/sql/queries_legacy.rst
+++ b/docs/workspace/sql/queries_legacy.rst
@@ -5,178 +5,179 @@
 .. py:class:: QueriesLegacyAPI
 
     These endpoints are used for CRUD operations on query definitions. Query definitions include the target
-SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
-scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
-
-**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+    SQL warehouse, query text, name, description, tags, parameters, and visualizations. Queries can be
+    scheduled using the `sql_task` type of the Jobs API, e.g. :method:jobs/create.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create( [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], parent: Optional[str], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
 
         Create a new query definition.
-
-Creates a new query definition. Queries created with this endpoint belong to the authenticated user
-making the request.
-
-The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
-use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
-`data_source_id` from an existing query.
-
-**Note**: You cannot add a visualization until you create the query.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param data_source_id: str (optional)
-  Data source ID maps to the ID of the data source used by the resource and is distinct from the
-  warehouse ID. [Learn more]
-  
-  [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-:param description: str (optional)
-  General description that conveys additional information about this query such as usage notes.
-:param name: str (optional)
-  The title of this query that appears in list views, widget headings, and on the query page.
-:param options: Any (optional)
-  Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-  `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-  overridden at runtime.
-:param parent: str (optional)
-  The identifier of the workspace folder containing the object.
-:param query: str (optional)
-  The text of the query to be run.
-:param run_as_role: :class:`RunAsRole` (optional)
-  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-:param tags: List[str] (optional)
-
-:returns: :class:`LegacyQuery`
-
+        
+        Creates a new query definition. Queries created with this endpoint belong to the authenticated user
+        making the request.
+        
+        The `data_source_id` field specifies the ID of the SQL warehouse to run this query against. You can
+        use the Data Sources API to see a complete list of available SQL warehouses. Or you can copy the
+        `data_source_id` from an existing query.
+        
+        **Note**: You cannot add a visualization until you create the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/create
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param data_source_id: str (optional)
+          Data source ID maps to the ID of the data source used by the resource and is distinct from the
+          warehouse ID. [Learn more]
+          
+          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+        :param description: str (optional)
+          General description that conveys additional information about this query such as usage notes.
+        :param name: str (optional)
+          The title of this query that appears in list views, widget headings, and on the query page.
+        :param options: Any (optional)
+          Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`,
+          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+          overridden at runtime.
+        :param parent: str (optional)
+          The identifier of the workspace folder containing the object.
+        :param query: str (optional)
+          The text of the query to be run.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`LegacyQuery`
+        
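A sketch of the legacy create call described above (placeholders throughout; `data_source_id` comes from an existing query or the Data Sources API):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# The data source ID is distinct from the warehouse ID.
legacy_query = w.queries_legacy.create(
    name="Orders by day",
    data_source_id="<data-source-uuid>",
    query="SELECT order_date, count(*) FROM orders GROUP BY order_date",
)
print(legacy_query.id)
```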
 
     .. py:method:: delete(query_id: str)
 
         Delete a query.
-
-Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
-they cannot be used for alerts. The trash is deleted after 30 days.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param query_id: str
-
-
-
+        
+        Moves a query to the trash. Trashed queries immediately disappear from searches and list views, and
+        they cannot be used for alerts. The trash is deleted after 30 days.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/delete
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        
+        
 
     .. py:method:: get(query_id: str) -> LegacyQuery
 
         Get a query definition.
-
-Retrieve a query object definition along with contextual permissions information about the currently
-authenticated user.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param query_id: str
-
-:returns: :class:`LegacyQuery`
-
+        
+        Retrieve a query object definition along with contextual permissions information about the currently
+        authenticated user.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/get
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        :returns: :class:`LegacyQuery`
+        
 
     .. py:method:: list( [, order: Optional[str], page: Optional[int], page_size: Optional[int], q: Optional[str]]) -> Iterator[LegacyQuery]
 
         Get a list of queries.
-
-Gets a list of queries. Optionally, this list can be filtered by a search term.
-
-**Warning**: Calling this API concurrently 10 or more times could result in throttling, service
-degradation, or a temporary ban.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param order: str (optional)
-  Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
-  descending instead.
-  
-  - `name`: The name of the query.
-  
-  - `created_at`: The timestamp the query was created.
-  
-  - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
-  value is treated as the highest value for sorting.
-  
-  - `executed_at`: The timestamp when the query was last run.
-  
-  - `created_by`: The user name of the user that created the query.
-:param page: int (optional)
-  Page number to retrieve.
-:param page_size: int (optional)
-  Number of queries to return per page.
-:param q: str (optional)
-  Full text search term
-
-:returns: Iterator over :class:`LegacyQuery`
-
+        
+        Gets a list of queries. Optionally, this list can be filtered by a search term.
+        
+        **Warning**: Calling this API concurrently 10 or more times could result in throttling, service
+        degradation, or a temporary ban.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/list
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param order: str (optional)
+          Name of query attribute to order by. Default sort order is ascending. Append a dash (`-`) to order
+          descending instead.
+          
+          - `name`: The name of the query.
+          
+          - `created_at`: The timestamp the query was created.
+          
+          - `runtime`: The time it took to run this query. This is blank for parameterized queries. A blank
+          value is treated as the highest value for sorting.
+          
+          - `executed_at`: The timestamp when the query was last run.
+          
+          - `created_by`: The user name of the user that created the query.
+        :param page: int (optional)
+          Page number to retrieve.
+        :param page_size: int (optional)
+          Number of queries to return per page.
+        :param q: str (optional)
+          Full text search term
+        
+        :returns: Iterator over :class:`LegacyQuery`
+        
 
     .. py:method:: restore(query_id: str)
 
         Restore a query.
-
-Restore a query that has been moved to the trash. A restored query appears in list views and searches.
-You can use restored queries for alerts.
-
-**Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
-[Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param query_id: str
-
-
-
+        
+        Restore a query that has been moved to the trash. A restored query appears in list views and searches.
+        You can use restored queries for alerts.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please see the latest version.
+        [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        
+        
+        
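The restore call itself is a one-liner; a sketch with a placeholder ID:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# A restored query reappears in list views and can back alerts again.
w.queries_legacy.restore(query_id="<query-uuid>")
```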
 
     .. py:method:: update(query_id: str [, data_source_id: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[str], run_as_role: Optional[RunAsRole], tags: Optional[List[str]]]) -> LegacyQuery
 
         Change a query definition.
-
-Modify this query definition.
-
-**Note**: You cannot undo this operation.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
-instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param query_id: str
-:param data_source_id: str (optional)
-  Data source ID maps to the ID of the data source used by the resource and is distinct from the
-  warehouse ID. [Learn more]
-  
-  [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
-:param description: str (optional)
-  General description that conveys additional information about this query such as usage notes.
-:param name: str (optional)
-  The title of this query that appears in list views, widget headings, and on the query page.
-:param options: Any (optional)
-  Exclusively used for storing a list parameter definitions. A parameter is an object with `title`,
-  `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
-  overridden at runtime.
-:param query: str (optional)
-  The text of the query to be run.
-:param run_as_role: :class:`RunAsRole` (optional)
-  Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-  viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
-:param tags: List[str] (optional)
-
-:returns: :class:`LegacyQuery`
+        
+        Modify this query definition.
+        
+        **Note**: You cannot undo this operation.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use :method:queries/update
+        instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+        :param data_source_id: str (optional)
+          Data source ID maps to the ID of the data source used by the resource and is distinct from the
+          warehouse ID. [Learn more]
+          
+          [Learn more]: https://docs.databricks.com/api/workspace/datasources/list
+        :param description: str (optional)
+          General description that conveys additional information about this query such as usage notes.
+        :param name: str (optional)
+          The title of this query that appears in list views, widget headings, and on the query page.
+        :param options: Any (optional)
+          Exclusively used for storing a list of parameter definitions. A parameter is an object with `title`,
+          `name`, `type`, and `value` properties. The `value` field here is the default value. It can be
+          overridden at runtime.
+        :param query: str (optional)
+          The text of the query to be run.
+        :param run_as_role: :class:`RunAsRole` (optional)
+          Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+          viewer" behavior) or `"owner"` (signifying "run as owner" behavior)
+        :param tags: List[str] (optional)
+        
+        :returns: :class:`LegacyQuery`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/query_history.rst b/docs/workspace/sql/query_history.rst
index 30d1db6ce..2f5520cdf 100644
--- a/docs/workspace/sql/query_history.rst
+++ b/docs/workspace/sql/query_history.rst
@@ -5,7 +5,7 @@
 .. py:class:: QueryHistoryAPI
 
     A service responsible for storing and retrieving the list of queries run against SQL endpoints and
-serverless compute.
+    serverless compute.
 
     .. py:method:: list( [, filter_by: Optional[QueryFilter], include_metrics: Optional[bool], max_results: Optional[int], page_token: Optional[str]]) -> ListQueriesResponse
 
@@ -23,23 +23,24 @@ serverless compute.
                 query_start_time_range=sql.TimeRange(start_time_ms=1690243200000, end_time_ms=1690329600000)))
 
         List Queries.
-
-List the history of queries through SQL warehouses, and serverless compute.
-
-You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
-returned first (up to max_results in request). The pagination token returned in response can be used
-to list subsequent query statuses.
-
-:param filter_by: :class:`QueryFilter` (optional)
-  A filter to limit query history results. This field is optional.
-:param include_metrics: bool (optional)
-  Whether to include the query metrics with each query. Only use this for a small subset of queries
-  (max_results). Defaults to false.
-:param max_results: int (optional)
-  Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
-:param page_token: str (optional)
-  A token that can be used to get the next page of results. The token can contains characters that
-  need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
-  %2B. This field is optional.
-
-:returns: :class:`ListQueriesResponse`
+        
+        List the history of queries through SQL warehouses and serverless compute.
+        
+        You can filter by user ID, warehouse ID, status, and time range. Most recently started queries are
+        returned first (up to max_results in request). The pagination token returned in response can be used
+        to list subsequent query statuses.
+        
+        :param filter_by: :class:`QueryFilter` (optional)
+          A filter to limit query history results. This field is optional.
+        :param include_metrics: bool (optional)
+          Whether to include the query metrics with each query. Only use this for a small subset of queries
+          (max_results). Defaults to false.
+        :param max_results: int (optional)
+          Limit the number of results returned in one page. Must be less than 1000 and the default is 100.
+        :param page_token: str (optional)
+          A token that can be used to get the next page of results. The token can contain characters that
+          need to be encoded before using it in a URL. For example, the character '+' needs to be replaced by
+          %2B. This field is optional.
+        
+        :returns: :class:`ListQueriesResponse`
+        
\ No newline at end of file
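A sketch of manual pagination with the time-range filter from the example above (the response attributes `res` and `next_page_token` are assumptions based on the generated `ListQueriesResponse` model):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

filter_by = sql.QueryFilter(
    query_start_time_range=sql.TimeRange(start_time_ms=1690243200000,
                                         end_time_ms=1690329600000))

# Follow next_page_token until the history window is exhausted.
page_token = None
while True:
    resp = w.query_history.list(filter_by=filter_by, max_results=100,
                                page_token=page_token)
    for q in resp.res or []:
        print(q.query_id, q.status)
    page_token = resp.next_page_token
    if not page_token:
        break
```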
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst
index 6011fbeb0..ac3d6c565 100644
--- a/docs/workspace/sql/query_visualizations.rst
+++ b/docs/workspace/sql/query_visualizations.rst
@@ -5,47 +5,48 @@
 .. py:class:: QueryVisualizationsAPI
 
     This is an evolving API that facilitates the addition and removal of visualizations from existing queries
-in the Databricks Workspace. Data structures can change over time.
+    in the Databricks Workspace. Data structures can change over time.
 
     .. py:method:: create( [, visualization: Optional[CreateVisualizationRequestVisualization]]) -> Visualization
 
         Add a visualization to a query.
-
-Adds a visualization to a query.
-
-:param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
-
-:returns: :class:`Visualization`
-
+        
+        Adds a visualization to a query.
+        
+        :param visualization: :class:`CreateVisualizationRequestVisualization` (optional)
+        
+        :returns: :class:`Visualization`
+        
 
     .. py:method:: delete(id: str)
 
         Remove a visualization.
-
-Removes a visualization.
-
-:param id: str
-
-
-
+        
+        Removes a visualization.
+        
+        :param id: str
+        
+        
+        
 
     .. py:method:: update(id: str, update_mask: str [, visualization: Optional[UpdateVisualizationRequestVisualization]]) -> Visualization
 
         Update a visualization.
-
-Updates a visualization.
-
-:param id: str
-:param update_mask: str
-  The field mask must be a single string, with multiple fields separated by commas (no spaces). The
-  field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
-  `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
-  the entire collection field can be specified. Field names must exactly match the resource field
-  names.
-  
-  A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
-  fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
-  changes in the future.
-:param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
-
-:returns: :class:`Visualization`
+        
+        Updates a visualization.
+        
+        :param id: str
+        :param update_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
+        
+        :returns: :class:`Visualization`
+        
\ No newline at end of file
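As with query updates, an explicit mask is safer than `*`; a sketch (the `display_name` field on `UpdateVisualizationRequestVisualization` is an assumption):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Rename the visualization and leave every other setting untouched.
viz = w.query_visualizations.update(
    id="<visualization-uuid>",
    update_mask="display_name",
    visualization=sql.UpdateVisualizationRequestVisualization(
        display_name="Daily totals"),
)
```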
diff --git a/docs/workspace/sql/query_visualizations_legacy.rst b/docs/workspace/sql/query_visualizations_legacy.rst
index aca56b516..f56f78a5f 100644
--- a/docs/workspace/sql/query_visualizations_legacy.rst
+++ b/docs/workspace/sql/query_visualizations_legacy.rst
@@ -5,80 +5,81 @@
 .. py:class:: QueryVisualizationsLegacyAPI
 
     This is an evolving API that facilitates the addition and removal of vizualisations from existing queries
-within the Databricks Workspace. Data structures may change over time.
-
-**Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
-more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+    within the Databricks Workspace. Data structures may change over time.
+    
+    **Note**: A new version of the Databricks SQL API is now available. Please see the latest version. [Learn
+    more]
+    
+    [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
 
     .. py:method:: create(query_id: str, type: str, options: Any [, description: Optional[str], name: Optional[str]]) -> LegacyVisualization
 
         Add visualization to a query.
-
-Creates visualization in the query.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use
-:method:queryvisualizations/create instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param query_id: str
-  The identifier returned by :method:queries/create
-:param type: str
-  The type of visualization: chart, table, pivot table, and so on.
-:param options: Any
-  The options object varies widely from one visualization type to the next and is unsupported.
-  Databricks does not recommend modifying visualization settings in JSON.
-:param description: str (optional)
-  A short description of this visualization. This is not displayed in the UI.
-:param name: str (optional)
-  The name of the visualization that appears on dashboards and the query screen.
-
-:returns: :class:`LegacyVisualization`
-
+        
+        Creates a visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/create instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param query_id: str
+          The identifier returned by :method:queries/create
+        :param type: str
+          The type of visualization: chart, table, pivot table, and so on.
+        :param options: Any
+          The options object varies widely from one visualization type to the next and is unsupported.
+          Databricks does not recommend modifying visualization settings in JSON.
+        :param description: str (optional)
+          A short description of this visualization. This is not displayed in the UI.
+        :param name: str (optional)
+          The name of the visualization that appears on dashboards and the query screen.
+        
+        :returns: :class:`LegacyVisualization`
+        
 
     .. py:method:: delete(id: str)
 
         Remove visualization.
-
-Removes a visualization from the query.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use
-:method:queryvisualizations/delete instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param id: str
-  Widget ID returned by :method:queryvizualisations/create
-
-
-
+        
+        Removes a visualization from the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/delete instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param id: str
+          Widget ID returned by :method:queryvizualisations/create
+        
+        
+        
 
     .. py:method:: update(id: str [, created_at: Optional[str], description: Optional[str], name: Optional[str], options: Optional[Any], query: Optional[LegacyQuery], type: Optional[str], updated_at: Optional[str]]) -> LegacyVisualization
 
         Edit existing visualization.
-
-Updates visualization in the query.
-
-**Note**: A new version of the Databricks SQL API is now available. Please use
-:method:queryvisualizations/update instead. [Learn more]
-
-[Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
-
-:param id: str
-  The UUID for this visualization.
-:param created_at: str (optional)
-:param description: str (optional)
-  A short description of this visualization. This is not displayed in the UI.
-:param name: str (optional)
-  The name of the visualization that appears on dashboards and the query screen.
-:param options: Any (optional)
-  The options object varies widely from one visualization type to the next and is unsupported.
-  Databricks does not recommend modifying visualization settings in JSON.
-:param query: :class:`LegacyQuery` (optional)
-:param type: str (optional)
-  The type of visualization: chart, table, pivot table, and so on.
-:param updated_at: str (optional)
-
-:returns: :class:`LegacyVisualization`
+        
+        Updates the visualization in the query.
+        
+        **Note**: A new version of the Databricks SQL API is now available. Please use
+        :method:queryvisualizations/update instead. [Learn more]
+        
+        [Learn more]: https://docs.databricks.com/en/sql/dbsql-api-latest.html
+        
+        :param id: str
+          The UUID for this visualization.
+        :param created_at: str (optional)
+        :param description: str (optional)
+          A short description of this visualization. This is not displayed in the UI.
+        :param name: str (optional)
+          The name of the visualization that appears on dashboards and the query screen.
+        :param options: Any (optional)
+          The options object varies widely from one visualization type to the next and is unsupported.
+          Databricks does not recommend modifying visualization settings in JSON.
+        :param query: :class:`LegacyQuery` (optional)
+        :param type: str (optional)
+          The type of visualization: chart, table, pivot table, and so on.
+        :param updated_at: str (optional)
+        
+        :returns: :class:`LegacyVisualization`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst
index 2ab3de7ea..9b4382dd5 100644
--- a/docs/workspace/sql/redash_config.rst
+++ b/docs/workspace/sql/redash_config.rst
@@ -9,5 +9,6 @@
     .. py:method:: get_config() -> ClientConfig
 
         Read workspace configuration for Redash-v2.
-
-:returns: :class:`ClientConfig`
+        
+        :returns: :class:`ClientConfig`
+        
\ No newline at end of file
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 1fd0d3407..44f64b512 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -5,247 +5,248 @@
 .. py:class:: StatementExecutionAPI
 
     The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
-fetch the result.
-
-**Getting started**
-
-We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
-
-**Overview of statement execution and result fetching**
-
-Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
-SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format.
-If no other parameters are specified, the server will wait for up to 10s before returning a response. If
-the statement has completed within this timespan, the response will include the result data as a JSON
-array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will
-provide the statement ID that can be used to poll for results by using a
-:method:statementexecution/getStatement request.
-
-You can specify whether the call should behave synchronously, asynchronously or start synchronously with a
-fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
-settings. If `wait_timeout` is set between 5-50 seconds (default: 10s), the call waits for results up to
-the specified timeout; when set to `0s`, the call is asynchronous and responds immediately with a
-statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while
-the statement execution has not yet finished. This can be set to either `CONTINUE`, to fallback to
-asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
-
-In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30
-seconds; if the statement execution finishes within this time, the result data is returned directly in the
-response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns
-with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call
-doesn't wait for the statement to finish but returns directly with a statement ID. The status of the
-statement execution can be polled by issuing :method:statementexecution/getStatement with the statement
-ID. Once the execution has succeeded, this call also returns the result and metadata in the response. -
-Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10
-seconds; if the statement execution finishes within this time, the result data is returned directly in the
-response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can
-be used to fetch status and results in the same way as in the asynchronous mode.
-
-Depending on the size, the result can be split into multiple chunks. If the statement execution is
-successful, the statement response contains a manifest and the first chunk of the result. The manifest
-contains schema information and provides metadata for each chunk in the result. Result chunks can be
-retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any
-order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a
-`next_chunk_index` and `next_chunk_internal_link` that point to the next chunk.
-
-A statement can be canceled with :method:statementexecution/cancelExecution.
-
-**Fetching result data: format and disposition**
-
-To specify the format of the result data, use the `format` field, which can be set to one of the following
-options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`.
-
-There are two ways to receive statement results, controlled by the `disposition` setting, which can be
-either `INLINE` or `EXTERNAL_LINKS`:
-
-- `INLINE`: In this mode, the result data is directly included in the response. It's best suited for
-smaller results. This mode can only be used with the `JSON_ARRAY` format.
-
-- `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data
-in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode
-can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`.
-
-By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`.
-
-**Limits and limitations**
-
-Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly
-match the byte count of the actual payload.
-
-- Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. -
-Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit
-will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum
-query text size is 16 MiB. - Cancelation might silently fail. A successful response from a cancel request
-indicates that the cancel request was successfully received and sent to the processing engine. However, an
-outstanding statement might have already completed execution when the cancel request arrives. Polling for
-status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
-are approximate, occur server-side, and cannot account for things such as caller delays and network
-latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
-once every 15 minutes. - The results are only available for one hour after success; polling does not
-extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
-you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
-
-[Apache Arrow Columnar]: https://arrow.apache.org/overview/
-[Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
+    fetch the result.
+    
+    **Getting started**
+    
+    We suggest beginning with the [Databricks SQL Statement Execution API tutorial].
+    
+    **Overview of statement execution and result fetching**
+    
+    Statement execution begins by issuing a :method:statementexecution/executeStatement request with a valid
+    SQL statement and warehouse ID, along with optional parameters such as the data catalog and output format.
+    If no other parameters are specified, the server will wait for up to 10s before returning a response. If
+    the statement has completed within this timespan, the response will include the result data as a JSON
+    array and metadata. Otherwise, if no result is available after the 10s timeout expired, the response will
+    provide the statement ID that can be used to poll for results by using a
+    :method:statementexecution/getStatement request.
+    
+    You can specify whether the call should behave synchronously, asynchronously, or start synchronously with a
+    fallback to asynchronous execution. This is controlled with the `wait_timeout` and `on_wait_timeout`
+    settings. If `wait_timeout` is set between 5 and 50 seconds (default: 10s), the call waits for results up to
+    the specified timeout; when set to `0s`, the call is asynchronous and responds immediately with a
+    statement ID. The `on_wait_timeout` setting specifies what should happen when the timeout is reached while
+    the statement execution has not yet finished. This can be set to either `CONTINUE`, to fall back to
+    asynchronous mode, or it can be set to `CANCEL`, which cancels the statement.
+    
+    In summary: - Synchronous mode - `wait_timeout=30s` and `on_wait_timeout=CANCEL` - The call waits up to 30
+    seconds; if the statement execution finishes within this time, the result data is returned directly in the
+    response. If the execution takes longer than 30 seconds, the execution is canceled and the call returns
+    with a `CANCELED` state. - Asynchronous mode - `wait_timeout=0s` (`on_wait_timeout` is ignored) - The call
+    doesn't wait for the statement to finish but returns directly with a statement ID. The status of the
+    statement execution can be polled by issuing :method:statementexecution/getStatement with the statement
+    ID. Once the execution has succeeded, this call also returns the result and metadata in the response. -
+    Hybrid mode (default) - `wait_timeout=10s` and `on_wait_timeout=CONTINUE` - The call waits for up to 10
+    seconds; if the statement execution finishes within this time, the result data is returned directly in the
+    response. If the execution takes longer than 10 seconds, a statement ID is returned. The statement ID can
+    be used to fetch status and results in the same way as in the asynchronous mode.
+    
+    Depending on the size, the result can be split into multiple chunks. If the statement execution is
+    successful, the statement response contains a manifest and the first chunk of the result. The manifest
+    contains schema information and provides metadata for each chunk in the result. Result chunks can be
+    retrieved by index with :method:statementexecution/getStatementResultChunkN which may be called in any
+    order and in parallel. For sequential fetching, each chunk, apart from the last, also contains a
+    `next_chunk_index` and `next_chunk_internal_link` that point to the next chunk.
+    
+    A statement can be canceled with :method:statementexecution/cancelExecution.
+    
+    **Fetching result data: format and disposition**
+    
+    To specify the format of the result data, use the `format` field, which can be set to one of the following
+    options: `JSON_ARRAY` (JSON), `ARROW_STREAM` ([Apache Arrow Columnar]), or `CSV`.
+    
+    There are two ways to receive statement results, controlled by the `disposition` setting, which can be
+    either `INLINE` or `EXTERNAL_LINKS`:
+    
+    - `INLINE`: In this mode, the result data is directly included in the response. It's best suited for
+    smaller results. This mode can only be used with the `JSON_ARRAY` format.
+    
+    - `EXTERNAL_LINKS`: In this mode, the response provides links that can be used to download the result data
+    in chunks separately. This approach is ideal for larger results and offers higher throughput. This mode
+    can be used with all the formats: `JSON_ARRAY`, `ARROW_STREAM`, and `CSV`.
+    
+    By default, the API uses `format=JSON_ARRAY` and `disposition=INLINE`.
+    
+    **Limits and limitations**
+    
+    Note: The byte limit for INLINE disposition is based on internal storage metrics and will not exactly
+    match the byte count of the actual payload.
+    
+    - Statements with `disposition=INLINE` are limited to 25 MiB and will fail when this limit is exceeded. -
+    Statements with `disposition=EXTERNAL_LINKS` are limited to 100 GiB. Result sets larger than this limit
+    will be truncated. Truncation is indicated by the `truncated` field in the result manifest. - The maximum
+    query text size is 16 MiB. - Cancellation might silently fail. A successful response from a cancel request
+    indicates that the cancel request was successfully received and sent to the processing engine. However, an
+    outstanding statement might have already completed execution when the cancel request arrives. Polling for
+    status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
+    are approximate, occur server-side, and cannot account for things such as caller delays and network
+    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
+    once every 15 minutes. - The results are only available for one hour after success; polling does not
+    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
+    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
+    
+    [Apache Arrow Columnar]: https://arrow.apache.org/overview/
+    [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
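A sketch of the synchronous mode from the summary above (the warehouse ID is a placeholder; `StatementState` and the `result.data_array` field are assumptions from the generated models):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Wait up to 30s; cancel rather than fall back to async on timeout.
resp = w.statement_execution.execute_statement(
    statement="SELECT 1 AS one",
    warehouse_id="<warehouse-id>",
    wait_timeout="30s",
    on_wait_timeout=sql.ExecuteStatementRequestOnWaitTimeout.CANCEL,
)
if resp.status.state == sql.StatementState.SUCCEEDED:
    print(resp.result.data_array)
```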
 
     .. py:method:: cancel_execution(statement_id: str)
 
         Cancel statement execution.
-
-Requests that an executing statement be canceled. Callers must poll for status to see the terminal
-state.
-
-:param statement_id: str
-  The statement ID is returned upon successfully submitting a SQL statement, and is a required
-  reference for all subsequent calls.
-
-
-
+        
+        Requests that an executing statement be canceled. Callers must poll for status to see the terminal
+        state.
+        
+        :param statement_id: str
+          The statement ID is returned upon successfully submitting a SQL statement, and is a required
+          reference for all subsequent calls.
+        
+        
+        
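Since cancellation can silently lose the race with completion, a sketch of the poll-to-terminal pattern the docstring calls for (the `StatementState` enum members are assumptions):

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()
statement_id = "<statement-id>"

w.statement_execution.cancel_execution(statement_id)

# The statement may already have finished; poll until a terminal state.
terminal = {sql.StatementState.SUCCEEDED, sql.StatementState.FAILED,
            sql.StatementState.CANCELED, sql.StatementState.CLOSED}
status = w.statement_execution.get_statement(statement_id).status
while status.state not in terminal:
    time.sleep(5)  # well within the 15-minute keep-alive window
    status = w.statement_execution.get_statement(statement_id).status
print(status.state)
```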
 
     .. py:method:: execute_statement(statement: str, warehouse_id: str [, byte_limit: Optional[int], catalog: Optional[str], disposition: Optional[Disposition], format: Optional[Format], on_wait_timeout: Optional[ExecuteStatementRequestOnWaitTimeout], parameters: Optional[List[StatementParameterListItem]], row_limit: Optional[int], schema: Optional[str], wait_timeout: Optional[str]]) -> StatementResponse
 
         Execute a SQL statement.
-
-:param statement: str
-  The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
-:param warehouse_id: str
-  Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
-  
-  [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
-:param byte_limit: int (optional)
-  Applies the given byte limit to the statement's result size. Byte counts are based on internal data
-  representations and might not match the final size in the requested `format`. If the result was
-  truncated due to the byte limit, then `truncated` in the response is set to `true`. When using
-  `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not
-  explcitly set.
-:param catalog: str (optional)
-  Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
-  
-  [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
-:param disposition: :class:`Disposition` (optional)
-:param format: :class:`Format` (optional)
-  Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
-  `CSV`.
-  
-  Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
-  disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
-  
-  When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
-  where each value is either the *string representation* of a value, or `null`. For example, the
-  output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would
-  look like this:
-  
-  ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ```
-  
-  When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result
-  contains compact JSON with no indentation or extra whitespace.
-  
-  When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result
-  will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format].
-  
-  When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a
-  CSV according to [RFC 4180] standard. All the columns values will have *string representation*
-  similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first
-  chunk in the result would contain a header row with column names. For example, the output of `SELECT
-  concat('id-', id) AS strCol, id AS intCol, null as nullCol FROM range(3)` would look like this:
-  
-  ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ```
-  
-  [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format
-  [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180
-:param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional)
-  When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution
-  doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue
-  or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the
-  call returns a statement ID which can be used for polling with
-  :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled
-  and the call returns with a `CANCELED` state.
-:param parameters: List[:class:`StatementParameterListItem`] (optional)
-  A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists
-  of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be
-  omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a
-  string.
-  
-  If the type is given, parameters will be checked for type correctness according to the given type. A
-  value is correct if the provided string can be converted to the requested type using the `cast`
-  function. The exact semantics are described in the section [`cast` function] of the SQL language
-  reference.
-  
-  For example, the following statement contains two parameters, `my_name` and `my_date`:
-  
-  SELECT * FROM my_table WHERE name = :my_name AND date = :my_date
-  
-  The parameters can be passed in the request body as follows:
-  
-  { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
-  "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value":
-  "2020-01-01", "type": "DATE" } ] }
-  
-  Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL
-  Statement Execution API.
-  
-  Also see the section [Parameter markers] of the SQL language reference.
-  
-  [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html
-  [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html
-:param row_limit: int (optional)
-  Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it
-  also sets the `truncated` field in the response to indicate whether the result was trimmed due to
-  the limit or not.
-:param schema: str (optional)
-  Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL.
-  
-  [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html
-:param wait_timeout: str (optional)
-  The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set
-  to 0 or to a value between 5 and 50.
-  
-  When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the
-  execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID
-  which can be used for polling with :method:statementexecution/getStatement.
-  
-  When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait
-  for the statement execution to finish. If the execution finishes within this time, the call returns
-  immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If
-  the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
-  timeout is reached.
-
-:returns: :class:`StatementResponse`
-
+        
+        :param statement: str
+          The SQL statement to execute. The statement can optionally be parameterized, see `parameters`.
+        :param warehouse_id: str
+          Warehouse upon which to execute a statement. See also [What are SQL warehouses?]
+          
+          [What are SQL warehouses?]: https://docs.databricks.com/sql/admin/warehouse-type.html
+        :param byte_limit: int (optional)
+          Applies the given byte limit to the statement's result size. Byte counts are based on internal data
+          representations and might not match the final size in the requested `format`. If the result was
+          truncated due to the byte limit, then `truncated` in the response is set to `true`. When using
+          `EXTERNAL_LINKS` disposition, a default `byte_limit` of 100 GiB is applied if `byte_limit` is not
+          explicitly set.
+        :param catalog: str (optional)
+          Sets default catalog for statement execution, similar to [`USE CATALOG`] in SQL.
+          
+          [`USE CATALOG`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-catalog.html
+        :param disposition: :class:`Disposition` (optional)
+        :param format: :class:`Format` (optional)
+          Statement execution supports three result formats: `JSON_ARRAY` (default), `ARROW_STREAM`, and
+          `CSV`.
+          
+          Important: The formats `ARROW_STREAM` and `CSV` are supported only with `EXTERNAL_LINKS`
+          disposition. `JSON_ARRAY` is supported in `INLINE` and `EXTERNAL_LINKS` disposition.
+          
+          When specifying `format=JSON_ARRAY`, result data will be formatted as an array of arrays of values,
+          where each value is either the *string representation* of a value, or `null`. For example, the
+          output of `SELECT concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would
+          look like this:
+          
+          ``` [ [ "id-1", "1", null ], [ "id-2", "2", null ], [ "id-3", "3", null ], ] ```
+          
+          When specifying `format=JSON_ARRAY` and `disposition=EXTERNAL_LINKS`, each chunk in the result
+          contains compact JSON with no indentation or extra whitespace.
+          
+          When specifying `format=ARROW_STREAM` and `disposition=EXTERNAL_LINKS`, each chunk in the result
+          will be formatted as Apache Arrow Stream. See the [Apache Arrow streaming format].
+          
+          When specifying `format=CSV` and `disposition=EXTERNAL_LINKS`, each chunk in the result will be a
+          CSV according to the [RFC 4180] standard. All the column values will have a *string representation*
+          similar to the `JSON_ARRAY` format, and `null` values will be encoded as “null”. Only the first
+          chunk in the result will contain a header row with column names. For example, the output of `SELECT
+          concat('id-', id) AS strCol, id AS intCol, null AS nullCol FROM range(3)` would look like this:
+          
+          ``` strCol,intCol,nullCol id-1,1,null id-2,2,null id-3,3,null ```
+          
+          [Apache Arrow streaming format]: https://arrow.apache.org/docs/format/Columnar.html#ipc-streaming-format
+          [RFC 4180]: https://www.rfc-editor.org/rfc/rfc4180
+        :param on_wait_timeout: :class:`ExecuteStatementRequestOnWaitTimeout` (optional)
+          When `wait_timeout > 0s`, the call will block up to the specified time. If the statement execution
+          doesn't finish within this time, `on_wait_timeout` determines whether the execution should continue
+          or be canceled. When set to `CONTINUE`, the statement execution continues asynchronously and the
+          call returns a statement ID which can be used for polling with
+          :method:statementexecution/getStatement. When set to `CANCEL`, the statement execution is canceled
+          and the call returns with a `CANCELED` state.
+        :param parameters: List[:class:`StatementParameterListItem`] (optional)
+          A list of parameters to pass into a SQL statement containing parameter markers. A parameter consists
+          of a name, a value, and optionally a type. To represent a NULL value, the `value` field may be
+          omitted or set to `null` explicitly. If the `type` field is omitted, the value is interpreted as a
+          string.
+          
+          If the type is given, parameters will be checked for type correctness according to the given type. A
+          value is correct if the provided string can be converted to the requested type using the `cast`
+          function. The exact semantics are described in the section [`cast` function] of the SQL language
+          reference.
+          
+          For example, the following statement contains two parameters, `my_name` and `my_date`:
+          
+          SELECT * FROM my_table WHERE name = :my_name AND date = :my_date
+          
+          The parameters can be passed in the request body as follows:
+          
+          { ..., "statement": "SELECT * FROM my_table WHERE name = :my_name AND date = :my_date",
+          "parameters": [ { "name": "my_name", "value": "the name" }, { "name": "my_date", "value":
+          "2020-01-01", "type": "DATE" } ] }
+          
+          Currently, positional parameters denoted by a `?` marker are not supported by the Databricks SQL
+          Statement Execution API.
+          
+          Also see the section [Parameter markers] of the SQL language reference.
+          
+          [Parameter markers]: https://docs.databricks.com/sql/language-manual/sql-ref-parameter-marker.html
+          [`cast` function]: https://docs.databricks.com/sql/language-manual/functions/cast.html
+        :param row_limit: int (optional)
+          Applies the given row limit to the statement's result set, but unlike the `LIMIT` clause in SQL, it
+          also sets the `truncated` field in the response to indicate whether the result was trimmed due to
+          the limit or not.
+        :param schema: str (optional)
+          Sets default schema for statement execution, similar to [`USE SCHEMA`] in SQL.
+          
+          [`USE SCHEMA`]: https://docs.databricks.com/sql/language-manual/sql-ref-syntax-ddl-use-schema.html
+        :param wait_timeout: str (optional)
+          The time in seconds the call will wait for the statement's result set as `Ns`, where `N` can be set
+          to 0 or to a value between 5 and 50.
+          
+          When set to `0s`, the statement will execute in asynchronous mode and the call will not wait for the
+          execution to finish. In this case, the call returns directly with `PENDING` state and a statement ID
+          which can be used for polling with :method:statementexecution/getStatement.
+          
+          When set between 5 and 50 seconds, the call will behave synchronously up to this timeout and wait
+          for the statement execution to finish. If the execution finishes within this time, the call returns
+          immediately with a manifest and result data (or a `FAILED` state in case of an execution error). If
+          the statement takes longer to execute, `on_wait_timeout` determines what should happen after the
+          timeout is reached.
+        
+        :returns: :class:`StatementResponse`
+        
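+
+        A minimal sketch, assuming ``warehouse_id`` holds the id of a running SQL
+        warehouse and ``my_table`` exists (both placeholders), of running a
+        parameterized statement synchronously:
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.sql import StatementParameterListItem
+
+            w = WorkspaceClient()
+            # Wait up to 30s; after that, on_wait_timeout (see above) decides
+            # whether execution continues asynchronously or is canceled.
+            resp = w.statement_execution.execute_statement(
+                statement="SELECT * FROM my_table WHERE name = :my_name LIMIT 10",
+                warehouse_id=warehouse_id,
+                parameters=[StatementParameterListItem(name="my_name", value="the name")],
+                wait_timeout="30s",
+            )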
 
     .. py:method:: get_statement(statement_id: str) -> StatementResponse
 
         Get status, manifest, and result first chunk.
-
-This request can be used to poll for the statement's status. When the `status.state` field is
-`SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the
-statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the
-state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and
-further calls will receive an HTTP 404 response.
-
-**NOTE** This call currently might take up to 5 seconds to get the latest status and result.
-
-:param statement_id: str
-  The statement ID is returned upon successfully submitting a SQL statement, and is a required
-  reference for all subsequent calls.
-
-:returns: :class:`StatementResponse`
-
+        
+        This request can be used to poll for the statement's status. When the `status.state` field is
+        `SUCCEEDED` it will also return the result manifest and the first chunk of the result data. When the
+        statement is in the terminal states `CANCELED`, `CLOSED` or `FAILED`, it returns HTTP 200 with the
+        state set. After at least 12 hours in terminal state, the statement is removed from the warehouse and
+        further calls will receive an HTTP 404 response.
+        
+        **NOTE** This call currently might take up to 5 seconds to get the latest status and result.
+        
+        :param statement_id: str
+          The statement ID is returned upon successfully submitting a SQL statement, and is a required
+          reference for all subsequent calls.
+        
+        :returns: :class:`StatementResponse`
+        
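+
+        A minimal polling sketch, assuming ``w`` is a ``WorkspaceClient`` and
+        ``resp`` came from a prior ``execute_statement`` call made with
+        ``wait_timeout="0s"``:
+
+        .. code-block:: python
+
+            import time
+
+            from databricks.sdk.service.sql import StatementState
+
+            status = w.statement_execution.get_statement(resp.statement_id)
+            while status.status.state in (StatementState.PENDING, StatementState.RUNNING):
+                time.sleep(5)  # the reported state may lag by a few seconds (see note above)
+                status = w.statement_execution.get_statement(resp.statement_id)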
 
     .. py:method:: get_statement_result_chunk_n(statement_id: str, chunk_index: int) -> ResultData
 
         Get result chunk by index.
-
-After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index.
-Whereas the first chunk with `chunk_index=0` is typically fetched with
-:method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request
-can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
-element described in the :method:statementexecution/getStatement request, and similarly includes the
-`next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
-
-:param statement_id: str
-  The statement ID is returned upon successfully submitting a SQL statement, and is a required
-  reference for all subsequent calls.
-:param chunk_index: int
-
-:returns: :class:`ResultData`
+        
+        After the statement execution has `SUCCEEDED`, this request can be used to fetch any chunk by index.
+        Whereas the first chunk with `chunk_index=0` is typically fetched with
+        :method:statementexecution/executeStatement or :method:statementexecution/getStatement, this request
+        can be used to fetch subsequent chunks. The response structure is identical to the nested `result`
+        element described in the :method:statementexecution/getStatement request, and similarly includes the
+        `next_chunk_index` and `next_chunk_internal_link` fields for simple iteration through the result set.
+        
+        :param statement_id: str
+          The statement ID is returned upon successfully submitting a SQL statement, and is a required
+          reference for all subsequent calls.
+        :param chunk_index: int
+        
+        :returns: :class:`ResultData`
+        
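+
+        A minimal iteration sketch for the default `INLINE`/`JSON_ARRAY` disposition,
+        assuming ``statement_id`` refers to a statement that has `SUCCEEDED`:
+
+        .. code-block:: python
+
+            chunk_index = 0
+            while chunk_index is not None:
+                chunk = w.statement_execution.get_statement_result_chunk_n(
+                    statement_id, chunk_index)
+                for row in chunk.data_array or []:
+                    print(row)
+                # next_chunk_index is None once the last chunk has been fetched
+                chunk_index = chunk.next_chunk_index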
\ No newline at end of file
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index e5afd9419..fd55d5b0c 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -5,7 +5,7 @@
 .. py:class:: WarehousesAPI
 
     A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks
-SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
+    SQL. Compute resources are infrastructure resources that provide processing capabilities in the cloud.
 
     .. py:method:: create( [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[CreateWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse]
 
@@ -34,69 +34,69 @@ SQL. Compute resources are infrastructure resources that provide processing capa
             w.warehouses.delete(id=created.id)
 
         Create a warehouse.
-
-Creates a new SQL warehouse.
-
-:param auto_stop_mins: int (optional)
-  The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
-  is automatically stopped.
-  
-  Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
-  non-serverless warehouses - 0 indicates no autostop.
-  
-  Defaults to 120 mins
-:param channel: :class:`Channel` (optional)
-  Channel Details
-:param cluster_size: str (optional)
-  Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
-  to run larger queries on it. If you want to increase the number of concurrent queries, please tune
-  max_num_clusters.
-  
-  Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
-  4X-Large
-:param creator_name: str (optional)
-  warehouse creator name
-:param enable_photon: bool (optional)
-  Configures whether the warehouse should use Photon optimized clusters.
-  
-  Defaults to false.
-:param enable_serverless_compute: bool (optional)
-  Configures whether the warehouse should use serverless compute
-:param instance_profile_arn: str (optional)
-  Deprecated. Instance profile used to pass IAM role to the cluster
-:param max_num_clusters: int (optional)
-  Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-  
-  Supported values: - Must be >= min_num_clusters - Must be <= 30.
-  
-  Defaults to min_clusters if unset.
-:param min_num_clusters: int (optional)
-  Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
-  will ensure that a larger number of clusters are always running and therefore may reduce the cold
-  start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-  
-  Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-  
-  Defaults to 1
-:param name: str (optional)
-  Logical name for the cluster.
-  
-  Supported values: - Must be unique within an org. - Must be less than 100 characters.
-:param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
-  Configurations whether the warehouse should use spot instances.
-:param tags: :class:`EndpointTags` (optional)
-  A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
-  associated with this SQL warehouse.
-  
-  Supported values: - Number of tags < 45.
-:param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
-  Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
-  also set the field `enable_serverless_compute` to `true`.
-
-:returns:
-  Long-running operation waiter for :class:`GetWarehouseResponse`.
-  See :method:wait_get_warehouse_running for more details.
-
+        
+        Creates a new SQL warehouse.
+        
+        :param auto_stop_mins: int (optional)
+          The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
+          is automatically stopped.
+          
+          Supported values: - Must be >= 0 mins for serverless warehouses - Must be == 0 or >= 10 mins for
+          non-serverless warehouses - 0 indicates no autostop.
+          
+          Defaults to 120 mins
+        :param channel: :class:`Channel` (optional)
+          Channel Details
+        :param cluster_size: str (optional)
+          Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
+          to run larger queries on it. If you want to increase the number of concurrent queries, please tune
+          max_num_clusters.
+          
+          Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
+          4X-Large
+        :param creator_name: str (optional)
+          warehouse creator name
+        :param enable_photon: bool (optional)
+          Configures whether the warehouse should use Photon optimized clusters.
+          
+          Defaults to false.
+        :param enable_serverless_compute: bool (optional)
+          Configures whether the warehouse should use serverless compute
+        :param instance_profile_arn: str (optional)
+          Deprecated. Instance profile used to pass IAM role to the cluster
+        :param max_num_clusters: int (optional)
+          Maximum number of clusters that the autoscaler will create to handle concurrent queries.
+          
+          Supported values: - Must be >= min_num_clusters - Must be <= 30.
+          
+          Defaults to min_clusters if unset.
+        :param min_num_clusters: int (optional)
+          Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
+          will ensure that a larger number of clusters are always running and therefore may reduce the cold
+          start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
+          
+          Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
+          
+          Defaults to 1
+        :param name: str (optional)
+          Logical name for the cluster.
+          
+          Supported values: - Must be unique within an org. - Must be less than 100 characters.
+        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+          Configures whether the warehouse should use spot instances.
+        :param tags: :class:`EndpointTags` (optional)
+          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
+          associated with this SQL warehouse.
+          
+          Supported values: - Number of tags < 45.
+        :param warehouse_type: :class:`CreateWarehouseRequestWarehouseType` (optional)
+          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
+          also set the field `enable_serverless_compute` to `true`.
+        
+        :returns:
+          Long-running operation waiter for :class:`GetWarehouseResponse`.
+          See :method:wait_get_warehouse_running for more details.
+        
 
     .. py:method:: create_and_wait( [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[CreateWarehouseRequestWarehouseType], timeout: datetime.timedelta = 0:20:00]) -> GetWarehouseResponse
 
@@ -104,14 +104,14 @@ Creates a new SQL warehouse.
     .. py:method:: delete(id: str)
 
         Delete a warehouse.
-
-Deletes a SQL warehouse.
-
-:param id: str
-  Required. Id of the SQL warehouse.
-
-
-
+        
+        Deletes a SQL warehouse.
+        
+        :param id: str
+          Required. Id of the SQL warehouse.
+        
+        
+        
 
     .. py:method:: edit(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType]]) -> Wait[GetWarehouseResponse]
 
@@ -146,70 +146,70 @@ Deletes a SQL warehouse.
             w.warehouses.delete(id=created.id)
 
         Update a warehouse.
-
-Updates the configuration for a SQL warehouse.
-
-:param id: str
-  Required. Id of the warehouse to configure.
-:param auto_stop_mins: int (optional)
-  The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
-  is automatically stopped.
-  
-  Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
-  
-  Defaults to 120 mins
-:param channel: :class:`Channel` (optional)
-  Channel Details
-:param cluster_size: str (optional)
-  Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
-  to run larger queries on it. If you want to increase the number of concurrent queries, please tune
-  max_num_clusters.
-  
-  Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
-  4X-Large
-:param creator_name: str (optional)
-  warehouse creator name
-:param enable_photon: bool (optional)
-  Configures whether the warehouse should use Photon optimized clusters.
-  
-  Defaults to false.
-:param enable_serverless_compute: bool (optional)
-  Configures whether the warehouse should use serverless compute.
-:param instance_profile_arn: str (optional)
-  Deprecated. Instance profile used to pass IAM role to the cluster
-:param max_num_clusters: int (optional)
-  Maximum number of clusters that the autoscaler will create to handle concurrent queries.
-  
-  Supported values: - Must be >= min_num_clusters - Must be <= 30.
-  
-  Defaults to min_clusters if unset.
-:param min_num_clusters: int (optional)
-  Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
-  will ensure that a larger number of clusters are always running and therefore may reduce the cold
-  start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
-  
-  Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
-  
-  Defaults to 1
-:param name: str (optional)
-  Logical name for the cluster.
-  
-  Supported values: - Must be unique within an org. - Must be less than 100 characters.
-:param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
-  Configurations whether the warehouse should use spot instances.
-:param tags: :class:`EndpointTags` (optional)
-  A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
-  associated with this SQL warehouse.
-  
-  Supported values: - Number of tags < 45.
-:param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
-  Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
-  also set the field `enable_serverless_compute` to `true`.
-
-:returns:
-  Long-running operation waiter for :class:`GetWarehouseResponse`.
-  See :method:wait_get_warehouse_running for more details.
-
+        
+        Updates the configuration for a SQL warehouse.
+        
+        :param id: str
+          Required. Id of the warehouse to configure.
+        :param auto_stop_mins: int (optional)
+          The amount of time in minutes that a SQL warehouse must be idle (i.e., no RUNNING queries) before it
+          is automatically stopped.
+          
+          Supported values: - Must be == 0 or >= 10 mins - 0 indicates no autostop.
+          
+          Defaults to 120 mins
+        :param channel: :class:`Channel` (optional)
+          Channel Details
+        :param cluster_size: str (optional)
+          Size of the clusters allocated for this warehouse. Increasing the size of a spark cluster allows you
+          to run larger queries on it. If you want to increase the number of concurrent queries, please tune
+          max_num_clusters.
+          
+          Supported values: - 2X-Small - X-Small - Small - Medium - Large - X-Large - 2X-Large - 3X-Large -
+          4X-Large
+        :param creator_name: str (optional)
+          warehouse creator name
+        :param enable_photon: bool (optional)
+          Configures whether the warehouse should use Photon optimized clusters.
+          
+          Defaults to false.
+        :param enable_serverless_compute: bool (optional)
+          Configures whether the warehouse should use serverless compute.
+        :param instance_profile_arn: str (optional)
+          Deprecated. Instance profile used to pass IAM role to the cluster
+        :param max_num_clusters: int (optional)
+          Maximum number of clusters that the autoscaler will create to handle concurrent queries.
+          
+          Supported values: - Must be >= min_num_clusters - Must be <= 30.
+          
+          Defaults to min_clusters if unset.
+        :param min_num_clusters: int (optional)
+          Minimum number of available clusters that will be maintained for this SQL warehouse. Increasing this
+          will ensure that a larger number of clusters are always running and therefore may reduce the cold
+          start time for new queries. This is similar to reserved vs. revocable cores in a resource manager.
+          
+          Supported values: - Must be > 0 - Must be <= min(max_num_clusters, 30)
+          
+          Defaults to 1
+        :param name: str (optional)
+          Logical name for the cluster.
+          
+          Supported values: - Must be unique within an org. - Must be less than 100 characters.
+        :param spot_instance_policy: :class:`SpotInstancePolicy` (optional)
+          Configures whether the warehouse should use spot instances.
+        :param tags: :class:`EndpointTags` (optional)
+          A set of key-value pairs that will be tagged on all resources (e.g., AWS instances and EBS volumes)
+          associated with this SQL warehouse.
+          
+          Supported values: - Number of tags < 45.
+        :param warehouse_type: :class:`EditWarehouseRequestWarehouseType` (optional)
+          Warehouse type: `PRO` or `CLASSIC`. If you want to use serverless compute, you must set to `PRO` and
+          also set the field `enable_serverless_compute` to `true`.
+        
+        :returns:
+          Long-running operation waiter for :class:`GetWarehouseResponse`.
+          See :method:wait_get_warehouse_running for more details.
+        
 
     .. py:method:: edit_and_wait(id: str [, auto_stop_mins: Optional[int], channel: Optional[Channel], cluster_size: Optional[str], creator_name: Optional[str], enable_photon: Optional[bool], enable_serverless_compute: Optional[bool], instance_profile_arn: Optional[str], max_num_clusters: Optional[int], min_num_clusters: Optional[int], name: Optional[str], spot_instance_policy: Optional[SpotInstancePolicy], tags: Optional[EndpointTags], warehouse_type: Optional[EditWarehouseRequestWarehouseType], timeout: datetime.timedelta = 0:20:00]) -> GetWarehouseResponse
 
@@ -243,48 +243,48 @@ Updates the configuration for a SQL warehouse.
             w.warehouses.delete(id=created.id)
 
         Get warehouse info.
-
-Gets the information for a single SQL warehouse.
-
-:param id: str
-  Required. Id of the SQL warehouse.
-
-:returns: :class:`GetWarehouseResponse`
-
+        
+        Gets the information for a single SQL warehouse.
+        
+        :param id: str
+          Required. Id of the SQL warehouse.
+        
+        :returns: :class:`GetWarehouseResponse`
+        
 
     .. py:method:: get_permission_levels(warehouse_id: str) -> GetWarehousePermissionLevelsResponse
 
         Get SQL warehouse permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param warehouse_id: str
-  The SQL warehouse for which to get or manage permissions.
-
-:returns: :class:`GetWarehousePermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param warehouse_id: str
+          The SQL warehouse for which to get or manage permissions.
+        
+        :returns: :class:`GetWarehousePermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(warehouse_id: str) -> WarehousePermissions
 
         Get SQL warehouse permissions.
-
-Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root
-object.
-
-:param warehouse_id: str
-  The SQL warehouse for which to get or manage permissions.
-
-:returns: :class:`WarehousePermissions`
-
+        
+        Gets the permissions of a SQL warehouse. SQL warehouses can inherit permissions from their root
+        object.
+        
+        :param warehouse_id: str
+          The SQL warehouse for which to get or manage permissions.
+        
+        :returns: :class:`WarehousePermissions`
+        
 
     .. py:method:: get_workspace_warehouse_config() -> GetWorkspaceWarehouseConfigResponse
 
         Get the workspace configuration.
-
-Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-
-:returns: :class:`GetWorkspaceWarehouseConfigResponse`
-
+        
+        Gets the workspace level configuration that is shared by all SQL warehouses in a workspace.
+        
+        :returns: :class:`GetWorkspaceWarehouseConfigResponse`
+        
 
     .. py:method:: list( [, run_as_user_id: Optional[int]]) -> Iterator[EndpointInfo]
 
@@ -301,75 +301,75 @@ Gets the workspace level configuration that is shared by all SQL warehouses in a
             all = w.warehouses.list(sql.ListWarehousesRequest())
 
         List warehouses.
-
-Lists all SQL warehouses that a user has manager permissions on.
-
-:param run_as_user_id: int (optional)
-  Service Principal which will be used to fetch the list of warehouses. If not specified, the user
-  from the session header is used.
-
-:returns: Iterator over :class:`EndpointInfo`
-
+        
+        Lists all SQL warehouses that a user has manager permissions on.
+        
+        :param run_as_user_id: int (optional)
+          Service Principal which will be used to fetch the list of warehouses. If not specified, the user
+          from the session header is used.
+        
+        :returns: Iterator over :class:`EndpointInfo`
+        
 
     .. py:method:: set_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions
 
         Set SQL warehouse permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param warehouse_id: str
-  The SQL warehouse for which to get or manage permissions.
-:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-
-:returns: :class:`WarehousePermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param warehouse_id: str
+          The SQL warehouse for which to get or manage permissions.
+        :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
+        
+        :returns: :class:`WarehousePermissions`
+        
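+
+        A minimal sketch, assuming ``w`` is a ``WorkspaceClient`` and that
+        ``warehouse_id`` and the group ``data-team`` exist (both placeholders):
+
+        .. code-block:: python
+
+            from databricks.sdk.service.sql import (WarehouseAccessControlRequest,
+                                                    WarehousePermissionLevel)
+
+            w.warehouses.set_permissions(
+                warehouse_id=warehouse_id,
+                access_control_list=[
+                    WarehouseAccessControlRequest(
+                        group_name="data-team",
+                        permission_level=WarehousePermissionLevel.CAN_USE,
+                    )
+                ],
+            )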
 
     .. py:method:: set_workspace_warehouse_config( [, channel: Optional[Channel], config_param: Optional[RepeatedEndpointConfPairs], data_access_config: Optional[List[EndpointConfPair]], enabled_warehouse_types: Optional[List[WarehouseTypePair]], global_param: Optional[RepeatedEndpointConfPairs], google_service_account: Optional[str], instance_profile_arn: Optional[str], security_policy: Optional[SetWorkspaceWarehouseConfigRequestSecurityPolicy], sql_configuration_parameters: Optional[RepeatedEndpointConfPairs]])
 
         Set the workspace configuration.
-
-Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
-
-:param channel: :class:`Channel` (optional)
-  Optional: Channel selection details
-:param config_param: :class:`RepeatedEndpointConfPairs` (optional)
-  Deprecated: Use sql_configuration_parameters
-:param data_access_config: List[:class:`EndpointConfPair`] (optional)
-  Spark confs for external hive metastore configuration JSON serialized size must be less than <= 512K
-:param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional)
-  List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
-  CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled, they don't need to be
-  specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be
-  converted to another type. Used by frontend to save specific type availability in the warehouse
-  create and edit form UI.
-:param global_param: :class:`RepeatedEndpointConfPairs` (optional)
-  Deprecated: Use sql_configuration_parameters
-:param google_service_account: str (optional)
-  GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage
-:param instance_profile_arn: str (optional)
-  AWS Only: Instance profile used to pass IAM role to the cluster
-:param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional)
-  Security policy for warehouses
-:param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
-  SQL configuration parameters
-
-
-
+        
+        Sets the workspace level configuration that is shared by all SQL warehouses in a workspace.
+        
+        :param channel: :class:`Channel` (optional)
+          Optional: Channel selection details
+        :param config_param: :class:`RepeatedEndpointConfPairs` (optional)
+          Deprecated: Use sql_configuration_parameters
+        :param data_access_config: List[:class:`EndpointConfPair`] (optional)
+          Spark confs for external hive metastore configuration. The JSON-serialized size must be <= 512K
+        :param enabled_warehouse_types: List[:class:`WarehouseTypePair`] (optional)
+          List of Warehouse Types allowed in this workspace (limits allowed value of the type field in
+          CreateWarehouse and EditWarehouse). Note: Some types cannot be disabled; they don't need to be
+          specified in SetWorkspaceWarehouseConfig. Note: Disabling a type may cause existing warehouses to be
+          converted to another type. Used by frontend to save specific type availability in the warehouse
+          create and edit form UI.
+        :param global_param: :class:`RepeatedEndpointConfPairs` (optional)
+          Deprecated: Use sql_configuration_parameters
+        :param google_service_account: str (optional)
+          GCP only: Google Service Account used to pass to cluster to access Google Cloud Storage
+        :param instance_profile_arn: str (optional)
+          AWS Only: Instance profile used to pass IAM role to the cluster
+        :param security_policy: :class:`SetWorkspaceWarehouseConfigRequestSecurityPolicy` (optional)
+          Security policy for warehouses
+        :param sql_configuration_parameters: :class:`RepeatedEndpointConfPairs` (optional)
+          SQL configuration parameters
+        
+        
+        
 
     .. py:method:: start(id: str) -> Wait[GetWarehouseResponse]
 
         Start a warehouse.
-
-Starts a SQL warehouse.
-
-:param id: str
-  Required. Id of the SQL warehouse.
-
-:returns:
-  Long-running operation waiter for :class:`GetWarehouseResponse`.
-  See :method:wait_get_warehouse_running for more details.
-
+        
+        Starts a SQL warehouse.
+        
+        :param id: str
+          Required. Id of the SQL warehouse.
+        
+        :returns:
+          Long-running operation waiter for :class:`GetWarehouseResponse`.
+          See :method:wait_get_warehouse_running for more details.
+        
 
     .. py:method:: start_and_wait(id: str, timeout: datetime.timedelta = 0:20:00) -> GetWarehouseResponse
 
@@ -377,16 +377,16 @@ Starts a SQL warehouse.
     .. py:method:: stop(id: str) -> Wait[GetWarehouseResponse]
 
         Stop a warehouse.
-
-Stops a SQL warehouse.
-
-:param id: str
-  Required. Id of the SQL warehouse.
-
-:returns:
-  Long-running operation waiter for :class:`GetWarehouseResponse`.
-  See :method:wait_get_warehouse_stopped for more details.
-
+        
+        Stops a SQL warehouse.
+        
+        :param id: str
+          Required. Id of the SQL warehouse.
+        
+        :returns:
+          Long-running operation waiter for :class:`GetWarehouseResponse`.
+          See :method:wait_get_warehouse_stopped for more details.
+        
 
     .. py:method:: stop_and_wait(id: str, timeout: datetime.timedelta = 0:20:00) -> GetWarehouseResponse
 
@@ -394,16 +394,16 @@ Stops a SQL warehouse.
     .. py:method:: update_permissions(warehouse_id: str [, access_control_list: Optional[List[WarehouseAccessControlRequest]]]) -> WarehousePermissions
 
         Update SQL warehouse permissions.
-
-Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
-object.
-
-:param warehouse_id: str
-  The SQL warehouse for which to get or manage permissions.
-:param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
-
-:returns: :class:`WarehousePermissions`
-
+        
+        Updates the permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root
+        object.
+        
+        :param warehouse_id: str
+          The SQL warehouse for which to get or manage permissions.
+        :param access_control_list: List[:class:`WarehouseAccessControlRequest`] (optional)
+        
+        :returns: :class:`WarehousePermissions`
+        
 
     .. py:method:: wait_get_warehouse_running(id: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[GetWarehouseResponse], None]]) -> GetWarehouseResponse
 
diff --git a/docs/workspace/vectorsearch/vector_search_endpoints.rst b/docs/workspace/vectorsearch/vector_search_endpoints.rst
index c53697944..1abd09b95 100644
--- a/docs/workspace/vectorsearch/vector_search_endpoints.rst
+++ b/docs/workspace/vectorsearch/vector_search_endpoints.rst
@@ -9,18 +9,18 @@
     .. py:method:: create_endpoint(name: str, endpoint_type: EndpointType) -> Wait[EndpointInfo]
 
         Create an endpoint.
-
-Create a new endpoint.
-
-:param name: str
-  Name of endpoint
-:param endpoint_type: :class:`EndpointType`
-  Type of endpoint.
-
-:returns:
-  Long-running operation waiter for :class:`EndpointInfo`.
-  See :method:wait_get_endpoint_vector_search_endpoint_online for more details.
-
+        
+        Create a new endpoint.
+        
+        :param name: str
+          Name of endpoint
+        :param endpoint_type: :class:`EndpointType`
+          Type of endpoint.
+        
+        :returns:
+          Long-running operation waiter for :class:`EndpointInfo`.
+          See :method:wait_get_endpoint_vector_search_endpoint_online for more details.
+        
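+
+        A minimal sketch of creating an endpoint and blocking until it is online
+        (the endpoint name is a placeholder):
+
+        .. code-block:: python
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.vectorsearch import EndpointType
+
+            w = WorkspaceClient()
+            endpoint = w.vector_search_endpoints.create_endpoint_and_wait(
+                name="my-endpoint",
+                endpoint_type=EndpointType.STANDARD,
+            )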
 
     .. py:method:: create_endpoint_and_wait(name: str, endpoint_type: EndpointType, timeout: datetime.timedelta = 0:20:00) -> EndpointInfo
 
@@ -28,31 +28,31 @@ Create a new endpoint.
     .. py:method:: delete_endpoint(endpoint_name: str)
 
         Delete an endpoint.
-
-:param endpoint_name: str
-  Name of the endpoint
-
-
-
+        
+        :param endpoint_name: str
+          Name of the endpoint
+        
+        
+        
 
     .. py:method:: get_endpoint(endpoint_name: str) -> EndpointInfo
 
         Get an endpoint.
-
-:param endpoint_name: str
-  Name of the endpoint
-
-:returns: :class:`EndpointInfo`
-
+        
+        :param endpoint_name: str
+          Name of the endpoint
+        
+        :returns: :class:`EndpointInfo`
+        
 
     .. py:method:: list_endpoints( [, page_token: Optional[str]]) -> Iterator[EndpointInfo]
 
         List all endpoints.
-
-:param page_token: str (optional)
-  Token for pagination
-
-:returns: Iterator over :class:`EndpointInfo`
-
+        
+        :param page_token: str (optional)
+          Token for pagination
+        
+        :returns: Iterator over :class:`EndpointInfo`
+        
 
     .. py:method:: wait_get_endpoint_vector_search_endpoint_online(endpoint_name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[EndpointInfo], None]]) -> EndpointInfo
diff --git a/docs/workspace/vectorsearch/vector_search_indexes.rst b/docs/workspace/vectorsearch/vector_search_indexes.rst
index 1222ab348..415e19d90 100644
--- a/docs/workspace/vectorsearch/vector_search_indexes.rst
+++ b/docs/workspace/vectorsearch/vector_search_indexes.rst
@@ -5,178 +5,179 @@
 .. py:class:: VectorSearchIndexesAPI
 
     **Index**: An efficient representation of your embedding vectors that supports real-time and efficient
-approximate nearest neighbor (ANN) search queries.
-
-There are 2 types of Vector Search indexes: * **Delta Sync Index**: An index that automatically syncs with
-a source Delta Table, automatically and incrementally updating the index as the underlying data in the
-Delta Table changes. * **Direct Vector Access Index**: An index that supports direct read and write of
-vectors and metadata through our REST and SDK APIs. With this model, the user manages index updates.
+    approximate nearest neighbor (ANN) search queries.
+    
+    There are 2 types of Vector Search indexes:
+    
+    * **Delta Sync Index**: An index that automatically syncs with a source Delta Table, automatically and
+      incrementally updating the index as the underlying data in the Delta Table changes.
+    * **Direct Vector Access Index**: An index that supports direct read and write of vectors and metadata
+      through our REST and SDK APIs. With this model, the user manages index updates.
 
     .. py:method:: create_index(name: str, endpoint_name: str, primary_key: str, index_type: VectorIndexType [, delta_sync_index_spec: Optional[DeltaSyncVectorIndexSpecRequest], direct_access_index_spec: Optional[DirectAccessVectorIndexSpec]]) -> CreateVectorIndexResponse
 
         Create an index.
-
-Create a new index.
-
-:param name: str
-  Name of the index
-:param endpoint_name: str
-  Name of the endpoint to be used for serving the index
-:param primary_key: str
-  Primary key of the index
-:param index_type: :class:`VectorIndexType`
-  There are 2 types of Vector Search indexes:
-  
-  - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
-  incrementally updating the index as the underlying data in the Delta Table changes. -
-  `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through our
-  REST and SDK APIs. With this model, the user manages index updates.
-:param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional)
-  Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.
-:param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)
-  Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.
-
-:returns: :class:`CreateVectorIndexResponse`
-
+        
+        Create a new index.
+        
+        :param name: str
+          Name of the index
+        :param endpoint_name: str
+          Name of the endpoint to be used for serving the index
+        :param primary_key: str
+          Primary key of the index
+        :param index_type: :class:`VectorIndexType`
+          There are 2 types of Vector Search indexes:
+          
+          - `DELTA_SYNC`: An index that automatically syncs with a source Delta Table, automatically and
+            incrementally updating the index as the underlying data in the Delta Table changes.
+          - `DIRECT_ACCESS`: An index that supports direct read and write of vectors and metadata through
+            our REST and SDK APIs. With this model, the user manages index updates.
+        :param delta_sync_index_spec: :class:`DeltaSyncVectorIndexSpecRequest` (optional)
+          Specification for Delta Sync Index. Required if `index_type` is `DELTA_SYNC`.
+        :param direct_access_index_spec: :class:`DirectAccessVectorIndexSpec` (optional)
+          Specification for Direct Vector Access Index. Required if `index_type` is `DIRECT_ACCESS`.
+        
+        :returns: :class:`CreateVectorIndexResponse`
+        
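+
+        A minimal sketch of creating a Direct Vector Access Index, assuming ``w`` is
+        a ``WorkspaceClient``; all names and the schema are placeholders:
+
+        .. code-block:: python
+
+            from databricks.sdk.service.vectorsearch import (
+                DirectAccessVectorIndexSpec, EmbeddingVectorColumn, VectorIndexType)
+
+            index = w.vector_search_indexes.create_index(
+                name="main.default.my_index",
+                endpoint_name="my-endpoint",
+                primary_key="id",
+                index_type=VectorIndexType.DIRECT_ACCESS,
+                direct_access_index_spec=DirectAccessVectorIndexSpec(
+                    embedding_vector_columns=[
+                        EmbeddingVectorColumn(name="embedding", embedding_dimension=4),
+                    ],
+                    schema_json='{"id": "int", "text": "string", "embedding": "array<float>"}',
+                ),
+            )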
 
     .. py:method:: delete_data_vector_index(index_name: str, primary_keys: List[str]) -> DeleteDataVectorIndexResponse
 
         Delete data from index.
-
-Handles the deletion of data from a specified vector index.
-
-:param index_name: str
-  Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index.
-:param primary_keys: List[str]
-  List of primary keys for the data to be deleted.
-
-:returns: :class:`DeleteDataVectorIndexResponse`
-
+        
+        Handles the deletion of data from a specified vector index.
+        
+        :param index_name: str
+          Name of the vector index where data is to be deleted. Must be a Direct Vector Access Index.
+        :param primary_keys: List[str]
+          List of primary keys for the data to be deleted.
+        
+        :returns: :class:`DeleteDataVectorIndexResponse`
+        
 
     .. py:method:: delete_index(index_name: str)
 
         Delete an index.
-
-Delete an index.
-
-:param index_name: str
-  Name of the index
-
-
-
+        
+        Delete an index.
+        
+        :param index_name: str
+          Name of the index
+        
+        
+        
 
     .. py:method:: get_index(index_name: str) -> VectorIndex
 
         Get an index.
-
-Get an index.
-
-:param index_name: str
-  Name of the index
-
-:returns: :class:`VectorIndex`
-
+        
+        Get an index.
+        
+        :param index_name: str
+          Name of the index
+        
+        :returns: :class:`VectorIndex`
+        
 
     .. py:method:: list_indexes(endpoint_name: str [, page_token: Optional[str]]) -> Iterator[MiniVectorIndex]
 
         List indexes.
-
-List all indexes in the given endpoint.
-
-:param endpoint_name: str
-  Name of the endpoint
-:param page_token: str (optional)
-  Token for pagination
-
-:returns: Iterator over :class:`MiniVectorIndex`
-
+        
+        List all indexes in the given endpoint.
+        
+        :param endpoint_name: str
+          Name of the endpoint
+        :param page_token: str (optional)
+          Token for pagination
+        
+        :returns: Iterator over :class:`MiniVectorIndex`
+        
 
     .. py:method:: query_index(index_name: str, columns: List[str] [, filters_json: Optional[str], num_results: Optional[int], query_text: Optional[str], query_type: Optional[str], query_vector: Optional[List[float]], score_threshold: Optional[float]]) -> QueryVectorIndexResponse
 
         Query an index.
-
-Query the specified vector index.
-
-:param index_name: str
-  Name of the vector index to query.
-:param columns: List[str]
-  List of column names to include in the response.
-:param filters_json: str (optional)
-  JSON string representing query filters.
-  
-  Example filters: - `{"id <": 5}`: Filter for id less than 5. - `{"id >": 5}`: Filter for id greater
-  than 5. - `{"id <=": 5}`: Filter for id less than equal to 5. - `{"id >=": 5}`: Filter for id
-  greater than equal to 5. - `{"id": 5}`: Filter for id equal to 5.
-:param num_results: int (optional)
-  Number of results to return. Defaults to 10.
-:param query_text: str (optional)
-  Query text. Required for Delta Sync Index using model endpoint.
-:param query_type: str (optional)
-  The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
-:param query_vector: List[float] (optional)
-  Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
-  vectors.
-:param score_threshold: float (optional)
-  Threshold for the approximate nearest neighbor search. Defaults to 0.0.
-
-:returns: :class:`QueryVectorIndexResponse`
-
+        
+        Query the specified vector index.
+        
+        :param index_name: str
+          Name of the vector index to query.
+        :param columns: List[str]
+          List of column names to include in the response.
+        :param filters_json: str (optional)
+          JSON string representing query filters.
+          
+          Example filters:
+          
+          - `{"id <": 5}`: Filter for id less than 5.
+          - `{"id >": 5}`: Filter for id greater than 5.
+          - `{"id <=": 5}`: Filter for id less than or equal to 5.
+          - `{"id >=": 5}`: Filter for id greater than or equal to 5.
+          - `{"id": 5}`: Filter for id equal to 5.
+        :param num_results: int (optional)
+          Number of results to return. Defaults to 10.
+        :param query_text: str (optional)
+          Query text. Required for Delta Sync Index using model endpoint.
+        :param query_type: str (optional)
+          The query type to use. Choices are `ANN` and `HYBRID`. Defaults to `ANN`.
+        :param query_vector: List[float] (optional)
+          Query vector. Required for Direct Vector Access Index and Delta Sync Index using self-managed
+          vectors.
+        :param score_threshold: float (optional)
+          Threshold for the approximate nearest neighbor search. Defaults to 0.0.
+        
+        :returns: :class:`QueryVectorIndexResponse`
+        
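+
+        A minimal `ANN` query sketch against the hypothetical 4-dimensional index
+        created above:
+
+        .. code-block:: python
+
+            results = w.vector_search_indexes.query_index(
+                index_name="main.default.my_index",
+                columns=["id", "text"],
+                query_vector=[0.1, 0.2, 0.3, 0.4],  # must match the index dimension
+                num_results=5,
+            )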
 
     .. py:method:: query_next_page(index_name: str [, endpoint_name: Optional[str], page_token: Optional[str]]) -> QueryVectorIndexResponse
 
         Query next page.
-
-Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
-to fetch next page of results.
-
-:param index_name: str
-  Name of the vector index to query.
-:param endpoint_name: str (optional)
-  Name of the endpoint.
-:param page_token: str (optional)
-  Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
-
-:returns: :class:`QueryVectorIndexResponse`
-
+        
+        Use `next_page_token` returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` request
+        to fetch next page of results.
+        
+        :param index_name: str
+          Name of the vector index to query.
+        :param endpoint_name: str (optional)
+          Name of the endpoint.
+        :param page_token: str (optional)
+          Page token returned from previous `QueryVectorIndex` or `QueryVectorIndexNextPage` API.
+        
+        :returns: :class:`QueryVectorIndexResponse`
+        
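+
+        A minimal pagination sketch, continuing the query above for as long as the
+        response carries a ``next_page_token`` (assumed field name):
+
+        .. code-block:: python
+
+            page = results
+            while page.next_page_token:
+                page = w.vector_search_indexes.query_next_page(
+                    index_name="main.default.my_index",
+                    page_token=page.next_page_token,
+                )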
 
     .. py:method:: scan_index(index_name: str [, last_primary_key: Optional[str], num_results: Optional[int]]) -> ScanVectorIndexResponse
 
         Scan an index.
-
-Scan the specified vector index and return the first `num_results` entries after the exclusive
-`primary_key`.
-
-:param index_name: str
-  Name of the vector index to scan.
-:param last_primary_key: str (optional)
-  Primary key of the last entry returned in the previous scan.
-:param num_results: int (optional)
-  Number of results to return. Defaults to 10.
-
-:returns: :class:`ScanVectorIndexResponse`
-
+        
+        Scan the specified vector index and return the first `num_results` entries after the exclusive
+        `primary_key`.
+        
+        :param index_name: str
+          Name of the vector index to scan.
+        :param last_primary_key: str (optional)
+          Primary key of the last entry returned in the previous scan.
+        :param num_results: int (optional)
+          Number of results to return. Defaults to 10.
+        
+        :returns: :class:`ScanVectorIndexResponse`
+        
 
     .. py:method:: sync_index(index_name: str)
 
         Synchronize an index.
-
-Triggers a synchronization process for a specified vector index.
-
-:param index_name: str
-  Name of the vector index to synchronize. Must be a Delta Sync Index.
-
-
-
+        
+        Triggers a synchronization process for a specified vector index.
+        
+        :param index_name: str
+          Name of the vector index to synchronize. Must be a Delta Sync Index.
+        
+        
+        
 
     .. py:method:: upsert_data_vector_index(index_name: str, inputs_json: str) -> UpsertDataVectorIndexResponse
 
         Upsert data into an index.
-
-Handles the upserting of data into a specified vector index.
-
-:param index_name: str
-  Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
-:param inputs_json: str
-  JSON string representing the data to be upserted.
-
-:returns: :class:`UpsertDataVectorIndexResponse`
+        
+        Handles the upserting of data into a specified vector index.
+        
+        :param index_name: str
+          Name of the vector index where data is to be upserted. Must be a Direct Vector Access Index.
+        :param inputs_json: str
+          JSON string representing the data to be upserted.
+        
+        :returns: :class:`UpsertDataVectorIndexResponse`
+        
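+
+        A minimal upsert sketch against the hypothetical index above; the rows are
+        illustrative and must match the index schema:
+
+        .. code-block:: python
+
+            import json
+
+            w.vector_search_indexes.upsert_data_vector_index(
+                index_name="main.default.my_index",
+                inputs_json=json.dumps([
+                    {"id": 1, "text": "hello", "embedding": [0.1, 0.2, 0.3, 0.4]},
+                ]),
+            )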
\ No newline at end of file
diff --git a/docs/workspace/workspace/git_credentials.rst b/docs/workspace/workspace/git_credentials.rst
index ea93845b6..34851e84a 100644
--- a/docs/workspace/workspace/git_credentials.rst
+++ b/docs/workspace/workspace/git_credentials.rst
@@ -5,10 +5,10 @@
 .. py:class:: GitCredentialsAPI
 
     Registers personal access token for Databricks to do operations on behalf of the user.
-
-See [more info].
-
-[more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+    
+    See [more info].
+    
+    [more info]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
 
     .. py:method:: create(git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]]) -> CreateCredentialsResponse
 
@@ -27,41 +27,41 @@ See [more info].
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Create a credential entry.
-
-Creates a Git credential entry for the user. Only one Git credential per user is supported, so any
-attempts to create credentials if an entry already exists will fail. Use the PATCH endpoint to update
-existing credentials, or the DELETE endpoint to delete existing credentials.
-
-:param git_provider: str
-  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-  `gitLabEnterpriseEdition` and `awsCodeCommit`.
-:param git_username: str (optional)
-  The username or email provided with your Git provider account, depending on which provider you are
-  using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
-  be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
-  BitBucket Server, username must be used. For all other providers please see your provider's Personal
-  Access Token authentication documentation to see what is supported.
-:param personal_access_token: str (optional)
-  The personal access token used to authenticate to the corresponding Git provider. For certain
-  providers, support may exist for other types of scoped access tokens. [Learn more].
-  
-  [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-
-:returns: :class:`CreateCredentialsResponse`
-
+        
+        Creates a Git credential entry for the user. Only one Git credential per user is supported, so any
+        attempts to create credentials when an entry already exists will fail. Use the PATCH endpoint to update
+        existing credentials, or the DELETE endpoint to delete existing credentials.
+        
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
+        :param git_username: str (optional)
+          The username or email provided with your Git provider account, depending on which provider you are
+          using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
+          be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
+          BitBucket Server, username must be used. For all other providers please see your provider's Personal
+          Access Token authentication documentation to see what is supported.
+        :param personal_access_token: str (optional)
+          The personal access token used to authenticate to the corresponding Git provider. For certain
+          providers, support may exist for other types of scoped access tokens. [Learn more].
+          
+          [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+        
+        :returns: :class:`CreateCredentialsResponse`
+        
 
     .. py:method:: delete(credential_id: int)
 
         Delete a credential.
-
-Deletes the specified Git credential.
-
-:param credential_id: int
-  The ID for the corresponding credential to access.
-
-
-
+        
+        Deletes the specified Git credential.
+        
+        :param credential_id: int
+          The ID for the corresponding credential to access.
+        
+        
+        
 
     .. py:method:: get(credential_id: int) -> GetCredentialsResponse
 
@@ -82,14 +82,14 @@ Deletes the specified Git credential.
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Get a credential entry.
-
-Gets the Git credential with the specified credential ID.
-
-:param credential_id: int
-  The ID for the corresponding credential to access.
-
-:returns: :class:`GetCredentialsResponse`
-
+        
+        Gets the Git credential with the specified credential ID.
+        
+        :param credential_id: int
+          The ID for the corresponding credential to access.
+        
+        :returns: :class:`GetCredentialsResponse`
+        
 
     .. py:method:: list() -> Iterator[CredentialInfo]
 
@@ -105,11 +105,11 @@ Gets the Git credential with the specified credential ID.
             list = w.git_credentials.list()
 
         Get Git credentials.
-
-Lists the calling user's Git credentials. One credential per user is supported.
-
-:returns: Iterator over :class:`CredentialInfo`
-
+        
+        Lists the calling user's Git credentials. One credential per user is supported.
+        
+        :returns: Iterator over :class:`CredentialInfo`
+        
 
     .. py:method:: update(credential_id: int, git_provider: str [, git_username: Optional[str], personal_access_token: Optional[str]])
 
@@ -135,25 +135,26 @@ Lists the calling user's Git credentials. One credential per user is supported.
             w.git_credentials.delete(credential_id=cr.credential_id)
 
         Update a credential.
-
-Updates the specified Git credential.
-
-:param credential_id: int
-  The ID for the corresponding credential to access.
-:param git_provider: str
-  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-  `gitLabEnterpriseEdition` and `awsCodeCommit`.
-:param git_username: str (optional)
-  The username or email provided with your Git provider account, depending on which provider you are
-  using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
-  be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
-  BitBucket Server, username must be used. For all other providers please see your provider's Personal
-  Access Token authentication documentation to see what is supported.
-:param personal_access_token: str (optional)
-  The personal access token used to authenticate to the corresponding Git provider. For certain
-  providers, support may exist for other types of scoped access tokens. [Learn more].
-  
-  [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
-
-
+        
+        Updates the specified Git credential.
+        
+        :param credential_id: int
+          The ID for the corresponding credential to access.
+        :param git_provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
+        :param git_username: str (optional)
+          The username or email provided with your Git provider account, depending on which provider you are
+          using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
+          be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
+          BitBucket Server, username must be used. For all other providers please see your provider's Personal
+          Access Token authentication documentation to see what is supported.
+        :param personal_access_token: str (optional)
+          The personal access token used to authenticate to the corresponding Git provider. For certain
+          providers, support may exist for other types of scoped access tokens. [Learn more].
+          
+          [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index ce8908906..5f3e3e290 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -5,14 +5,14 @@
 .. py:class:: ReposAPI
 
     The Repos API allows users to manage their git repos. Users can use the API to access all repos that they
-have manage permissions on.
-
-Databricks Repos is a visual Git client in Databricks. It supports common Git operations such a cloning a
-repository, committing and pushing, pulling, branch management, and visual comparison of diffs when
-committing.
-
-Within Repos you can develop code in notebooks or other files and follow data science and engineering code
-development best practices using Git for version control, collaboration, and CI/CD.
+    have manage permissions on.
+    
+    Databricks Repos is a visual Git client in Databricks. It supports common Git operations such as cloning a
+    repository, committing and pushing, pulling, branch management, and visual comparison of diffs when
+    committing.
+    
+    Within Repos you can develop code in notebooks or other files and follow data science and engineering code
+    development best practices using Git for version control, collaboration, and CI/CD.
 
     .. py:method:: create(url: str, provider: str [, path: Optional[str], sparse_checkout: Optional[SparseCheckout]]) -> CreateRepoResponse
 
@@ -35,37 +35,37 @@ development best practices using Git for version control, collaboration, and CI/
             w.repos.delete(repo_id=ri.id)
 
         Create a repo.
-
-Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
-programmatically must be linked to a remote Git repo, unlike repos created in the browser.
-
-:param url: str
-  URL of the Git repository to be linked.
-:param provider: str
-  Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
-  `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
-  `gitLabEnterpriseEdition` and `awsCodeCommit`.
-:param path: str (optional)
-  Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If repo
-  is created in `/Repos`, path must be in the format `/Repos/{folder}/{repo-name}`.
-:param sparse_checkout: :class:`SparseCheckout` (optional)
-  If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
-  sparse checkout after the repo is created.
-
-:returns: :class:`CreateRepoResponse`
-
+        
+        Creates a repo in the workspace and links it to the remote Git repo specified. Note that repos created
+        programmatically must be linked to a remote Git repo, unlike repos created in the browser.
+        
+        :param url: str
+          URL of the Git repository to be linked.
+        :param provider: str
+          Git provider. This field is case-insensitive. The available Git providers are `gitHub`,
+          `bitbucketCloud`, `gitLab`, `azureDevOpsServices`, `gitHubEnterprise`, `bitbucketServer`,
+          `gitLabEnterpriseEdition` and `awsCodeCommit`.
+        :param path: str (optional)
+          Desired path for the repo in the workspace. Almost any path in the workspace can be chosen. If the
+          repo is created in `/Repos`, the path must be in the format `/Repos/{folder}/{repo-name}`.
+        :param sparse_checkout: :class:`SparseCheckout` (optional)
+          If specified, the repo will be created with sparse checkout enabled. You cannot enable/disable
+          sparse checkout after the repo is created.
+        
+        :returns: :class:`CreateRepoResponse`
+        
 
     .. py:method:: delete(repo_id: int)
 
         Delete a repo.
-
-Deletes the specified repo.
-
-:param repo_id: int
-  The ID for the corresponding repo to delete.
-
-
-
+        
+        Deletes the specified repo.
+        
+        :param repo_id: int
+          The ID for the corresponding repo to delete.
+        
+        
+        
 
     .. py:method:: get(repo_id: int) -> GetRepoResponse
 
@@ -90,38 +90,38 @@ Deletes the specified repo.
             w.repos.delete(repo_id=ri.id)
 
         Get a repo.
-
-Returns the repo with the given repo ID.
-
-:param repo_id: int
-  ID of the Git folder (repo) object in the workspace.
-
-:returns: :class:`GetRepoResponse`
-
+        
+        Returns the repo with the given repo ID.
+        
+        :param repo_id: int
+          ID of the Git folder (repo) object in the workspace.
+        
+        :returns: :class:`GetRepoResponse`
+        
 
     .. py:method:: get_permission_levels(repo_id: str) -> GetRepoPermissionLevelsResponse
 
         Get repo permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param repo_id: str
-  The repo for which to get or manage permissions.
-
-:returns: :class:`GetRepoPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param repo_id: str
+          The repo for which to get or manage permissions.
+        
+        :returns: :class:`GetRepoPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(repo_id: str) -> RepoPermissions
 
         Get repo permissions.
-
-Gets the permissions of a repo. Repos can inherit permissions from their root object.
-
-:param repo_id: str
-  The repo for which to get or manage permissions.
-
-:returns: :class:`RepoPermissions`
-
+        
+        Gets the permissions of a repo. Repos can inherit permissions from their root object.
+        
+        :param repo_id: str
+          The repo for which to get or manage permissions.
+        
+        :returns: :class:`RepoPermissions`
+        
 
     .. py:method:: list( [, next_page_token: Optional[str], path_prefix: Optional[str]]) -> Iterator[RepoInfo]
 
@@ -138,34 +138,34 @@ Gets the permissions of a repo. Repos can inherit permissions from their root ob
             all = w.repos.list(workspace.ListReposRequest())
 
         Get repos.
-
-Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
-through additional pages.
-
-:param next_page_token: str (optional)
-  Token used to get the next page of results. If not specified, returns the first page of results as
-  well as a next page token if there are more results.
-:param path_prefix: str (optional)
-  Filters repos that have paths starting with the given path prefix. If not provided or when provided
-  an effectively empty prefix (`/` or `/Workspace`) Git folders (repos) from `/Workspace/Repos` will
-  be served.
-
-:returns: Iterator over :class:`RepoInfo`
-
+        
+        Returns repos that the calling user has Manage permissions on. Use `next_page_token` to iterate
+        through additional pages.
+        
+        :param next_page_token: str (optional)
+          Token used to get the next page of results. If not specified, returns the first page of results as
+          well as a next page token if there are more results.
+        :param path_prefix: str (optional)
+          Filters repos that have paths starting with the given path prefix. If not provided, or if an
+          effectively empty prefix (`/` or `/Workspace`) is provided, Git folders (repos) from
+          `/Workspace/Repos` will be served.
+        
+        :returns: Iterator over :class:`RepoInfo`
+        
 
     .. py:method:: set_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions
 
         Set repo permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their root object.
-
-:param repo_id: str
-  The repo for which to get or manage permissions.
-:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-
-:returns: :class:`RepoPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
+        
+        :param repo_id: str
+          The repo for which to get or manage permissions.
+        :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
+        
+        :returns: :class:`RepoPermissions`
+        
 
     .. py:method:: update(repo_id: int [, branch: Optional[str], sparse_checkout: Optional[SparseCheckoutUpdate], tag: Optional[str]])
 
@@ -190,33 +190,34 @@ permissions if none are specified. Objects can inherit permissions from their ro
             w.repos.delete(repo_id=ri.id)
 
         Update a repo.
-
-Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same
-branch.
-
-:param repo_id: int
-  ID of the Git folder (repo) object in the workspace.
-:param branch: str (optional)
-  Branch that the local version of the repo is checked out to.
-:param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
-  If specified, update the sparse checkout settings. The update will fail if sparse checkout is not
-  enabled for the repo.
-:param tag: str (optional)
-  Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo
-  in a detached HEAD state. Before committing new changes, you must update the repo to a branch
-  instead of the detached HEAD.
-
-
-
+        
+        Updates the repo to a different branch or tag, or updates the repo to the latest commit on the same
+        branch.
+        
+        :param repo_id: int
+          ID of the Git folder (repo) object in the workspace.
+        :param branch: str (optional)
+          Branch that the local version of the repo is checked out to.
+        :param sparse_checkout: :class:`SparseCheckoutUpdate` (optional)
+          If specified, update the sparse checkout settings. The update will fail if sparse checkout is not
+          enabled for the repo.
+        :param tag: str (optional)
+          Tag that the local version of the repo is checked out to. Updating the repo to a tag puts the repo
+          in a detached HEAD state. Before committing new changes, you must update the repo to a branch
+          instead of the detached HEAD.
+        
+        
+        
 
     .. py:method:: update_permissions(repo_id: str [, access_control_list: Optional[List[RepoAccessControlRequest]]]) -> RepoPermissions
 
         Update repo permissions.
-
-Updates the permissions on a repo. Repos can inherit permissions from their root object.
-
-:param repo_id: str
-  The repo for which to get or manage permissions.
-:param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
-
-:returns: :class:`RepoPermissions`
+        
+        Updates the permissions on a repo. Repos can inherit permissions from their root object.
+        
+        :param repo_id: str
+          The repo for which to get or manage permissions.
+        :param access_control_list: List[:class:`RepoAccessControlRequest`] (optional)
+        
+        :returns: :class:`RepoPermissions`
+        
\ No newline at end of file
diff --git a/docs/workspace/workspace/secrets.rst b/docs/workspace/workspace/secrets.rst
index cb37e6155..96d94e1de 100644
--- a/docs/workspace/workspace/secrets.rst
+++ b/docs/workspace/workspace/secrets.rst
@@ -5,14 +5,14 @@
 .. py:class:: SecretsAPI
 
     The Secrets API allows you to manage secrets, secret scopes, and access permissions.
-
-Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
-directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
-reference them in notebooks and jobs.
-
-Administrators, secret creators, and users granted permission can read Databricks secrets. While
-Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
-possible to prevent such users from reading secrets.
+    
+    Sometimes accessing data requires that you authenticate to external data sources through JDBC. Instead of
+    directly entering your credentials into a notebook, use Databricks secrets to store your credentials and
+    reference them in notebooks and jobs.
+    
+    Administrators, secret creators, and users granted permission can read Databricks secrets. While
+    Databricks makes an effort to redact secret values that might be displayed in notebooks, it is not
+    possible to prevent such users from reading secrets.
 
     .. py:method:: create_scope(scope: str [, backend_azure_keyvault: Optional[AzureKeyVaultSecretScopeMetadata], initial_manage_principal: Optional[str], scope_backend_type: Optional[ScopeBackendType]])
 
@@ -38,112 +38,112 @@ possible to prevent such users from reading secrets.
             w.secrets.delete_scope(scope=scope_name)
 
         Create a new secret scope.
-
-The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
-exceed 128 characters.
-
-:param scope: str
-  Scope name requested by the user. Scope names are unique.
-:param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional)
-  The metadata for the secret scope if the type is `AZURE_KEYVAULT`
-:param initial_manage_principal: str (optional)
-  The principal that is initially granted `MANAGE` permission to the created scope.
-:param scope_backend_type: :class:`ScopeBackendType` (optional)
-  The backend type the scope will be created with. If not specified, will default to `DATABRICKS`
-
-
-
+        
+        The scope name must consist of alphanumeric characters, dashes, underscores, and periods, and may not
+        exceed 128 characters.
+        
+        :param scope: str
+          Scope name requested by the user. Scope names are unique.
+        :param backend_azure_keyvault: :class:`AzureKeyVaultSecretScopeMetadata` (optional)
+          The metadata for the secret scope if the type is `AZURE_KEYVAULT`
+        :param initial_manage_principal: str (optional)
+          The principal that is initially granted `MANAGE` permission to the created scope.
+        :param scope_backend_type: :class:`ScopeBackendType` (optional)
+          The backend type the scope will be created with. If not specified, will default to `DATABRICKS`
+        
+        
+        
 
     .. py:method:: delete_acl(scope: str, principal: str)
 
         Delete an ACL.
-
-Deletes the given ACL on the given scope.
-
-Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
-such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
-permission to make this API call.
-
-:param scope: str
-  The name of the scope to remove permissions from.
-:param principal: str
-  The principal to remove an existing ACL from.
-
-
-
+        
+        Deletes the given ACL on the given scope.
+        
+        Users must have the `MANAGE` permission to invoke this API. Throws `RESOURCE_DOES_NOT_EXIST` if no
+        such secret scope, principal, or ACL exists. Throws `PERMISSION_DENIED` if the user does not have
+        permission to make this API call.
+        
+        :param scope: str
+          The name of the scope to remove permissions from.
+        :param principal: str
+          The principal to remove an existing ACL from.
+        
+        
+        
 
     .. py:method:: delete_scope(scope: str)
 
         Delete a secret scope.
-
-Deletes a secret scope.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
-does not have permission to make this API call.
-
-:param scope: str
-  Name of the scope to delete.
-
-
-
+        
+        Deletes a secret scope.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if the scope does not exist. Throws `PERMISSION_DENIED` if the user
+        does not have permission to make this API call.
+        
+        :param scope: str
+          Name of the scope to delete.
+        
+        
+        
 
     .. py:method:: delete_secret(scope: str, key: str)
 
         Delete a secret.
-
-Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
-secret scope.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
-if the user does not have permission to make this API call.
-
-:param scope: str
-  The name of the scope that contains the secret to delete.
-:param key: str
-  Name of the secret to delete.
-
-
-
+        
+        Deletes the secret stored in this secret scope. You must have `WRITE` or `MANAGE` permission on the
+        secret scope.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope or secret exists. Throws `PERMISSION_DENIED`
+        if the user does not have permission to make this API call.
+        
+        :param scope: str
+          The name of the scope that contains the secret to delete.
+        :param key: str
+          Name of the secret to delete.
+        
+        
+        
 
     .. py:method:: get_acl(scope: str, principal: str) -> AclItem
 
         Get secret ACL details.
-
-Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
-permission to invoke this API.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
-user does not have permission to make this API call.
-
-:param scope: str
-  The name of the scope to fetch ACL information from.
-:param principal: str
-  The principal to fetch ACL information for.
-
-:returns: :class:`AclItem`
-
+        
+        Gets the details about the given ACL, such as the group and permission. Users must have the `MANAGE`
+        permission to invoke this API.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
+        user does not have permission to make this API call.
+        
+        :param scope: str
+          The name of the scope to fetch ACL information from.
+        :param principal: str
+          The principal to fetch ACL information for.
+        
+        :returns: :class:`AclItem`
+        
 
     .. py:method:: get_secret(scope: str, key: str) -> GetSecretResponse
 
         Get a secret.
-
-Gets the bytes representation of a secret value for the specified scope and key.
-
-Users need the READ permission to make this call.
-
-Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the
-caller in DBUtils and the type the data is decoded into.
-
-Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
-``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
-
-:param scope: str
-  The name of the scope to fetch secret information from.
-:param key: str
-  The key to fetch secret for.
-
-:returns: :class:`GetSecretResponse`
-
+        
+        Gets the bytes representation of a secret value for the specified scope and key.
+        
+        Users need the READ permission to make this call.
+        
+        Note that the secret value returned is in bytes. The interpretation of the bytes is determined by the
+        caller in DBUtils and the type the data is decoded into.
+        
+        Throws ``PERMISSION_DENIED`` if the user does not have permission to make this API call. Throws
+        ``RESOURCE_DOES_NOT_EXIST`` if no such secret or secret scope exists.
+        
+        :param scope: str
+          The name of the scope to fetch secret information from.
+        :param key: str
+          The key to fetch secret for.
+        
+        :returns: :class:`GetSecretResponse`
+        
 
     .. py:method:: list_acls(scope: str) -> Iterator[AclItem]
 
@@ -171,17 +171,17 @@ Throws ``PERMISSION_DENIED`` if the user does not have permission to make this A
             w.secrets.delete_scope(scope=scope_name)
 
         Lists ACLs.
-
-List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
-user does not have permission to make this API call.
-
-:param scope: str
-  The name of the scope to fetch ACL information from.
-
-:returns: Iterator over :class:`AclItem`
-
+        
+        List the ACLs for a given secret scope. Users must have the `MANAGE` permission to invoke this API.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `PERMISSION_DENIED` if the
+        user does not have permission to make this API call.
+        
+        :param scope: str
+          The name of the scope to fetch ACL information from.
+        
+        :returns: Iterator over :class:`AclItem`
+        
 
     .. py:method:: list_scopes() -> Iterator[SecretScope]
 
@@ -197,13 +197,13 @@ user does not have permission to make this API call.
             scopes = w.secrets.list_scopes()
 
         List all scopes.
-
-Lists all secret scopes available in the workspace.
-
-Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
-
-:returns: Iterator over :class:`SecretScope`
-
+        
+        Lists all secret scopes available in the workspace.
+        
+        Throws `PERMISSION_DENIED` if the user does not have permission to make this API call.
+        
+        :returns: Iterator over :class:`SecretScope`
+        
 
     .. py:method:: list_secrets(scope: str) -> Iterator[SecretMetadata]
 
@@ -231,19 +231,19 @@ Throws `PERMISSION_DENIED` if the user does not have permission to make this API
             w.secrets.delete_scope(scope=scope_name)
 
         List secret keys.
-
-Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
-cannot be retrieved using this API. Users need the READ permission to make this call.
-
-The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
-no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
-this API call.
-
-:param scope: str
-  The name of the scope to list secrets within.
-
-:returns: Iterator over :class:`SecretMetadata`
-
+        
+        Lists the secret keys that are stored at this scope. This is a metadata-only operation; secret data
+        cannot be retrieved using this API. Users need the READ permission to make this call.
+        
+        The lastUpdatedTimestamp returned is in milliseconds since epoch. Throws `RESOURCE_DOES_NOT_EXIST` if
+        no such secret scope exists. Throws `PERMISSION_DENIED` if the user does not have permission to make
+        this API call.
+        
+        :param scope: str
+          The name of the scope to list secrets within.
+        
+        :returns: Iterator over :class:`SecretMetadata`
+        
 
     .. py:method:: put_acl(scope: str, principal: str, permission: AclPermission)
 
@@ -275,41 +275,41 @@ this API call.
             w.secrets.delete_scope(scope=scope_name)
 
         Create/update an ACL.
-
-Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
-group) on the specified scope point.
-
-In general, a user or group will use the most powerful permission available to them, and permissions
-are ordered as follows:
-
-* `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
-read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
-secrets are available.
-
-Note that in general, secret values can only be read from within a command on a cluster (for example,
-through a notebook). There is no API to read the actual secret value material outside of a cluster.
-However, the user's permission will be applied based on who is executing the command, and they must
-have at least READ permission.
-
-Users must have the `MANAGE` permission to invoke this API.
-
-The principal is a user or group name corresponding to an existing Databricks principal to be granted
-or revoked access.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
-permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or
-principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API
-call.
-
-:param scope: str
-  The name of the scope to apply permissions to.
-:param principal: str
-  The principal in which the permission is applied.
-:param permission: :class:`AclPermission`
-  The permission level applied to the principal.
-
-
-
+        
+        Creates or overwrites the Access Control List (ACL) associated with the given principal (user or
+        group) on the specified scope point.
+        
+        In general, a user or group will use the most powerful permission available to them, and permissions
+        are ordered as follows:
+        
+        * `MANAGE` - Allowed to change ACLs, and read and write to this secret scope. * `WRITE` - Allowed to
+        read and write to this secret scope. * `READ` - Allowed to read this secret scope and list what
+        secrets are available.
+        
+        Note that in general, secret values can only be read from within a command on a cluster (for example,
+        through a notebook). There is no API to read the actual secret value material outside of a cluster.
+        However, the user's permission will be applied based on who is executing the command, and they must
+        have at least READ permission.
+        
+        Users must have the `MANAGE` permission to invoke this API.
+        
+        The principal is a user or group name corresponding to an existing Databricks principal to be granted
+        or revoked access.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_ALREADY_EXISTS` if a
+        permission for the principal already exists. Throws `INVALID_PARAMETER_VALUE` if the permission or
+        principal is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this API
+        call.
+        
+        :param scope: str
+          The name of the scope to apply permissions to.
+        :param principal: str
+          The principal to which the permission is applied.
+        :param permission: :class:`AclPermission`
+          The permission level applied to the principal.
+        
+        
+        
 
     .. py:method:: put_secret(scope: str, key: str [, bytes_value: Optional[str], string_value: Optional[str]])
 
@@ -337,30 +337,31 @@ call.
             w.secrets.delete_scope(scope=scope_name)
 
         Add a secret.
-
-Inserts a secret under the provided scope with the given name. If a secret already exists with the
-same name, this command overwrites the existing secret's value. The server encrypts the secret using
-the secret scope's encryption settings before storing it.
-
-You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
-alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
-maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.
-
-The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
-the value returned when the secret value is requested. Exactly one must be specified.
-
-Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
-maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
-value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
-API call.
-
-:param scope: str
-  The name of the scope to which the secret will be associated with.
-:param key: str
-  A unique name to identify the secret.
-:param bytes_value: str (optional)
-  If specified, value will be stored as bytes.
-:param string_value: str (optional)
-  If specified, note that the value will be stored in UTF-8 (MB4) form.
-
-
+        
+        Inserts a secret under the provided scope with the given name. If a secret already exists with the
+        same name, this command overwrites the existing secret's value. The server encrypts the secret using
+        the secret scope's encryption settings before storing it.
+        
+        You must have `WRITE` or `MANAGE` permission on the secret scope. The secret key must consist of
+        alphanumeric characters, dashes, underscores, and periods, and cannot exceed 128 characters. The
+        maximum allowed secret value size is 128 KB. The maximum number of secrets in a given scope is 1000.
+        
+        The input fields "string_value" or "bytes_value" specify the type of the secret, which will determine
+        the value returned when the secret value is requested. Exactly one must be specified.
+        
+        Throws `RESOURCE_DOES_NOT_EXIST` if no such secret scope exists. Throws `RESOURCE_LIMIT_EXCEEDED` if
+        maximum number of secrets in scope is exceeded. Throws `INVALID_PARAMETER_VALUE` if the key name or
+        value length is invalid. Throws `PERMISSION_DENIED` if the user does not have permission to make this
+        API call.
+        
+        :param scope: str
+          The name of the scope with which the secret will be associated.
+        :param key: str
+          A unique name to identify the secret.
+        :param bytes_value: str (optional)
+          If specified, value will be stored as bytes.
+        :param string_value: str (optional)
+          If specified, note that the value will be stored in UTF-8 (MB4) form.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 5c7516cb8..595872deb 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -5,30 +5,30 @@
 .. py:class:: WorkspaceExt
 
     The Workspace API allows you to list, import, export, and delete notebooks and folders.
-
-A notebook is a web-based interface to a document that contains runnable code, visualizations, and
-explanatory text.
+    
+    A notebook is a web-based interface to a document that contains runnable code, visualizations, and
+    explanatory text.
 
     .. py:method:: delete(path: str [, recursive: Optional[bool]])
 
         Delete a workspace object.
-
-Deletes an object or a directory (and optionally recursively deletes all objects in the directory). *
-If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a
-non-empty directory and `recursive` is set to `false`, this call returns an error
-`DIRECTORY_NOT_EMPTY`.
-
-Object deletion cannot be undone and deleting a directory recursively is not atomic.
-
-:param path: str
-  The absolute path of the notebook or directory.
-:param recursive: bool (optional)
-  The flag that specifies whether to delete the object recursively. It is `false` by default. Please
-  note this deleting directory is not atomic. If it fails in the middle, some of objects under this
-  directory may be deleted and cannot be undone.
-
-
-
+        
+        Deletes an object or a directory (and optionally recursively deletes all objects in the directory). *
+        If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`. * If `path` is a
+        non-empty directory and `recursive` is set to `false`, this call returns an error
+        `DIRECTORY_NOT_EMPTY`.
+        
+        Object deletion cannot be undone and deleting a directory recursively is not atomic.
+        
+        :param path: str
+          The absolute path of the notebook or directory.
+        :param recursive: bool (optional)
+          The flag that specifies whether to delete the object recursively. It is `false` by default. Please
+          note that deleting a directory is not atomic: if it fails partway, some of the objects under this
+          directory may already be deleted, and this cannot be undone.
+        
+        
+        
 
     .. py:method:: download(path: str [, format: ExportFormat]) -> BinaryIO
 
@@ -55,15 +55,15 @@ Object deletion cannot be undone and deleting a directory recursively is not ato
             w.workspace.delete(py_file)
 
         
-Downloads notebook or file from the workspace
-
-:param path:     location of the file or notebook on workspace.
-:param format:   By default, `ExportFormat.SOURCE`. If using `ExportFormat.AUTO` the `path`
-                 is imported or exported as either a workspace file or a notebook, depending
-                 on an analysis of the `item`’s extension and the header content provided in
-                 the request.
-:return:         file-like `io.BinaryIO` of the `path` contents.
-
+        Downloads a notebook or file from the workspace.
+
+        :param path:     location of the file or notebook on workspace.
+        :param format:   By default, `ExportFormat.SOURCE`. If using `ExportFormat.AUTO` the `path`
+                         is imported or exported as either a workspace file or a notebook, depending
+                         on an analysis of the `item`’s extension and the header content provided in
+                         the request.
+        :return:         file-like `io.BinaryIO` of the `path` contents.
+        
 
     .. py:method:: export(path: str [, format: Optional[ExportFormat]]) -> ExportResponse
 
@@ -84,60 +84,60 @@ Downloads notebook or file from the workspace
             export_response = w.workspace.export(format=workspace.ExportFormat.SOURCE, path=notebook)
 
         Export a workspace object.
-
-Exports an object or the contents of an entire directory.
-
-If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
-
-If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
-Currently, this API does not support exporting a library.
-
-:param path: str
-  The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`,
-  `SOURCE`, and `AUTO` format.
-:param format: :class:`ExportFormat` (optional)
-  This specifies the format of the exported file. By default, this is `SOURCE`.
-  
-  The value is case sensitive.
-  
-  - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook
-  entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported
-  as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format.
-  Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to
-  R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type.
-  Directory exports will include notebooks and workspace files.
-
-:returns: :class:`ExportResponse`
-
+        
+        Exports an object or the contents of an entire directory.
+        
+        If `path` does not exist, this call returns an error `RESOURCE_DOES_NOT_EXIST`.
+        
+        If the exported data would exceed size limit, this call returns `MAX_NOTEBOOK_SIZE_EXCEEDED`.
+        Currently, this API does not support exporting a library.
+        
+        :param path: str
+          The absolute path of the object or directory. Exporting a directory is only supported for the `DBC`,
+          `SOURCE`, and `AUTO` format.
+        :param format: :class:`ExportFormat` (optional)
+          This specifies the format of the exported file. By default, this is `SOURCE`.
+          
+          The value is case sensitive.
+          
+          - `SOURCE`: The notebook is exported as source code. Directory exports will not include non-notebook
+          entries. - `HTML`: The notebook is exported as an HTML file. - `JUPYTER`: The notebook is exported
+          as a Jupyter/IPython Notebook file. - `DBC`: The notebook is exported in Databricks archive format.
+          Directory exports will not include non-notebook entries. - `R_MARKDOWN`: The notebook is exported to
+          R Markdown format. - `AUTO`: The object or directory is exported depending on the objects type.
+          Directory exports will include notebooks and workspace files.
+        
+        :returns: :class:`ExportResponse`
+        
 
     .. py:method:: get_permission_levels(workspace_object_type: str, workspace_object_id: str) -> GetWorkspaceObjectPermissionLevelsResponse
 
         Get workspace object permission levels.
-
-Gets the permission levels that a user can have on an object.
-
-:param workspace_object_type: str
-  The workspace object type for which to get or manage permissions.
-:param workspace_object_id: str
-  The workspace object for which to get or manage permissions.
-
-:returns: :class:`GetWorkspaceObjectPermissionLevelsResponse`
-
+        
+        Gets the permission levels that a user can have on an object.
+        
+        :param workspace_object_type: str
+          The workspace object type for which to get or manage permissions.
+        :param workspace_object_id: str
+          The workspace object for which to get or manage permissions.
+        
+        :returns: :class:`GetWorkspaceObjectPermissionLevelsResponse`
+        
 
     .. py:method:: get_permissions(workspace_object_type: str, workspace_object_id: str) -> WorkspaceObjectPermissions
 
         Get workspace object permissions.
-
-Gets the permissions of a workspace object. Workspace objects can inherit permissions from their
-parent objects or root object.
-
-:param workspace_object_type: str
-  The workspace object type for which to get or manage permissions.
-:param workspace_object_id: str
-  The workspace object for which to get or manage permissions.
-
-:returns: :class:`WorkspaceObjectPermissions`
-
+        
+        Gets the permissions of a workspace object. Workspace objects can inherit permissions from their
+        parent objects or root object.
+        
+        :param workspace_object_type: str
+          The workspace object type for which to get or manage permissions.
+        :param workspace_object_id: str
+          The workspace object for which to get or manage permissions.
+        
+        :returns: :class:`WorkspaceObjectPermissions`
+        
 
     .. py:method:: get_status(path: str) -> ObjectInfo
 
@@ -157,15 +157,15 @@ parent objects or root object.
             obj = w.workspace.get_status(path=notebook_path)
 
         Get status.
-
-Gets the status of an object or a directory. If `path` does not exist, this call returns an error
-`RESOURCE_DOES_NOT_EXIST`.
-
-:param path: str
-  The absolute path of the notebook or directory.
-
-:returns: :class:`ObjectInfo`
-
+        
+        Gets the status of an object or a directory. If `path` does not exist, this call returns an error
+        `RESOURCE_DOES_NOT_EXIST`.
+        
+        :param path: str
+          The absolute path of the notebook or directory.
+        
+        :returns: :class:`ObjectInfo`
+        
 
     .. py:method:: import_(path: str [, content: Optional[str], format: Optional[ImportFormat], language: Optional[Language], overwrite: Optional[bool]])
 
@@ -191,40 +191,40 @@ Gets the status of an object or a directory. If `path` does not exist, this call
                                 path=notebook_path)
 
         Import a workspace object.
-
-Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
-If `path` already exists and `overwrite` is set to `false`, this call returns an error
-`RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE`
-format with the `language` field unset. To import a single file as `SOURCE`, you must set the
-`language` field.
-
-:param path: str
-  The absolute path of the object or directory. Importing a directory is only supported for the `DBC`
-  and `SOURCE` formats.
-:param content: str (optional)
-  The base64-encoded content. This has a limit of 10 MB.
-  
-  If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
-  This parameter might be absent, and instead a posted file is used.
-:param format: :class:`ImportFormat` (optional)
-  This specifies the format of the file to be imported.
-  
-  The value is case sensitive.
-  
-  - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
-  content provided in the request. If the item is imported as a notebook, then the item's extension is
-  automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`:
-  The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython
-  Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for
-  directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
-:param language: :class:`Language` (optional)
-  The language of the object. This value is set only if the object type is `NOTEBOOK`.
-:param overwrite: bool (optional)
-  The flag that specifies whether to overwrite existing object. It is `false` by default. For `DBC`
-  format, `overwrite` is not supported since it may contain a directory.
-
-
-
+        
+        Imports a workspace object (for example, a notebook or file) or the contents of an entire directory.
+        If `path` already exists and `overwrite` is set to `false`, this call returns an error
+        `RESOURCE_ALREADY_EXISTS`. To import a directory, you can use either the `DBC` format or the `SOURCE`
+        format with the `language` field unset. To import a single file as `SOURCE`, you must set the
+        `language` field.
+        
+        :param path: str
+          The absolute path of the object or directory. Importing a directory is only supported for the `DBC`
+          and `SOURCE` formats.
+        :param content: str (optional)
+          The base64-encoded content. This has a limit of 10 MB.
+          
+          If the limit (10MB) is exceeded, exception with error code **MAX_NOTEBOOK_SIZE_EXCEEDED** is thrown.
+          This parameter might be absent, and instead a posted file is used.
+        :param format: :class:`ImportFormat` (optional)
+          This specifies the format of the file to be imported.
+          
+          The value is case sensitive.
+          
+          - `AUTO`: The item is imported depending on an analysis of the item's extension and the header
+          content provided in the request. If the item is imported as a notebook, then the item's extension is
+          automatically removed. - `SOURCE`: The notebook or directory is imported as source code. - `HTML`:
+          The notebook is imported as an HTML file. - `JUPYTER`: The notebook is imported as a Jupyter/IPython
+          Notebook file. - `DBC`: The notebook is imported in Databricks archive format. Required for
+          directories. - `R_MARKDOWN`: The notebook is imported from R Markdown format.
+        :param language: :class:`Language` (optional)
+          The language of the object. This value is set only if the object type is `NOTEBOOK`.
+        :param overwrite: bool (optional)
+          The flag that specifies whether to overwrite an existing object. It is `false` by default. For `DBC`
+          format, `overwrite` is not supported since it may contain a directory.
+        
+        
+        
 
     .. py:method:: list(path: str [, notebooks_modified_after: int, recursive: bool = False]) -> ObjectInfo
 
@@ -244,62 +244,62 @@ format with the `language` field unset. To import a single file as `SOURCE`, you
 
         List workspace objects
 
-:param recursive: bool
-    Optionally invoke recursive traversal
-
-:returns: Iterator of workspaceObjectInfo
+        :param recursive: bool
+            Optionally invoke recursive traversal
 
+        :returns: Iterator of workspaceObjectInfo
+        
 
     .. py:method:: mkdirs(path: str)
 
         Create a directory.
-
-Creates the specified directory (and necessary parent directories if they do not exist). If there is
-an object (not a directory) at any prefix of the input path, this call returns an error
-`RESOURCE_ALREADY_EXISTS`.
-
-Note that if this operation fails it may have succeeded in creating some of the necessary parent
-directories.
-
-:param path: str
-  The absolute path of the directory. If the parent directories do not exist, it will also create
-  them. If the directory already exists, this command will do nothing and succeed.
-
-
-
+        
+        Creates the specified directory (and necessary parent directories if they do not exist). If there is
+        an object (not a directory) at any prefix of the input path, this call returns an error
+        `RESOURCE_ALREADY_EXISTS`.
+        
+        Note that if this operation fails it may have succeeded in creating some of the necessary parent
+        directories.
+        
+        :param path: str
+          The absolute path of the directory. If the parent directories do not exist, it will also create
+          them. If the directory already exists, this command will do nothing and succeed.
+        
+        
+        
 
     .. py:method:: set_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions
 
         Set workspace object permissions.
-
-Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
-permissions if none are specified. Objects can inherit permissions from their parent objects or root
-object.
-
-:param workspace_object_type: str
-  The workspace object type for which to get or manage permissions.
-:param workspace_object_id: str
-  The workspace object for which to get or manage permissions.
-:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-
-:returns: :class:`WorkspaceObjectPermissions`
-
+        
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
+        
+        :param workspace_object_type: str
+          The workspace object type for which to get or manage permissions.
+        :param workspace_object_id: str
+          The workspace object for which to get or manage permissions.
+        :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
+        
+        :returns: :class:`WorkspaceObjectPermissions`
+        
 
     .. py:method:: update_permissions(workspace_object_type: str, workspace_object_id: str [, access_control_list: Optional[List[WorkspaceObjectAccessControlRequest]]]) -> WorkspaceObjectPermissions
 
         Update workspace object permissions.
-
-Updates the permissions on a workspace object. Workspace objects can inherit permissions from their
-parent objects or root object.
-
-:param workspace_object_type: str
-  The workspace object type for which to get or manage permissions.
-:param workspace_object_id: str
-  The workspace object for which to get or manage permissions.
-:param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
-
-:returns: :class:`WorkspaceObjectPermissions`
-
+        
+        Updates the permissions on a workspace object. Workspace objects can inherit permissions from their
+        parent objects or root object.
+        
+        :param workspace_object_type: str
+          The workspace object type for which to get or manage permissions.
+        :param workspace_object_id: str
+          The workspace object for which to get or manage permissions.
+        :param access_control_list: List[:class:`WorkspaceObjectAccessControlRequest`] (optional)
+        
+        :returns: :class:`WorkspaceObjectPermissions`
+        
 
     .. py:method:: upload(path: str, content: bytes [, format: ImportFormat, language: Language, overwrite: bool = False])
 
@@ -325,18 +325,19 @@ parent objects or root object.
             w.workspace.delete(notebook)
 
         
-Uploads a workspace object (for example, a notebook or file) or the contents of an entire
-directory (`DBC` format).
-
-Errors:
- * `RESOURCE_ALREADY_EXISTS`: if `path` already exists no `overwrite=True`.
- * `INVALID_PARAMETER_VALUE`: if `format` and `content` values are not compatible.
-
-:param path:     target location of the file on workspace.
-:param content:  the contents as either raw binary data `bytes` or a file-like the file-like `io.BinaryIO` of the `path` contents.
-:param format:   By default, `ImportFormat.SOURCE`. If using `ImportFormat.AUTO` the `path`
-                 is imported or exported as either a workspace file or a notebook, depending
-                 on an analysis of the `item`’s extension and the header content provided in
-                 the request. In addition, if the `path` is imported as a notebook, then
-                 the `item`’s extension is automatically removed.
-:param language: Only required if using `ExportFormat.SOURCE`.
+        Uploads a workspace object (for example, a notebook or file) or the contents of an entire
+        directory (`DBC` format).
+
+        Errors:
+         * `RESOURCE_ALREADY_EXISTS`: if `path` already exists and `overwrite=True` is not set.
+         * `INVALID_PARAMETER_VALUE`: if `format` and `content` values are not compatible.
+
+        :param path:     target location of the file on workspace.
+        :param content:  the contents as either raw binary data `bytes` or a file-like `io.BinaryIO` of the `path` contents.
+        :param format:   By default, `ImportFormat.SOURCE`. If using `ImportFormat.AUTO` the `path`
+                         is imported or exported as either a workspace file or a notebook, depending
+                         on an analysis of the `item`’s extension and the header content provided in
+                         the request. In addition, if the `path` is imported as a notebook, then
+                         the `item`’s extension is automatically removed.
+        :param language: Only required if using `ExportFormat.SOURCE`.
+        
\ No newline at end of file

From 614386e603293bdd893d1f42ac2d1835c197e5bc Mon Sep 17 00:00:00 2001
From: Giorgi Kikolashvili <47174341+gkiko10@users.noreply.github.com>
Date: Fri, 14 Feb 2025 11:02:54 +0100
Subject: [PATCH 100/136] [Internal] Update Jobs GetJob API to support
 paginated responses (#869)

## What changes are proposed in this pull request?

Introduces logic in the Jobs extension so that the GetJob call paginates
tasks and other arrays in the response. This change is necessary for
compatibility between the SDK and API 2.2. API 2.2 serves paginated
responses as long as the `next_page_token` field is present in the
response. The pagination logic is not exposed to the customer.
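
For illustration, a minimal sketch of the caller-facing behavior (assumes
Databricks auth is configured in the environment; the job ID is
hypothetical and not part of this PR):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# JobsExt.get follows next_page_token internally, so the returned Job
# already aggregates tasks, job_clusters, parameters, and environments
# from every page of the jobs/get response.
job = w.jobs.get(job_id=1337)
print(len(job.settings.tasks))  # tasks from all pages, not just the first
```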

## How is this tested?

I enabled API 2.2 calls by modifying the URL string to
/api/2.2/jobs/runs/get in databricks/sdk/service/jobs.py, then ran the
unit tests from tests/test_jobs_mixin.py.
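
The unit tests below stub HTTP responses with `requests_mock` and match
request URLs with a regex, so the same test covers both the 2.1 and 2.2
API paths. A reduced, self-contained sketch of that matcher (host and
values taken from the tests):

```python
import re

# The API version segment is kept as a regex ("2.\d") so that the same
# pattern matches both /api/2.1/... and /api/2.2/... request URLs; the
# rest of the URL is escaped and matched literally.
pattern = re.compile(
    re.escape("http://localhost/api/") + r"2.\d" +
    re.escape("/jobs/get?job_id=1337&page_token=initialToken"))

assert pattern.match("http://localhost/api/2.1/jobs/get?job_id=1337&page_token=initialToken")
assert pattern.match("http://localhost/api/2.2/jobs/get?job_id=1337&page_token=initialToken")
```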
---
 databricks/sdk/mixins/jobs.py |  31 +++++++-
 tests/test_jobs_mixin.py      | 138 ++++++++++++++++++++++++++++++++--
 2 files changed, 160 insertions(+), 9 deletions(-)

diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py
index c38304966..d5e2a1728 100644
--- a/databricks/sdk/mixins/jobs.py
+++ b/databricks/sdk/mixins/jobs.py
@@ -1,6 +1,7 @@
 from typing import Optional
 
 from databricks.sdk.service import jobs
+from databricks.sdk.service.jobs import Job
 
 
 class JobsExt(jobs.JobsAPI):
@@ -52,4 +53,32 @@ def get_run(self,
             run.repair_history.extend(next_run.repair_history)
             run.next_page_token = next_run.next_page_token
 
-        return run
\ No newline at end of file
+        return run
+
+    def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
+        """Get a single job.
+
+        Retrieves the details for a single job. If the job has multiple pages of tasks, job_clusters, parameters or environments,
+        it will paginate through all pages and aggregate the results.
+
+        :param job_id: int
+          The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
+
+        :returns: :class:`Job`
+        """
+        job = super().get(job_id, page_token=page_token)
+
+        # jobs/get response includes next_page_token as long as there are more pages to fetch.
+        while job.next_page_token is not None:
+            next_job = super().get(job_id, page_token=job.next_page_token)
+            # Each new page of jobs/get response includes the next page of the tasks, job_clusters, job_parameters, and environments.
+            job.settings.tasks.extend(next_job.settings.tasks)
+            job.settings.job_clusters.extend(next_job.settings.job_clusters)
+            job.settings.parameters.extend(next_job.settings.parameters)
+            job.settings.environments.extend(next_job.settings.environments)
+            job.next_page_token = next_job.next_page_token
+
+        return job
\ No newline at end of file
diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py
index 90f1c0b89..2c39d41d9 100644
--- a/tests/test_jobs_mixin.py
+++ b/tests/test_jobs_mixin.py
@@ -5,15 +5,21 @@
 from databricks.sdk import WorkspaceClient
 
 
-def make_path_pattern(run_id: int, page_token: str) -> Pattern[str]:
+def make_getrun_path_pattern(run_id: int, page_token: str) -> Pattern[str]:
     return re.compile(
         rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?page_token={page_token}&run_id={run_id}")}'
     )
 
 
+def make_getjob_path_pattern(job_id: int, page_token: str) -> Pattern[str]:
+    return re.compile(
+        rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}&page_token={page_token}")}'
+    )
+
+
 def test_get_run_with_no_pagination(config, requests_mock):
     run1 = {"tasks": [{"run_id": 0}, {"run_id": 1}], }
-    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
     w = WorkspaceClient(config=config)
 
     run = w.jobs.get_run(1337, page_token="initialToken")
@@ -59,9 +65,9 @@ def test_get_run_pagination_with_tasks(config, requests_mock):
         "next_page_token": "tokenToThirdPage",
     }
     run3 = {"tasks": [{"run_id": 4}]}
-    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
-    requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
-    requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
     w = WorkspaceClient(config=config)
 
     run = w.jobs.get_run(1337, page_token="initialToken")
@@ -116,9 +122,9 @@ def test_get_run_pagination_with_iterations(config, requests_mock):
         "next_page_token": "tokenToThirdPage",
     }
     run3 = {"tasks": [{"run_id": 1337}], "iterations": [{"run_id": 4}], }
-    requests_mock.get(make_path_pattern(1337, "initialToken"), text=json.dumps(run1))
-    requests_mock.get(make_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
-    requests_mock.get(make_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
     w = WorkspaceClient(config=config)
 
     run = w.jobs.get_run(1337, page_token="initialToken")
@@ -139,3 +145,119 @@ def test_get_run_pagination_with_iterations(config, requests_mock):
             'run_id': 4
         }],
     }
+
+
+def test_get_job_with_no_pagination(config, requests_mock):
+    job1 = {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }}
+    requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1))
+    w = WorkspaceClient(config=config)
+
+    job = w.jobs.get(1337, page_token="initialToken")
+
+    assert job.as_dict() == {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }}
+
+
+def test_get_job_pagination_with_tasks(config, requests_mock):
+    from databricks.sdk.service import compute, jobs
+    cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12",
+                                       custom_tags={"ResourceClass": "SingleNode"},
+                                       num_workers=0,
+                                       node_type_id="Standard_DS3_v2",
+                                       )
+    cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec)
+    cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec)
+    cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec)
+    cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec)
+    job1 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey1"
+            }, {
+                "task_key": "taskKey2"
+            }],
+            "job_clusters": [cluster1.as_dict(), cluster2.as_dict()],
+            "parameters": [{
+                "name": "param1",
+                "default": "default1"
+            }],
+            "environments": [{
+                "environment_key": "key1"
+            }, {
+                "environment_key": "key2"
+            }]
+        },
+        "next_page_token": "tokenToSecondPage"
+    }
+    job2 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey3"
+            }, {
+                "task_key": "taskKey4"
+            }],
+            "job_clusters": [cluster3.as_dict(), cluster4.as_dict()],
+            "parameters": [{
+                "name": "param2",
+                "default": "default2"
+            }],
+            "environments": [{
+                "environment_key": "key3"
+            }]
+        },
+        "next_page_token": "tokenToThirdPage"
+    }
+    job3 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey5"
+            }],
+            "parameters": [{
+                "name": "param3",
+                "default": "default3"
+            }]
+        },
+    }
+
+    requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1))
+    requests_mock.get(make_getjob_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(job2))
+    requests_mock.get(make_getjob_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(job3))
+    w = WorkspaceClient(config=config)
+
+    job = w.jobs.get(1337, page_token="initialToken")
+
+    assert job.as_dict() == {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey1"
+            }, {
+                "task_key": "taskKey2"
+            }, {
+                "task_key": "taskKey3"
+            }, {
+                "task_key": "taskKey4"
+            }, {
+                "task_key": "taskKey5"
+            }],
+            "job_clusters": [cluster1.as_dict(),
+                             cluster2.as_dict(),
+                             cluster3.as_dict(),
+                             cluster4.as_dict()],
+            "parameters": [{
+                "name": "param1",
+                "default": "default1"
+            }, {
+                "name": "param2",
+                "default": "default2"
+            }, {
+                "name": "param3",
+                "default": "default3"
+            }],
+            "environments": [{
+                "environment_key": "key1"
+            }, {
+                "environment_key": "key2"
+            }, {
+                "environment_key": "key3"
+            }]
+        }
+    }

From c0d7ee0d75628e4e2078b24ad332450af4abfa45 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 16:11:00 -0700
Subject: [PATCH 101/136] merge upstream branch into fork

---
 .codegen.json                                 |   17 +-
 .codegen/__init__.py.tmpl                     |  194 --
 .codegen/_openapi_sha                         |    2 +-
 .codegen/error_mapping.py.tmpl                |   20 -
 .codegen/error_overrides.py.tmpl              |   20 -
 .codegen/example.py.tmpl                      |  112 -
 .codegen/lib.tmpl                             |   12 -
 .codegen/service.py.tmpl                      |  419 ---
 .gitattributes                                |    1 +
 .github/PULL_REQUEST_TEMPLATE.md              |   34 +-
 .github/workflows/external-message.yml        |   59 +
 .github/workflows/integration-tests.yml       |   90 +
 .github/workflows/push.yml                    |   34 +-
 .github/workflows/release-test.yml            |    7 +-
 .github/workflows/release.yml                 |    7 +-
 CHANGELOG.md                                  |  291 ++
 databricks/sdk/__init__.py                    |  593 ++--
 databricks/sdk/_base_client.py                |   92 +-
 databricks/sdk/config.py                      |   18 +
 databricks/sdk/credentials_provider.py        |  174 +-
 databricks/sdk/data_plane.py                  |    2 +-
 databricks/sdk/mixins/files.py                |  185 +-
 databricks/sdk/mixins/jobs.py                 |   84 +
 databricks/sdk/mixins/open_ai_client.py       |   60 +-
 databricks/sdk/retries.py                     |    6 +-
 databricks/sdk/service/apps.py                |  421 ++-
 databricks/sdk/service/billing.py             |  627 +++-
 databricks/sdk/service/catalog.py             | 3025 +++++++++++++++--
 databricks/sdk/service/cleanrooms.py          | 1283 +++++++
 databricks/sdk/service/compute.py             | 2123 +++++++++++-
 databricks/sdk/service/dashboards.py          | 1228 +++++--
 databricks/sdk/service/files.py               |  173 +-
 databricks/sdk/service/iam.py                 |  515 ++-
 databricks/sdk/service/jobs.py                | 1814 +++++++++-
 databricks/sdk/service/marketplace.py         |  689 ++++
 databricks/sdk/service/ml.py                  | 1047 +++++-
 databricks/sdk/service/oauth2.py              |  769 ++++-
 databricks/sdk/service/pipelines.py           |  713 +++-
 databricks/sdk/service/provisioning.py        |  455 +++
 databricks/sdk/service/serving.py             | 1262 +++++--
 databricks/sdk/service/settings.py            | 2025 ++++++++++-
 databricks/sdk/service/sharing.py             | 1062 +++---
 databricks/sdk/service/sql.py                 | 1481 +++++++-
 databricks/sdk/service/vectorsearch.py        |  290 ++
 databricks/sdk/service/workspace.py           |  462 ++-
 databricks/sdk/useragent.py                   |   54 +
 databricks/sdk/version.py                     |    2 +-
 docs/account/billing/budget_policy.rst        |   88 +
 docs/account/billing/budgets.rst              |    2 +-
 docs/account/billing/index.rst                |    1 +
 .../account/oauth2/custom_app_integration.rst |   14 +-
 docs/account/oauth2/federation_policy.rst     |  105 +
 docs/account/oauth2/index.rst                 |    2 +
 .../service_principal_federation_policy.rst   |  115 +
 .../oauth2/service_principal_secrets.rst      |    9 +-
 docs/account/provisioning/workspaces.rst      |   13 +-
 .../settings/csp_enablement_account.rst       |   12 +-
 .../settings/disable_legacy_features.rst      |   12 +-
 .../settings/enable_ip_access_lists.rst       |   63 +
 .../settings/esm_enablement_account.rst       |   12 +-
 docs/account/settings/index.rst               |    1 +
 docs/account/settings/personal_compute.rst    |   12 +-
 docs/account/settings/settings.rst            |    6 +
 docs/dbdataclasses/apps.rst                   |   12 -
 docs/dbdataclasses/billing.rst                |   33 +
 docs/dbdataclasses/catalog.rst                |  204 +-
 docs/dbdataclasses/cleanrooms.rst             |  158 +
 docs/dbdataclasses/compute.rst                |   37 +-
 docs/dbdataclasses/dashboards.rst             |  124 +-
 docs/dbdataclasses/iam.rst                    |   26 +
 docs/dbdataclasses/index.rst                  |    1 +
 docs/dbdataclasses/jobs.rst                   |  110 +-
 docs/dbdataclasses/marketplace.rst            |    3 +
 docs/dbdataclasses/oauth2.rst                 |   16 +-
 docs/dbdataclasses/pipelines.rst              |   33 +
 docs/dbdataclasses/provisioning.rst           |    4 +
 docs/dbdataclasses/serving.rst                |   85 +-
 docs/dbdataclasses/settings.rst               |  141 +
 docs/dbdataclasses/sharing.rst                |  138 +-
 docs/dbdataclasses/sql.rst                    |    8 +-
 docs/dbdataclasses/workspace.rst              |    3 +
 docs/gen-client-docs.py                       |   32 +-
 docs/workspace/apps/apps.rst                  |   39 +-
 docs/workspace/catalog/catalogs.rst           |    4 +-
 docs/workspace/catalog/credentials.rst        |  193 ++
 docs/workspace/catalog/external_locations.rst |    1 -
 docs/workspace/catalog/index.rst              |    1 +
 docs/workspace/catalog/online_tables.rst      |   19 +-
 .../workspace/catalog/storage_credentials.rst |    1 -
 docs/workspace/catalog/tables.rst             |    5 +-
 .../cleanrooms/clean_room_assets.rst          |   94 +
 .../cleanrooms/clean_room_task_runs.rst       |   25 +
 docs/workspace/cleanrooms/clean_rooms.rst     |   95 +
 docs/workspace/cleanrooms/index.rst           |   12 +
 docs/workspace/compute/cluster_policies.rst   |    3 +-
 docs/workspace/compute/clusters.rst           |  105 +-
 docs/workspace/compute/instance_pools.rst     |    3 +-
 docs/workspace/dashboards/genie.rst           |   19 +
 docs/workspace/dashboards/index.rst           |    4 +-
 docs/workspace/dashboards/lakeview.rst        |   75 +-
 .../dashboards/lakeview_embedded.rst          |   19 +
 docs/workspace/dashboards/query_execution.rst |   46 +
 docs/workspace/files/files.rst                |   13 +-
 docs/workspace/iam/access_control.rst         |   23 +
 docs/workspace/iam/index.rst                  |    1 +
 docs/workspace/iam/permissions.rst            |    3 +-
 docs/workspace/iam/users.rst                  |    3 +-
 docs/workspace/index.rst                      |    1 +
 docs/workspace/jobs/jobs.rst                  |   77 +-
 docs/workspace/ml/experiments.rst             |    3 +-
 docs/workspace/ml/model_registry.rst          |    4 +-
 docs/workspace/pipelines/pipelines.rst        |   27 +-
 docs/workspace/provisioning/credentials.rst   |  123 +
 docs/workspace/provisioning/index.rst         |   10 +
 docs/workspace/serving/serving_endpoints.rst  |   58 +-
 ...aibi_dashboard_embedding_access_policy.rst |   64 +
 ...i_dashboard_embedding_approved_domains.rst |   65 +
 .../settings/automatic_cluster_update.rst     |   12 +-
 .../settings/compliance_security_profile.rst  |   12 +-
 docs/workspace/settings/default_namespace.rst |   12 +-
 .../settings/disable_legacy_access.rst        |   12 +-
 .../settings/disable_legacy_dbfs.rst          |   12 +-
 .../settings/enhanced_security_monitoring.rst |   12 +-
 docs/workspace/settings/index.rst             |    2 +
 .../settings/notification_destinations.rst    |    1 +
 .../settings/restrict_workspace_admins.rst    |   12 +-
 docs/workspace/settings/settings.rst          |   12 +
 docs/workspace/settings/token_management.rst  |    5 +-
 docs/workspace/sharing/index.rst              |    1 -
 docs/workspace/sharing/providers.rst          |    6 +-
 docs/workspace/sharing/recipients.rst         |   20 +-
 docs/workspace/sql/alerts.rst                 |   12 +-
 docs/workspace/sql/dashboards.rst             |    4 +-
 docs/workspace/sql/index.rst                  |    1 +
 docs/workspace/sql/queries.rst                |   12 +-
 docs/workspace/sql/query_visualizations.rst   |   12 +-
 docs/workspace/sql/redash_config.rst          |   14 +
 docs/workspace/sql/statement_execution.rst    |    9 +-
 docs/workspace/sql/warehouses.rst             |    3 +-
 docs/workspace/workspace/repos.rst            |    5 +-
 docs/workspace/workspace/workspace.rst        |    5 +-
 setup.py                                      |   85 +-
 tests/integration/test_auth.py                |   19 +-
 tests/integration/test_clusters.py            |    4 +-
 tests/integration/test_dbutils.py             |   25 +-
 tests/integration/test_jobs.py                |   25 +-
 tests/test_base_client.py                     |  224 +-
 tests/test_config.py                          |    5 +
 tests/test_core.py                            |   29 +-
 tests/test_credentials_provider.py            |  145 +
 tests/test_data_plane.py                      |    2 +-
 tests/test_files.py                           |  340 ++
 tests/test_jobs_mixin.py                      |  263 ++
 tests/test_model_serving_auth.py              |   73 +-
 tests/test_open_ai_mixin.py                   |   21 +
 tests/test_user_agent.py                      |   44 +
 156 files changed, 24634 insertions(+), 3804 deletions(-)
 delete mode 100644 .codegen/__init__.py.tmpl
 delete mode 100644 .codegen/error_mapping.py.tmpl
 delete mode 100644 .codegen/error_overrides.py.tmpl
 delete mode 100644 .codegen/example.py.tmpl
 delete mode 100644 .codegen/lib.tmpl
 delete mode 100644 .codegen/service.py.tmpl
 create mode 100644 .github/workflows/external-message.yml
 create mode 100644 .github/workflows/integration-tests.yml
 create mode 100644 databricks/sdk/mixins/jobs.py
 create mode 100755 databricks/sdk/service/cleanrooms.py
 create mode 100644 docs/account/billing/budget_policy.rst
 create mode 100644 docs/account/oauth2/federation_policy.rst
 create mode 100644 docs/account/oauth2/service_principal_federation_policy.rst
 create mode 100644 docs/account/settings/enable_ip_access_lists.rst
 create mode 100644 docs/dbdataclasses/cleanrooms.rst
 create mode 100644 docs/workspace/catalog/credentials.rst
 create mode 100644 docs/workspace/cleanrooms/clean_room_assets.rst
 create mode 100644 docs/workspace/cleanrooms/clean_room_task_runs.rst
 create mode 100644 docs/workspace/cleanrooms/clean_rooms.rst
 create mode 100644 docs/workspace/cleanrooms/index.rst
 create mode 100644 docs/workspace/dashboards/lakeview_embedded.rst
 create mode 100644 docs/workspace/dashboards/query_execution.rst
 create mode 100644 docs/workspace/iam/access_control.rst
 create mode 100644 docs/workspace/provisioning/credentials.rst
 create mode 100644 docs/workspace/provisioning/index.rst
 create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
 create mode 100644 docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
 create mode 100644 docs/workspace/sql/redash_config.rst
 create mode 100644 tests/test_credentials_provider.py
 create mode 100644 tests/test_files.py
 create mode 100644 tests/test_jobs_mixin.py

diff --git a/.codegen.json b/.codegen.json
index a1886bd80..3a880d1a9 100644
--- a/.codegen.json
+++ b/.codegen.json
@@ -1,20 +1,6 @@
 {
-  "formatter": "yapf -pri $FILENAMES && autoflake -i $FILENAMES && isort $FILENAMES",
+  "mode": "py_v0",
   "changelog_config": ".codegen/changelog_config.yml",
-  "template_libraries": [
-    ".codegen/lib.tmpl"
-  ],
-  "packages": {
-    ".codegen/service.py.tmpl": "databricks/sdk/service/{{.Name}}.py"
-  },
-  "batch": {
-    ".codegen/__init__.py.tmpl": "databricks/sdk/__init__.py",
-    ".codegen/error_mapping.py.tmpl": "databricks/sdk/errors/platform.py",
-    ".codegen/error_overrides.py.tmpl": "databricks/sdk/errors/overrides.py"
-  },
-  "samples": {
-    ".codegen/example.py.tmpl": "examples/{{if .IsAccount}}account{{else}}workspace{{end}}/{{.Service.SnakeName}}/{{.Method.SnakeName}}_{{.SnakeName}}.py"
-  },
   "version": {
     "databricks/sdk/version.py": "__version__ = '$VERSION'"
   },
@@ -28,6 +14,7 @@
       "pip install '.[dev]'"
     ],
     "post_generate": [
+      "make fmt",
       "pytest -m 'not integration' --cov=databricks --cov-report html tests",
       "pip install .",
       "python docs/gen-client-docs.py"
diff --git a/.codegen/__init__.py.tmpl b/.codegen/__init__.py.tmpl
deleted file mode 100644
index d54e9dfff..000000000
--- a/.codegen/__init__.py.tmpl
+++ /dev/null
@@ -1,194 +0,0 @@
-import databricks.sdk.core as client
-import databricks.sdk.dbutils as dbutils
-from databricks.sdk.credentials_provider import CredentialsStrategy
-
-from databricks.sdk.mixins.files import DbfsExt
-from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.workspace import WorkspaceExt
-from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
-{{- range .Services}}
-from databricks.sdk.service.{{.Package.Name}} import {{.PascalName}}API{{end}}
-from databricks.sdk.service.provisioning import Workspace
-from databricks.sdk import azure
-from typing import Optional
-
-{{$args := list "host" "account_id" "username" "password" "client_id" "client_secret"
-  "token" "profile" "config_file" "azure_workspace_resource_id" "azure_client_secret"
-  "azure_client_id" "azure_tenant_id" "azure_environment" "auth_type" "cluster_id"
-  "google_credentials" "google_service_account" }}
-
-{{- define "api" -}}
-  {{- $mixins := dict "ClustersAPI" "ClustersExt" "DbfsAPI" "DbfsExt" "WorkspaceAPI" "WorkspaceExt" "ServingEndpointsAPI" "ServingEndpointsExt" -}}
-  {{- $genApi := concat .PascalName "API" -}}
-  {{- getOrDefault $mixins $genApi $genApi -}}
-{{- end -}}
-
-def _make_dbutils(config: client.Config):
-    # We try to directly check if we are in runtime, instead of
-    # trying to import from databricks.sdk.runtime. This is to prevent
-    # remote dbutils from being created without the config, which is both
-    # expensive (will need to check all credential providers) and can
-    # throw errors (when no env vars are set).
-    try:
-        from dbruntime import UserNamespaceInitializer
-    except ImportError:
-        return dbutils.RemoteDbUtils(config)
-
-    # We are in runtime, so we can use the runtime dbutils
-    from databricks.sdk.runtime import dbutils as runtime_dbutils
-    return runtime_dbutils
-
-
-class WorkspaceClient:
-    """
-    The WorkspaceClient is a client for the workspace-level Databricks REST API.
-    """
-    def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}},
-                 debug_truncate_bytes: Optional[int] = None,
-                 debug_headers: Optional[bool] = None,
-                 product="unknown",
-                 product_version="0.0.0",
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 config: Optional[client.Config] = None):
-        if not config:
-          config = client.Config({{range $args}}{{.}}={{.}}, {{end}}
-            credentials_strategy=credentials_strategy,
-            credentials_provider=credentials_provider,
-            debug_truncate_bytes=debug_truncate_bytes,
-            debug_headers=debug_headers,
-            product=product,
-            product_version=product_version)
-        self._config = config.copy()
-        self._dbutils = _make_dbutils(self._config)
-        self._api_client = client.ApiClient(self._config)
-
-        {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}}
-        {{.SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}}
-
-        {{- range .Services}}
-        {{- if and (not .IsAccounts) (not .HasParent)}}
-        {{- if .IsDataPlane}}
-        self._{{.SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}})
-        {{- else if .HasDataPlaneAPI}}
-        self._{{.SnakeName}} = {{.SnakeName}}
-        {{- else}}
-        self._{{.SnakeName}} = {{template "api" .}}(self._api_client)
-        {{- end -}}
-        {{- end -}}
-        {{end}}
-
-    @property
-    def config(self) -> client.Config:
-        return self._config
-
-    @property
-    def api_client(self) -> client.ApiClient:
-        return self._api_client
-
-    @property
-    def dbutils(self) -> dbutils.RemoteDbUtils:
-        return self._dbutils
-
-    {{- range .Services}}{{if and (not .IsAccounts) (not .HasParent)}}
-    @property
-    def {{.SnakeName}}(self) -> {{template "api" .}}:
-        {{if .Description}}"""{{.Summary}}"""{{end}}
-        return self._{{.SnakeName}}
-    {{end -}}{{end}}
-
-    def get_workspace_id(self) -> int:
-        """Get the workspace ID of the workspace that this client is connected to."""
-        response = self._api_client.do("GET",
-                                       "/api/2.0/preview/scim/v2/Me",
-                                       response_headers=['X-Databricks-Org-Id'])
-        return int(response["X-Databricks-Org-Id"])
-
-    def __repr__(self):
-        return f"WorkspaceClient(host='{self._config.host}', auth_type='{self._config.auth_type}', ...)"
-
-class AccountClient:
-    """
-    The AccountClient is a client for the account-level Databricks REST API.
-    """
-
-    def __init__(self, *{{range $args}}, {{.}}: Optional[str] = None{{end}},
-                 debug_truncate_bytes: Optional[int] = None,
-                 debug_headers: Optional[bool] = None,
-                 product="unknown",
-                 product_version="0.0.0",
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 config: Optional[client.Config] = None):
-        if not config:
-          config = client.Config({{range $args}}{{.}}={{.}}, {{end}}
-            credentials_strategy=credentials_strategy,
-            credentials_provider=credentials_provider,
-            debug_truncate_bytes=debug_truncate_bytes,
-            debug_headers=debug_headers,
-            product=product,
-            product_version=product_version)
-        self._config = config.copy()
-        self._api_client = client.ApiClient(self._config)
-
-        {{- range .Services}}{{if and .IsAccounts (not .HasParent) .HasDataPlaneAPI (not .IsDataPlane)}}
-        {{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client){{end -}}{{end}}
-
-        {{- range .Services}}
-        {{- if and .IsAccounts (not .HasParent)}}
-        {{- if .IsDataPlane}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client, {{.ControlPlaneService.SnakeName}})
-        {{- else if .HasDataPlaneAPI}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{(.TrimPrefix "account").SnakeName}}
-        {{- else}}
-        self._{{(.TrimPrefix "account").SnakeName}} = {{template "api" .}}(self._api_client)
-        {{- end -}}
-        {{- end -}}
-        {{end}}
-
-    @property
-    def config(self) -> client.Config:
-        return self._config
-
-    @property
-    def api_client(self) -> client.ApiClient:
-        return self._api_client
-
-    {{- range .Services}}{{if and .IsAccounts (not .HasParent)}}
-    @property
-    def {{(.TrimPrefix "account").SnakeName}}(self) -> {{template "api" .}}:{{if .Description}}
-        """{{.Summary}}"""{{end}}
-        return self._{{(.TrimPrefix "account").SnakeName}}
-    {{end -}}{{end}}
-
-    def get_workspace_client(self, workspace: Workspace) -> WorkspaceClient:
-        """Constructs a ``WorkspaceClient`` for the given workspace.
-
-        Returns a ``WorkspaceClient`` that is configured to use the same
-        credentials as this ``AccountClient``. The underlying config is
-        copied from this ``AccountClient``, but the ``host`` and
-        ``azure_workspace_resource_id`` are overridden to match the
-        given workspace, and the ``account_id`` field is cleared.
-
-        Usage:
-
-        .. code-block::
-
-            wss = list(a.workspaces.list())
-            if len(wss) == 0:
-                pytest.skip("no workspaces")
-            w = a.get_workspace_client(wss[0])
-            assert w.current_user.me().active
-
-        :param workspace: The workspace to construct a client for.
-        :return: A ``WorkspaceClient`` for the given workspace.
-        """
-        config = self._config.deep_copy()
-        config.host = config.environment.deployment_url(workspace.deployment_name)
-        config.azure_workspace_resource_id = azure.get_azure_resource_id(workspace)
-        config.account_id = None
-        config.init_auth()
-        return WorkspaceClient(config=config)
-
-    def __repr__(self):
-        return f"AccountClient(account_id='{self._config.account_id}', auth_type='{self._config.auth_type}', ...)"
diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2d9cb6d86..2a9a021e0 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-cf9c61453990df0f9453670f2fe68e1b128647a2
\ No newline at end of file
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc
diff --git a/.codegen/error_mapping.py.tmpl b/.codegen/error_mapping.py.tmpl
deleted file mode 100644
index b3cc8cea6..000000000
--- a/.codegen/error_mapping.py.tmpl
+++ /dev/null
@@ -1,20 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from .base import DatabricksError
-
-{{range .ExceptionTypes}}
-class {{.PascalName}}({{if .Inherit -}}
-    {{.Inherit.PascalName}}
-  {{- else -}}
-    DatabricksError
-  {{- end -}}):
-  """{{.Comment "    " 100 | trimSuffix "\"" }}"""
-{{end}}
-
-STATUS_CODE_MAPPING = { {{range .ErrorStatusCodeMapping}}
-   {{.StatusCode}}: {{.PascalName}},{{- end}}
-}
-
-ERROR_CODE_MAPPING = { {{range .ErrorCodeMapping}}
-    '{{.ErrorCode}}': {{.PascalName}},{{- end}}
-}
diff --git a/.codegen/error_overrides.py.tmpl b/.codegen/error_overrides.py.tmpl
deleted file mode 100644
index adcfea555..000000000
--- a/.codegen/error_overrides.py.tmpl
+++ /dev/null
@@ -1,20 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from .base import _ErrorOverride
-from .platform import *
-import re
-
-
-_ALL_OVERRIDES = [
-    {{ range .ErrorOverrides -}}
-    _ErrorOverride(
-        debug_name="{{.Name}}",
-        path_regex=re.compile(r'{{.PathRegex}}'),
-        verb="{{.Verb}}",
-        status_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .StatusCodeMatcher}}'),
-        error_code_matcher=re.compile(r'{{replaceAll "'" "\\'" .ErrorCodeMatcher}}'),
-        message_matcher=re.compile(r'{{replaceAll "'" "\\'" .MessageMatcher}}'),
-        custom_error={{.OverrideErrorCode.PascalName}},
-    ),
-{{- end }}
-]
diff --git a/.codegen/example.py.tmpl b/.codegen/example.py.tmpl
deleted file mode 100644
index dba71d9bf..000000000
--- a/.codegen/example.py.tmpl
+++ /dev/null
@@ -1,112 +0,0 @@
-from databricks.sdk import {{if .IsAccount}}AccountClient{{else}}WorkspaceClient{{end}}
-from databricks.sdk.service import _internal{{range .Suite.ServiceToPackage}}, {{.}}{{end}}
-import time, base64, os
-
-{{$example := .}}
-{{if .IsAccount}}a{{else}}w{{end}} = {{if .IsAccount}}Account{{else}}Workspace{{end}}Client()
-
-{{range .Init}}
-{{.SnakeName}} = {{template "expr" .Value}}
-{{end}}
-
-{{range .Calls}}
-{{if .Service -}}
-  {{template "svc-call" .}}
-{{- else -}}
-  {{with .Assign}}{{.SnakeName}} = {{end}}{{template "expr" .}}
-{{- end}}
-{{end}}
-
-{{with .Cleanup}}
-# cleanup
-{{range . -}}
-  {{template "svc-call" .}}
-{{end}}
-{{end}}
-
-{{define "svc-call" -}}
-  {{with .Assign}}{{.SnakeName}} = {{end}}{{if .IsAccount}}a{{else}}w{{end}}.{{.Service.SnakeName}}.{{.Original.SnakeName}}{{if eq .Original.SnakeName "import"}}_{{end}}({{template "method-args" .}})
-  {{- if .IsWait}}.result(){{end}}
-{{- end}}
-
-{{define "expr" -}}
-{{- if eq .Type "binary" -}}
-    {{template "expr" .Left}} {{.Op}} {{template "expr" .Right}}
-{{- else if eq .Type "index" -}}
-    {{template "expr" .Left}}[{{template "expr" .Right}}]
-{{- else if eq .Type "boolean" -}}
-    {{if .Value}}True{{else}}False{{end}}
-{{- else if eq .Type "heredoc" -}}
-"""{{.Value}}"""
-{{- else if eq .Type "literal" -}}
-    {{.Value}}
-{{- else if eq .Type "lookup" -}}
-    {{template "expr" .X}}.{{.Field.SnakeName}}
-{{- else if eq .Type "enum" -}}
-    {{.Package}}.{{.Entity.PascalName}}.{{.ConstantName}}
-{{- else if eq .Type "variable" -}}
-    {{if eq .SnakeName "true"}}True
-    {{- else if eq .SnakeName "false"}}False
-    {{else}}{{.SnakeName}}{{end}}
-{{- else if eq .Type "entity" -}}
-    {{.Package}}.{{.PascalName}}({{template "kwargs" .FieldValues}})
-{{- else if eq .Type "call" -}}
-    {{template "call" .}}
-{{- else if eq .Type "map" -}}
-    { {{range .Pairs}}{{template "expr" .Key}}: {{template "expr" .Value}},{{end}} }
-{{- else if eq .Type "array" -}}
-    [ {{range $i, $x := .Values}}{{if $i}}, {{end}}{{template "expr" .}}{{end}} ]
-{{- else -}}
-    /* UNKNOWN: {{.Type}} */
-{{- end -}}
-{{- end}}
-
-{{define "method-args" -}}
-  {{with .Request -}}
-    {{template "kwargs" .}}
-  {{- else -}}
-    {{template "args" .}}
-  {{- end}}
-{{- end}}
-
-{{define "kwargs" -}}
-  {{range $i, $x := . -}}
-    {{if $i}}, {{end}}{{.SnakeName}}={{template "expr" .Value}}
-  {{- end}}
-{{- end}}
-
-{{define "args" -}}
-  {{range $i, $x := .Args -}}
-    {{if $i}}, {{end}}{{template "expr" .}}
-  {{- end}}
-{{- end}}
-
-{{define "call" -}}
-{{- if eq .PascalName "GetEnvOrSkipTest" -}}
-os.environ[{{template "args" .}}]
-{{- else if eq .PascalName "Dir" -}}
-os.path.dirname({{template "args" .}})
-{{- else if eq .PascalName "Sprintf" -}}
-{{range $i, $x := .Args}}{{if eq $i 0}}{{template "expr" .}} % ({{else}} {{if gt $i 1}}, {{end}}  {{template "expr" .}} {{end}}{{end}})
-{{- else if eq .PascalName "MustParseInt64" -}}
-{{template "args" .}}
-{{- else if eq .PascalName "RandomEmail" -}}
-f'sdk-{time.time_ns()}@example.com'
-{{- else if eq .PascalName "RandomName" -}}
-f'sdk-{time.time_ns()}'
-{{- else if eq .PascalName "RandomHex" -}}
-hex(time.time_ns())[2:]
-{{- else if eq .PascalName "EncodeToString" -}}
-base64.b64encode({{template "args" .}}.encode()).decode()
-{{- else if eq .PascalName "CanonicalHostName" -}}
-w.config.host
-{{- else if eq .PascalName "SharedRunningCluster" -}}
-w.clusters.ensure_cluster_is_running(os.environ["DATABRICKS_CLUSTER_ID"]) and os.environ["DATABRICKS_CLUSTER_ID"]
-{{- else if eq .PascalName "DltNotebook" -}}
-"CREATE LIVE TABLE dlt_sample AS SELECT 1"
-{{- else if eq .PascalName "MyNotebookPath" -}}
-f'/Users/{w.current_user.me().user_name}/sdk-{time.time_ns()}'
-{{- else -}}
-{{.SnakeName}}({{range $i, $x := .Args}}{{if $i}}, {{end}}{{template "expr" .}}{{end}})
-{{- end -}}
-{{- end}}
diff --git a/.codegen/lib.tmpl b/.codegen/lib.tmpl
deleted file mode 100644
index 50233ca08..000000000
--- a/.codegen/lib.tmpl
+++ /dev/null
@@ -1,12 +0,0 @@
-{{ define "safe-name" -}}
-  {{/* https://docs.python.org/3/reference/lexical_analysis.html#keywords */}}
-  {{- $keywords := list	"False" "await" "else" "import" "pass" "None" "break" "except" "in" "raise"
-                       	"True" "class" "finally" "is" "return" "and" "continue" "for" "lambda" "try"
-                       	"as" "def" "from" "nonlocal" "while" "assert" "del" "global" "not" "with"
-                       	"async" "elif" "if" "or" "yield" -}}
-  {{.}}{{ if in $keywords . }}_{{ end }}
-{{- end}}
-
-{{ define "safe-snake-name" -}}
-  {{ template "safe-name" .SnakeName }}
-{{- end}}
diff --git a/.codegen/service.py.tmpl b/.codegen/service.py.tmpl
deleted file mode 100644
index 4307e0913..000000000
--- a/.codegen/service.py.tmpl
+++ /dev/null
@@ -1,419 +0,0 @@
-# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
-
-from __future__ import annotations
-from dataclasses import dataclass
-from datetime import timedelta
-from enum import Enum
-from typing import Dict, List, Any, Iterator, Type, Callable, Optional, BinaryIO
-import time
-import random
-import logging
-import requests
-
-from ..data_plane import DataPlaneService
-from ..errors import OperationTimeout, OperationFailed
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum, Wait, _escape_multi_segment_path_parameter
-from ..oauth import Token
-
-_LOG = logging.getLogger('databricks.sdk')
-
-{{range .ImportedEntities}}
-from databricks.sdk.service import {{.Package.Name}}{{end}}
-
-# all definitions in this file are in alphabetical order
-{{range .Types}}
-{{if or .Fields .IsEmpty -}}{{if not .IsRequest}}@dataclass
-class {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:{{if .Description}}
-    """{{.Comment "    " 100}}"""
-    {{end}}
-    {{- range .RequiredFields}}
-    {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{if .Description}}
-    """{{.Comment "    " 100 | trimSuffix "\""}}"""{{end}}
-    {{end}}
-    {{- range .NonRequiredFields}}
-    {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{if .Description}}
-    """{{.Comment "    " 100 | trimSuffix "\""}}"""{{end}}
-    {{end}}
-    {{if or .IsEmpty .HasJsonField .HasHeaderField .HasByteStreamField -}}
-    def as_dict(self) -> dict:
-        """Serializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        {{range .Fields}}if self.{{template "safe-snake-name" .}}{{with .Entity.IsPrimitive}} is not None{{end}}: body['{{.Name}}'] = {{template "as_request_type" .}}
-        {{end -}}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}:
-        """Deserializes the {{.PascalName}}{{if eq "List" .PascalName}}Request{{end}} from a dictionary."""
-        return cls({{range $i, $f := .Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" $f}}={{template "from_dict_type" $f}}{{end}})
-    {{end}}
-{{end}}
-{{else if .ArrayValue}}type {{.PascalName}} []{{template "type" .ArrayValue}}
-{{else if .MapValue}}{{.PascalName}} = {{template "type" .}}
-{{else if .Enum}}class {{.PascalName}}(Enum):
-    {{if .Description}}"""{{.Comment "    " 100 | trimSuffix "\"" }}"""{{end}}
-    {{range .Enum }}
-    {{.ConstantName}} = '{{.Content}}'{{end}}{{end}}
-{{end}}
-{{- define "from_dict_type" -}}
-	{{- if not .Entity }}None
-	{{- else if .Entity.ArrayValue }}
-		{{- if (or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal) }}_repeated_dict(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}})
-		{{- else if .Entity.ArrayValue.Enum }}_repeated_enum(d, '{{.Name}}', {{template "type" .Entity.ArrayValue}})
-		{{- else}}d.get('{{.Name}}', None){{- end -}}
-	{{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}_from_dict(d, '{{.Name}}', {{template "type" .Entity}})
-	{{- else if .Entity.Enum }}_enum(d, '{{.Name}}', {{template "type" .Entity}})
-	{{- else if and .IsHeader (or .Entity.IsInt64 .Entity.IsInt) }} int(d.get('{{.Name}}', None))
-	{{- else}}d.get('{{.Name}}', None){{- end -}}
-{{- end -}}
-{{- define "as_request_type" -}}
-	{{- if not .Entity }}None # ERROR: No Type
-	{{- /* This should be done recursively, but recursion in text templates is not supported. */ -}}
-	{{- else if .Entity.ArrayValue }}[{{if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal}}v.as_dict(){{ else if .Entity.ArrayValue.Enum }}v.value{{else}}v{{end}} for v in self.{{template "safe-snake-name" .}}]
-	{{- else if or .Entity.IsObject .Entity.IsExternal .Entity.IsEmpty }}self.{{template "safe-snake-name" .}}.as_dict()
-	{{- else if .Entity.Enum }}self.{{template "safe-snake-name" .}}.value
-	{{- else}}self.{{template "safe-snake-name" .}}{{- end -}}
-{{- end -}}
-{{- define "type" -}}
-	{{- if not . }}any # ERROR: No Type
-	{{- else if .IsExternal }}{{.Package.Name}}.{{.PascalName}}
-	{{- else if .ArrayValue }}List[{{template "type" .ArrayValue}}]
-	{{- else if .MapValue }}Dict[str,{{template "type" .MapValue}}]
-	{{- else if .IsObject }}{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}
-	{{- else if .Enum }}{{.PascalName}}
-	{{- else if .IsString}}str
-	{{- else if .IsAny}}Any
-	{{- else if .IsEmpty}}{{.PascalName}}
-	{{- else if .IsBool}}bool
-	{{- else if .IsInt64}}int
-	{{- else if .IsFloat64}}float
-	{{- else if .IsInt}}int
-	{{- else if .IsByteStream}}BinaryIO
-	{{- else}}any /* MISSING TYPE */
-	{{- end -}}
-{{- end -}}
-
-{{- define "type-doc" -}}
-	{{- if .IsExternal }}:class:`{{.PascalName}}`
-	{{- else if .IsEmpty}}:class:`{{template "type" .}}`
-	{{- else if .ArrayValue }}List[{{template "type-doc" .ArrayValue}}]
-	{{- else if .MapValue }}Dict[str,{{template "type-doc" .MapValue}}]
-	{{- else if .IsObject }}:class:`{{.PascalName}}{{if eq "List" .PascalName}}Request{{end}}`
-	{{- else if .Enum }}:class:`{{.PascalName}}`
-	{{- else}}{{template "type" . }}
-	{{- end -}}
-{{- end -}}
-
-{{range .Services}}
-class {{.PascalName}}API:{{if .Description}}
-    """{{.Comment "    " 110}}"""
-    {{end}}
-    def __init__(self, api_client{{if .IsDataPlane}}, control_plane{{end}}):
-        self._api = api_client
-        {{if .IsDataPlane -}}
-        self._control_plane = control_plane
-        self._data_plane_service = DataPlaneService()
-        {{end -}}
-        {{range .Subservices}}
-        self._{{.SnakeName}} = {{.PascalName}}API(self._api){{end}}
-
-    {{range .Subservices}}
-    @property
-    def {{.SnakeName}}(self) -> {{.PascalName}}API:
-        {{if .Description}}"""{{.Summary}}"""{{end}}
-        return self._{{.SnakeName}}
-    {{end}}
-
-    {{range .Waits}}
-    def {{template "safe-snake-name" .}}(self{{range .Binding}}, {{template "safe-snake-name" .PollField}}: {{template "type" .PollField.Entity}}{{end}},
-      timeout=timedelta(minutes={{.Timeout}}), callback: Optional[Callable[[{{.Poll.Response.PascalName}}], None]] = None) -> {{.Poll.Response.PascalName}}:
-      deadline = time.time() + timeout.total_seconds()
-      target_states = ({{range .Success}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{if .Failure}}
-      failure_states = ({{range .Failure}}{{.Entity.PascalName}}.{{.ConstantName}}, {{end}}){{end}}
-      status_message = 'polling...'
-      attempt = 1
-      while time.time() < deadline:
-        poll = self.{{template "safe-snake-name" .Poll}}({{range $i, $b := .Binding}}{{if $i}}, {{end}}{{template "safe-snake-name" .PollField}}={{template "safe-snake-name" .PollField}}{{- end}})
-        status = poll{{range .StatusPath}}.{{template "safe-snake-name" .}}{{end}}
-        {{if .ComplexMessagePath -}}
-        status_message = f'current status: {status}'
-        if poll.{{template "safe-snake-name" .MessagePathHead}}:
-          status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}}
-        {{- else if .MessagePath -}}
-        status_message = poll{{range .MessagePath}}.{{template "safe-snake-name" .}}{{end}}
-        {{- else -}}
-        status_message = f'current status: {status}'
-        {{- end}}
-        if status in target_states:
-          return poll
-        if callback:
-          callback(poll)
-        {{if .Failure -}}
-        if status in failure_states:
-          msg = f'failed to reach {{range $i, $e := .Success}}{{if $i}} or {{end}}{{$e.Content}}{{end}}, got {status}: {status_message}'
-          raise OperationFailed(msg)
-        {{end}}prefix = f"{{range $i, $b := .Binding}}{{if $i}}, {{end -}}
-           {{template "safe-snake-name" .PollField}}={{"{"}}{{template "safe-snake-name" .PollField}}{{"}"}}
-        {{- end}}"
-        sleep = attempt
-        if sleep > 10:
-          # sleep 10s max per attempt
-          sleep = 10
-        _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-        time.sleep(sleep + random.random())
-        attempt += 1
-      raise TimeoutError(f'timed out after {timeout}: {status_message}')
-    {{end}}
-
-    {{range .Methods}}
-    def {{template "safe-snake-name" .}}({{ template "method-parameters" . }}){{template "method-return-type" .}}:
-        {{if .Description}}"""{{.Comment "        " 110 | trimSuffix "\"" }}
-        {{with .Request}}{{range .RequiredFields}}
-        :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}}{{if .Description}}
-          {{.Comment "          " 110 | trimSuffix "\"" }}{{end}}
-        {{- end}}{{range .NonRequiredFields}}
-        :param {{template "safe-snake-name" .}}: {{template "type-doc" .Entity}} (optional){{if .Description}}
-          {{.Comment "          " 110 | trimSuffix "\"" }}{{end}}
-        {{- end}}
-        {{end}}
-        {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}
-        :returns:
-          Long-running operation waiter for {{template "type-doc" .Wait.Poll.Response}}.
-          See :method:{{template "safe-snake-name" .Wait}} for more details.
-        {{- else if not .Response.IsEmpty }}:returns: {{if .Response.ArrayValue -}}
-          Iterator over {{template "type-doc" .Response.ArrayValue}}
-        {{- else if .Pagination -}}
-          Iterator over {{template "type-doc" .Pagination.Entity}}
-        {{- else -}}
-          {{template "type-doc" .Response}}
-        {{- end}}{{end}}
-        """{{end}}
-        {{if .Request -}}
-        {{template "method-serialize" .}}
-        {{- end}}
-        {{- if .Service.IsDataPlane}}
-        {{template "data-plane" .}}
-        {{- end}}
-        {{template "method-headers" . }}
-        {{if .Response.HasHeaderField -}}
-        {{template "method-response-headers" . }}
-        {{- end}}
-        {{template "method-call" .}}
-
-    {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }}
-    def {{.SnakeName}}_and_wait({{ template "method-parameters" . }},
-        timeout=timedelta(minutes={{.Wait.Timeout}})) -> {{.Wait.Poll.Response.PascalName}}:
-        return self.{{template "safe-snake-name" .}}({{range $i, $x := .Request.Fields}}{{if $i}}, {{end}}{{template "safe-snake-name" .}}={{template "safe-snake-name" .}}{{end}}).result(timeout=timeout)
-    {{end}}
-    {{end -}}
-{{- end}}
-
-{{define "data-plane" -}}
-        def info_getter():
-            response = self._control_plane.{{.Service.DataPlaneInfoMethod.SnakeName}}(
-                {{- range .Service.DataPlaneInfoMethod.Request.Fields }}
-                {{.SnakeName}} = {{.SnakeName}},
-                {{- end}}
-                )
-            if response.{{(index .DataPlaneInfoFields 0).SnakeName}} is None:
-            	raise Exception("Resource does not support direct Data Plane access")
-            return response{{range .DataPlaneInfoFields}}.{{.SnakeName}}{{end}}
-
-        get_params = [{{- range .Service.DataPlaneInfoMethod.Request.Fields }}{{.SnakeName}},{{end}}]
-        data_plane_details = self._data_plane_service.get_data_plane_details('{{.SnakeName}}', get_params, info_getter, self._api.get_oauth_token)
-        token = data_plane_details.token
-
-        def auth(r: requests.PreparedRequest) -> requests.PreparedRequest:
-            authorization = f"{token.token_type} {token.access_token}"
-            r.headers["Authorization"] = authorization
-            return r
-{{- end}}
-
-{{define "method-parameters" -}}
-  self{{if .Request}}
-       {{- if .Request.MapValue }}, contents: {{template "type" .Request }}{{ end }}
-       {{range .Request.RequiredFields}}, {{template "safe-snake-name" .}}: {{template "type" .Entity}}{{end}}
-       {{if .Request.NonRequiredFields}}, *
-         {{range .Request.NonRequiredFields}}, {{template "safe-snake-name" .}}: Optional[{{template "type" .Entity}}] = None{{end}}
-       {{- end}}
-     {{- end}}
-{{- end}}
-
-{{define "method-serialize" -}}
-        {{if or .Request.HasJsonField .Request.HasQueryField -}}
-        {{if .Request.HasJsonField}}body = {}{{end}}{{if .Request.HasQueryField}}
-        query = {}{{end}}
-        {{- range .Request.Fields}}{{ if and (not .IsPath) (not .IsHeader) }}
-        {{- if .IsQuery }}
-        if {{template "safe-snake-name" .}} is not None: query['{{.Name}}'] = {{template "method-param-bind" .}}{{end}}
-        {{- if .IsJson }}
-        if {{template "safe-snake-name" .}} is not None: body['{{.Name}}'] = {{template "method-param-bind" .}}{{end}}
-        {{- end}}
-        {{- end}}
-        {{- end}}
-{{- end}}
-
-{{ define "method-headers" -}}
-    headers = {
-      {{- range $k, $v := .FixedRequestHeaders}}'{{ $k }}': '{{ $v }}',{{ end -}}
-    }
-{{- end }}
-
-{{ define "method-response-headers" -}}
-    response_headers = [
-      {{- range $h := .ResponseHeaders}}'{{ $h.Name }}',{{ end -}}
-    ]
-{{- end }}
-
-{{- define "method-param-bind" -}}
-      {{- if not .Entity }}None # ERROR: No Type
-      {{- else if .Entity.ArrayValue }}[
-        {{- if or .Entity.ArrayValue.IsObject .Entity.ArrayValue.IsExternal -}}v.as_dict()
-        {{- else if .Entity.ArrayValue.Enum -}}v.value
-        {{- else}}v{{end}} for v in {{template "safe-snake-name" .}}]
-      {{- else if .Entity.IsObject }}{{template "safe-snake-name" .}}.as_dict()
-      {{- else if .Entity.Enum }}{{template "safe-snake-name" .}}.value
-      {{- else}}{{template "safe-snake-name" .}}{{- end -}}
-{{- end -}}
-
-{{define "method-call" -}}
-        {{if .Pagination -}}{{template "method-call-paginated" .}}
-        {{- else if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) -}}{{template "method-call-retried" .}}
-        {{- else}}{{template "method-call-default" .}}{{end}}
-{{- end}}
-
-{{define "method-call-retried" -}}
-        {{if .Response}}op_response = {{end}}{{template "method-do" .}}
-        return Wait(self.{{template "safe-snake-name" .Wait}}
-          {{if .Response}}, response = {{.Response.PascalName}}.from_dict(op_response){{end}}
-          {{range .Wait.Binding}}, {{template "safe-snake-name" .PollField}}={{if .IsResponseBind}}op_response['{{.Bind.Name}}']{{else}}{{template "safe-snake-name" .Bind}}{{end}}
-        {{- end}})
-{{- end}}
-
-{{define "method-call-paginated" -}}
-        {{if .Pagination.MultiRequest}}
-        {{if .NeedsOffsetDedupe -}}
-        # deduplicate items that may have been added during iteration
-        seen = set()
-        {{- end}}{{if and .Pagination.Offset (not (eq .Path "/api/2.1/clusters/events")) }}
-        query['{{.Pagination.Offset.Name}}'] =
-        {{- if eq .Pagination.Increment 1 -}}
-          1
-        {{- else if contains .Path "/scim/v2/" -}}
-          1
-        {{- else -}}
-          0
-        {{- end}}{{end}}{{if and .Pagination.Limit (contains .Path "/scim/v2/")}}
-        if "{{.Pagination.Limit.Name}}" not in query: query['{{.Pagination.Limit.Name}}'] = 100
-        {{- end}}
-        while True:
-          json = {{template "method-do" .}}
-          if '{{.Pagination.Results.Name}}' in json:
-            for v in json['{{.Pagination.Results.Name}}']:
-              {{if .NeedsOffsetDedupe -}}
-              i = v['{{.IdentifierField.Name}}']
-              if i in seen:
-                continue
-              seen.add(i)
-              {{end -}}
-              yield {{.Pagination.Entity.PascalName}}.from_dict(v)
-          {{ if .Pagination.Token -}}
-          if '{{.Pagination.Token.Bind.Name}}' not in json or not json['{{.Pagination.Token.Bind.Name}}']:
-            return
-          {{if or (eq "GET" .Verb) (eq "HEAD" .Verb)}}query{{else}}body{{end}}['{{.Pagination.Token.PollField.Name}}'] = json['{{.Pagination.Token.Bind.Name}}']
-          {{- else if eq .Path "/api/2.1/clusters/events" -}}
-          if 'next_page' not in json or not json['next_page']:
-            return
-          body = json['next_page']
-          {{- else -}}
-          if '{{.Pagination.Results.Name}}' not in json or not json['{{.Pagination.Results.Name}}']:
-            return
-          {{ if eq .Pagination.Increment 1 -}}
-          query['{{.Pagination.Offset.Name}}'] += 1
-          {{- else -}}
-          query['{{.Pagination.Offset.Name}}'] += len(json['{{.Pagination.Results.Name}}'])
-          {{- end}}
-          {{- end}}
-        {{else -}}
-        json = {{template "method-do" .}}
-        parsed = {{.Response.PascalName}}.from_dict(json).{{template "safe-snake-name" .Pagination.Results}}
-        return parsed if parsed is not None else []
-        {{end}}
-{{- end}}
-
-{{define "method-call-default" -}}
-        {{if not .Response.IsEmpty -}}
-        res = {{end}}{{template "method-do" .}}
-        {{if not .Response.IsEmpty -}}
-          {{- if .Response.ArrayValue -}}
-            return [{{.Response.ArrayValue.PascalName}}.from_dict(v) for v in res]
-          {{- else if .Response.MapValue -}}
-            return res
-          {{- else -}}
-            return {{template "type" .Response}}.from_dict(res)
-          {{- end}}
-        {{- end}}
-{{- end}}
-
-{{define "method-do" -}}
-    self._api.do('{{.Verb}}',
-    {{- if .Service.IsDataPlane -}}
-    url=data_plane_details.endpoint_url
-    {{- else -}}
-    {{ template "path" . }}
-    {{- end -}}
-    {{if .Request}}
-        {{- if .Request.HasQueryField}}, query=query{{end}}
-        {{- if .Request.MapValue}}, body=contents
-        {{- else if .Request.HasJsonField}}, body=body{{end}}
-    {{end}}
-    , headers=headers
-    {{if .Response.HasHeaderField -}}
-    , response_headers=response_headers
-    {{- end}}
-    {{- if and .IsRequestByteStream .RequestBodyField }}, data={{template "safe-snake-name" .RequestBodyField}}{{ end }}
-    {{- if .Service.IsDataPlane -}}
-    ,auth=auth
-    {{- end -}}
-    {{- if .IsResponseByteStream }}, raw=True{{ end }})
-{{- end}}
-
-{{- define "path" -}}
-{{- if .PathParts -}}
-  f'{{range  .PathParts -}}
-    {{- .Prefix -}}
-    {{- if .Field -}}
-      {{- "{" -}}
-      {{- if .Field.IsPathMultiSegment -}}_escape_multi_segment_path_parameter({{ template "path-parameter" . }})
-      {{- else -}}{{ template "path-parameter" . }}
-      {{- end -}}
-      {{- "}" -}}
-    {{- else if .IsAccountId}}
-      {{- "{" -}}
-      self._api.account_id
-      {{- "}" -}}
-    {{- end -}}
-  {{- end }}'
-{{- else -}}
-  '{{.Path}}'
-{{- end -}}
-{{- end -}}
-
-{{- define "path-parameter" -}}
-  {{template "safe-snake-name" .Field}}{{with .Field.Entity.Enum}}.value{{end}}
-{{- end -}}
-
-{{define "method-return-type" -}}
-  {{if and .Wait (and (not .IsCrudRead) (not (eq .SnakeName "get_run"))) }} -> Wait[{{.Wait.Poll.Response.PascalName}}]
-  {{- else if not .Response.IsEmpty }} -> {{if .Response.ArrayValue -}}
-    Iterator[{{template "type" .Response.ArrayValue}}]
-  {{- else if .Pagination -}}
-    Iterator[{{template "type" .Pagination.Entity}}]
-  {{- else -}}
-    {{- if .Response.IsExternal -}}
-      {{.Response.Package.Name}}.{{.Response.PascalName}}
-    {{- else -}}
-      {{.Response.PascalName}}
-    {{- end -}}
-  {{- end}}{{end}}
-{{- end}}
diff --git a/.gitattributes b/.gitattributes
index c8e5b2f0b..a0bfc0940 100755
--- a/.gitattributes
+++ b/.gitattributes
@@ -4,6 +4,7 @@ databricks/sdk/errors/platform.py linguist-generated=true
 databricks/sdk/service/apps.py linguist-generated=true
 databricks/sdk/service/billing.py linguist-generated=true
 databricks/sdk/service/catalog.py linguist-generated=true
+databricks/sdk/service/cleanrooms.py linguist-generated=true
 databricks/sdk/service/compute.py linguist-generated=true
 databricks/sdk/service/dashboards.py linguist-generated=true
 databricks/sdk/service/files.py linguist-generated=true
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index e2d7ab0db..91e519ede 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -1,12 +1,28 @@
-## Changes
-
+## What changes are proposed in this pull request?
 
-## Tests
-
+Provide the readers and reviewers with the information they need to understand
+this PR in a comprehensive manner. 
 
-- [ ] `make test` run locally
-- [ ] `make fmt` applied
-- [ ] relevant integration tests applied
+Specifically, try to answer the following two questions:
 
+- **WHAT** changes are being made in the PR? This should be a summary of the 
+  major changes to allow the reader to quickly understand the PR without having
+  to look at the code. 
+- **WHY** are these changes needed? This should provide the context that the 
+  reader might be missing. For example, were there any decisions behind the 
+  change that are not reflected in the code itself? 
+
+The “why” part is the more important of the two, as it usually cannot be
+inferred from the code itself. A well-written PR description will help future
+developers (including your future self) understand how to interact with and
+update your code.
+
+## How is this tested?
+
+Describe any tests you have done, especially if the tests you ran are not part
+of the unit tests (e.g. local tests).
+
+**ALWAYS ANSWER THIS QUESTION:** Answer with "N/A" if tests are not applicable
+to your PR (e.g. if the PR only modifies comments). Do not be afraid to answer
+"Not tested" if the PR has not been tested. Being clear about what has and has
+not been done provides important context to the reviewers.
\ No newline at end of file
diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml
new file mode 100644
index 000000000..6771057c7
--- /dev/null
+++ b/.github/workflows/external-message.yml
@@ -0,0 +1,59 @@
+name: PR Comment
+
+# WARNING:
+# THIS WORKFLOW ALWAYS RUNS FOR EXTERNAL CONTRIBUTORS WITHOUT ANY APPROVAL.
+# THIS WORKFLOW RUNS FROM MAIN BRANCH, NOT FROM THE PR BRANCH.
+# DO NOT PULL THE PR OR EXECUTE ANY CODE FROM THE PR.
+
+on:
+  pull_request_target:
+    types: [opened, reopened, synchronize]
+    branches:
+      - main
+
+jobs:
+  comment-on-pr:
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
+    permissions:
+      pull-requests: write
+
+    steps:
+      - uses: actions/checkout@v4
+
+      - name: Delete old comments
+        env:
+           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+            # Delete previous comment if it exists
+            previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
+              --jq '.[] | select(.body | startswith("")) | .id')
+            echo "Previous comment IDs: $previous_comment_ids"
+            # Iterate over each comment ID and delete the comment
+            if [ ! -z "$previous_comment_ids" ]; then
+              echo "$previous_comment_ids" | while read -r comment_id; do
+                echo "Deleting comment with ID: $comment_id"
+                gh api "repos/${{ github.repository }}/issues/comments/$comment_id" -X DELETE
+              done
+            fi
+
+      - name: Comment on PR
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
+        run: |
+          gh pr comment ${{ github.event.pull_request.number }} --body \
+          "
+          If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
+
+          Trigger:
+          [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
+
+          Inputs:
+          * PR number: ${{github.event.pull_request.number}}
+          * Commit SHA: \`${{ env.COMMIT_SHA }}\`
+
+          Checks will be approved automatically on success.
+          "
diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
new file mode 100644
index 000000000..c308cc03c
--- /dev/null
+++ b/.github/workflows/integration-tests.yml
@@ -0,0 +1,90 @@
+name: Integration Tests
+
+on:
+
+  pull_request:
+    types: [opened, synchronize]
+
+  merge_group:
+
+
+jobs:
+  check-token:
+    name: Check secrets access
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
+    environment: "test-trigger-is"
+    outputs:
+      has_token: ${{ steps.set-token-status.outputs.has_token }}
+    steps:
+      - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
+        id: set-token-status
+        run: |
+            if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
+              echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
+              echo "has_token=false" >> "$GITHUB_OUTPUT"
+            else
+              echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
+              echo "has_token=true" >> "$GITHUB_OUTPUT"
+            fi
+
+  trigger-tests:
+    name: Trigger Tests
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
+    needs: check-token
+    if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true'
+    environment: "test-trigger-is"
+
+    steps:
+    - uses: actions/checkout@v3
+
+    - name: Generate GitHub App Token
+      id: generate-token
+      uses: actions/create-github-app-token@v1
+      with:
+        app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
+        private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
+        owner: ${{ secrets.ORG_NAME }}
+        repositories: ${{secrets.REPO_NAME}}
+
+    - name: Trigger Workflow in Another Repo
+      env:
+        GH_TOKEN: ${{ steps.generate-token.outputs.token }}
+      run: |
+        gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
+        --ref main \
+        -f pull_request_number=${{ github.event.pull_request.number }} \
+        -f commit_sha=${{ github.event.pull_request.head.sha }}
+
+  # Statuses and checks apply to specific commits (by hash).
+  # Enforcement of required checks is done both at the PR level and the merge queue level.
+  # In case of multiple commits in a single PR, the hash of the squashed commit
+  # will not match the one for the latest (approved) commit in the PR.
+  # We auto approve the check for the merge queue for two reasons:
+  # * Queue times out due to duration of tests.
+  # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
+  auto-approve:
+    if: github.event_name == 'merge_group'
+
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
+    steps:
+      - name: Mark Check
+        env:
+          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        shell: bash
+        run: |
+            gh api -X POST -H "Accept: application/vnd.github+json" \
+              -H "X-GitHub-Api-Version: 2022-11-28" \
+              /repos/${{ github.repository }}/statuses/${{ github.sha }} \
+              -f 'state=success' \
+              -f 'context=Integration Tests Check'
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index c7600ebee..a839096c0 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -10,26 +10,26 @@ on:
       - main
 
 jobs:
-  tests:
+  tests-ubuntu:
+    uses: ./.github/workflows/test.yml
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Unshallow
-        run: git fetch --prune --unshallow
-
-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.pyVersion }}
-
-      - name: Run tests
-        run: make dev install test
-
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
+    with:
+      os: ubuntu-latest
+      pyVersion: ${{ matrix.pyVersion }}
+
+  tests-windows:
+      uses: ./.github/workflows/test.yml
+      strategy:
+        fail-fast: false
+        matrix:
+          pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
+      with:
+        os: windows-latest
+        pyVersion: ${{ matrix.pyVersion }}
+          
   fmt:
     runs-on: ubuntu-latest
 
diff --git a/.github/workflows/release-test.yml b/.github/workflows/release-test.yml
index c3349b75a..0e8c4d8e0 100644
--- a/.github/workflows/release-test.yml
+++ b/.github/workflows/release-test.yml
@@ -5,10 +5,15 @@ on:
 
 jobs:
   publish:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: release-test
+
     permissions:
       id-token: write
+
     steps:
       - uses: actions/checkout@v3
 
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 892bbc5c6..ae242c1d8 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,11 +7,16 @@ on:
 
 jobs:
   publish:
-    runs-on: ubuntu-latest
+    runs-on:
+      group: databricks-deco-testing-runner-group
+      labels: ubuntu-latest-deco
+
     environment: release
+
     permissions:
       contents: write
       id-token: write
+
     steps:
       - uses: actions/checkout@v3
 
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 458921ee0..95a290655 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,296 @@
 # Version changelog
 
+## [Release] Release v0.44.1
+
+### New Features and Improvements
+
+ * Introduce new Credential Strategies for Agents ([#882](https://github.com/databricks/databricks-sdk-py/pull/882)).
+
+
+### Internal Changes
+
+ * GetRun logic paginates more arrays ([#867](https://github.com/databricks/databricks-sdk-py/pull/867)).
+
+
+
+## [Release] Release v0.44.0
+
+### Internal Changes
+
+ * Fix `tests/integration/test_dbutils.py::test_secrets` ([#884](https://github.com/databricks/databricks-sdk-py/pull/884)).
+
+
+### API Changes:
+
+ * Added `get_message_query_result_by_attachment()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service.
+ * Added `id` field for `databricks.sdk.service.apps.App`.
+ * Added `limit_config` field for `databricks.sdk.service.billing.UpdateBudgetPolicyRequest`.
+ * Added `volumes` field for `databricks.sdk.service.compute.ClusterLogConf`.
+ * Removed `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.
+
+OpenAPI SHA: 99f644e72261ef5ecf8d74db20f4b7a1e09723cc, Date: 2025-02-11
+
+## [Release] Release v0.43.0
+
+### API Changes:
+
+ * Added [w.lakeview_embedded](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview_embedded.html) workspace-level service and [w.query_execution](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/query_execution.html) workspace-level service.
+ * Added [w.redash_config](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/redash_config.html) workspace-level service.
+ * Added `gcp_oauth_token` field for `databricks.sdk.service.catalog.TemporaryCredentials`.
+ * Added `options` field for `databricks.sdk.service.catalog.UpdateCatalog`.
+ * Added `disabled` field for `databricks.sdk.service.jobs.RunTask`.
+
+OpenAPI SHA: c72c58f97b950fcb924a90ef164bcb10cfcd5ece, Date: 2025-02-03
+
+## [Release] Release v0.42.0
+
+### Bug Fixes
+
+ * Fix docs generation when two services have the same name ([#872](https://github.com/databricks/databricks-sdk-py/pull/872)).
+
+### Internal Changes
+
+ * Add CICD environment to the User Agent ([#866](https://github.com/databricks/databricks-sdk-py/pull/866)).
+ * Add unit tests for retriable requests ([#879](https://github.com/databricks/databricks-sdk-py/pull/879)).
+ * Extract "before retry" handler, use it to rewind the stream ([#878](https://github.com/databricks/databricks-sdk-py/pull/878)).
+ * Update Model Serving `http_request` mixin to correctly use the underlying API.  ([#876](https://github.com/databricks/databricks-sdk-py/pull/876)).
+
+### Backward Incompatible Changes
+
+* Changed `create()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service with new required argument order.
+* Changed `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.HttpRequestResponse` dataclass.
+* Changed `config` field for `databricks.sdk.service.serving.CreateServingEndpoint` to no longer be required.
+* Removed `securable_kind` field for `databricks.sdk.service.catalog.CatalogInfo`.
+* Removed `securable_kind` field for `databricks.sdk.service.catalog.ConnectionInfo`.
+* Removed `status_code` and `text` fields for `databricks.sdk.service.serving.ExternalFunctionResponse`.
+
+### API Changes:
+
+* Added [a.budget_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/billing/budget_policy.html) account-level service.
+* Added [a.enable_ip_access_lists](https://databricks-sdk-py.readthedocs.io/en/latest/account/settings/settings/enable_ip_access_lists.html) account-level service.
+* Added `review_state`, `reviews` and `runner_collaborators` fields for `databricks.sdk.service.cleanrooms.CleanRoomAssetNotebook`.
+* Added `statement_id` field for `databricks.sdk.service.dashboards.QueryAttachment`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.BaseRun`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.CreateJob`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.JobSettings`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.Run`.
+* Added `performance_target` field for `databricks.sdk.service.jobs.RunNow`.
+* Added `effective_performance_target` field for `databricks.sdk.service.jobs.RunTask`.
+* Added `run_as_repl` field for `databricks.sdk.service.jobs.SparkJarTask`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.CreateCustomAppIntegration`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.GetCustomAppIntegrationOutput`.
+* Added `user_authorized_scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
+* Added `contents` field for `databricks.sdk.service.serving.HttpRequestResponse`.
+* Added `clean_room` enum value for `databricks.sdk.service.catalog.SecurableType`.
+* Added `budget_policy_limit_exceeded` enum value for `databricks.sdk.service.jobs.TerminationCodeCode`.
+* Added `arclight_azure_exchange_token_with_user_delegation_key` enum value for `databricks.sdk.service.settings.TokenType`.
+
+OpenAPI SHA: 840c660106f820a1a5dff931d51fa5f65cd9fdd9, Date: 2025-01-28
+
+## [Release] Release v0.41.0
+
+### New Features and Improvements
+
+ * Add `serving.http_request` to call external functions. ([#857](https://github.com/databricks/databricks-sdk-py/pull/857)).
+ * Files API client: recover on download failures ([#844](https://github.com/databricks/databricks-sdk-py/pull/844)) ([#845](https://github.com/databricks/databricks-sdk-py/pull/845)).
+
+
+### Bug Fixes
+
+ * Properly pass query parameters in apps and oauth2 ([#862](https://github.com/databricks/databricks-sdk-py/pull/862)).
+
+
+### Internal Changes
+
+ * Add unit tests for external-browser authentication ([#863](https://github.com/databricks/databricks-sdk-py/pull/863)).
+ * Decouple oauth2 and serving  ([#855](https://github.com/databricks/databricks-sdk-py/pull/855)).
+ * Migrate workflows that need write access to use hosted runners ([#850](https://github.com/databricks/databricks-sdk-py/pull/850)).
+ * Stop testing Python 3.7 on Ubuntu ([#858](https://github.com/databricks/databricks-sdk-py/pull/858)).
+
+
+### API Changes:
+
+ * Added [w.access_control](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/access_control.html) workspace-level service.
+ * Added `http_request()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service.
+ * Added `no_compute` field for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.BaseJob`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.BaseRun`.
+ * Added `page_token` field for `databricks.sdk.service.jobs.GetJobRequest`.
+ * Added `has_more` and `next_page_token` fields for `databricks.sdk.service.jobs.Job`.
+ * Added `has_more` field for `databricks.sdk.service.jobs.Run`.
+ * Added `clean_rooms_notebook_output` field for `databricks.sdk.service.jobs.RunOutput`.
+ * Added `scopes` field for `databricks.sdk.service.oauth2.UpdateCustomAppIntegration`.
+ * Added `run_as` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `run_as` field for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `authorization_details` and `endpoint_url` fields for `databricks.sdk.service.serving.DataPlaneInfo`.
+ * Added `contents` field for `databricks.sdk.service.serving.GetOpenApiResponse`.
+ * Added `activated`, `activation_url`, `authentication_type`, `cloud`, `comment`, `created_at`, `created_by`, `data_recipient_global_metastore_id`, `ip_access_list`, `metastore_id`, `name`, `owner`, `properties_kvpairs`, `region`, `sharing_code`, `tokens`, `updated_at` and `updated_by` fields for `databricks.sdk.service.sharing.RecipientInfo`.
+ * Added `expiration_time` field for `databricks.sdk.service.sharing.RecipientInfo`.
+ * Changed `update()` method for [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service with new required argument order.
+ * Changed `update()` method for [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service with new required argument order.
+ * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service to return `databricks.sdk.service.sharing.RecipientInfo` dataclass.
+ * Changed `update()` method for [w.recipients](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/recipients.html) workspace-level service return type to become non-empty.
+ * Changed `get_open_api()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service return type to become non-empty.
+ * Changed `patch()` method for [w.serving_endpoints](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/serving_endpoints.html) workspace-level service to return `databricks.sdk.service.serving.EndpointTags` dataclass.
+ * Changed `databricks.sdk.service.serving.EndpointTagList` dataclass to.
+ * Changed `collaborator_alias` field for `databricks.sdk.service.cleanrooms.CleanRoomCollaborator` to be required.
+ * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateAccountFederationPolicyRequest` to no longer be required.
+ * Changed `update_mask` field for `databricks.sdk.service.oauth2.UpdateServicePrincipalFederationPolicyRequest` to no longer be required.
+ * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.DayOfWeekList` dataclass.
+ * Changed `behavior` field for `databricks.sdk.service.serving.AiGatewayGuardrailPiiBehavior` to no longer be required.
+ * Changed `project_id` and `region` fields for `databricks.sdk.service.serving.GoogleCloudVertexAiConfig` to be required.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityInput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedEntityOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+ * Changed `workload_type` field for `databricks.sdk.service.serving.ServedModelOutput` to type `databricks.sdk.service.serving.ServingModelWorkloadType` dataclass.
+
+OpenAPI SHA: 58905570a9928fc9ed31fba14a2edaf9a7c55b08, Date: 2025-01-20
+
+## [Release] Release v0.40.0
+
+### API Changes:
+
+ * Added [a.account_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/account_federation_policy.html) account-level service and [a.service_principal_federation_policy](https://databricks-sdk-py.readthedocs.io/en/latest/account/service_principal_federation_policy.html) account-level service.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterAttributes`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterDetails`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.ClusterSpec`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.CreateCluster`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.EditCluster`.
+ * Added `is_single_node`, `kind` and `use_ml_runtime` fields for `databricks.sdk.service.compute.UpdateClusterResource`.
+ * Added `update_parameter_syntax` field for `databricks.sdk.service.dashboards.MigrateDashboardRequest`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.RunTask`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.SubmitTask`.
+ * Added `clean_rooms_notebook_task` field for `databricks.sdk.service.jobs.Task`.
+ * Changed `days_of_week` field for `databricks.sdk.service.pipelines.RestartWindow` to type `databricks.sdk.service.pipelines.RestartWindowDaysOfWeekList` dataclass.
+
+OpenAPI SHA: a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d, Date: 2024-12-16
+
+## [Release] Release v0.39.0
+
+### Bug Fixes
+
+ * Update Changelog file ([#830](https://github.com/databricks/databricks-sdk-py/pull/830)).
+
+
+### Internal Changes
+
+ * Fix a couple of typos in open_ai_client.py ([#829](https://github.com/databricks/databricks-sdk-py/pull/829)).
+ * Update SDK to OpenAPI spec ([#834](https://github.com/databricks/databricks-sdk-py/pull/834)).
+
+
+### API Changes:
+
+ * Added `databricks.sdk.service.cleanrooms` package.
+ * Added `delete()` method for [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service.
+ * Added `delete()` method for [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.CredentialInfo`.
+ * Added `gcp_options` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest`.
+ * Added `databricks_gcp_service_account` field for `databricks.sdk.service.catalog.UpdateCredentialRequest`.
+ * Added `cached_query_schema` field for `databricks.sdk.service.dashboards.QueryAttachment`.
+ * Added .
+ * Removed `gcp_service_account_key` field for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+
+OpenAPI SHA: 7016dcbf2e011459416cf408ce21143bcc4b3a25, Date: 2024-12-05
+
+## [Release] Release v0.38.0
+
+### New Features and Improvements
+
+ * Read streams by 1MB chunks by default. ([#817](https://github.com/databricks/databricks-sdk-py/pull/817)).
+
+### Bug Fixes
+
+ * Rewind seekable streams before retrying ([#821](https://github.com/databricks/databricks-sdk-py/pull/821)).
+ * Properly serialize nested data classes. 
+
+### Internal Changes
+
+ * Reformat SDK with YAPF 0.43. ([#822](https://github.com/databricks/databricks-sdk-py/pull/822)).
+ * Update Jobs GetRun API to support paginated responses for jobs and ForEach tasks ([#819](https://github.com/databricks/databricks-sdk-py/pull/819)).
+
+### API Changes:
+
+ * Added `service_principal_client_id` field for `databricks.sdk.service.apps.App`.
+ * Added `azure_service_principal`, `gcp_service_account_key` and `read_only` fields for `databricks.sdk.service.catalog.CreateCredentialRequest`.
+ * Added `azure_service_principal`, `read_only` and `used_for_managed_storage` fields for `databricks.sdk.service.catalog.CredentialInfo`.
+ * Added `omit_username` field for `databricks.sdk.service.catalog.ListTablesRequest`.
+ * Added `azure_service_principal` and `read_only` fields for `databricks.sdk.service.catalog.UpdateCredentialRequest`.
+ * Added `external_location_name`, `read_only` and `url` fields for `databricks.sdk.service.catalog.ValidateCredentialRequest`.
+ * Added `is_dir` field for `databricks.sdk.service.catalog.ValidateCredentialResponse`.
+ * Added `only` field for `databricks.sdk.service.jobs.RunNow`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.CreatePipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.EditPipeline`.
+ * Added `restart_window` field for `databricks.sdk.service.pipelines.PipelineSpec`.
+ * Added `private_access_settings_id` field for `databricks.sdk.service.provisioning.UpdateWorkspaceRequest`.
+ * Changed `create_credential()` and `generate_temporary_service_credential()` methods for [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service with new required argument order.
+ * Changed `access_connector_id` field for `databricks.sdk.service.catalog.AzureManagedIdentity` to be required.
+ * Changed `name` field for `databricks.sdk.service.catalog.CreateCredentialRequest` to be required.
+ * Changed `credential_name` field for `databricks.sdk.service.catalog.GenerateTemporaryServiceCredentialRequest` to be required.
+
+OpenAPI SHA: f2385add116e3716c8a90a0b68e204deb40f996c, Date: 2024-11-15
+
+## [Release] Release v0.37.0
+
+### Bug Fixes
+
+ * Correctly generate classes with nested body fields ([#808](https://github.com/databricks/databricks-sdk-py/pull/808)).
+
+
+### Internal Changes
+
+ * Add `cleanrooms` package ([#806](https://github.com/databricks/databricks-sdk-py/pull/806)).
+ * Add test instructions for external contributors ([#804](https://github.com/databricks/databricks-sdk-py/pull/804)).
+ * Always write message for manual test execution ([#811](https://github.com/databricks/databricks-sdk-py/pull/811)).
+ * Automatically trigger integration tests on PR ([#800](https://github.com/databricks/databricks-sdk-py/pull/800)).
+ * Better isolate ML serving auth unit tests ([#803](https://github.com/databricks/databricks-sdk-py/pull/803)).
+ * Move templates in the code generator ([#809](https://github.com/databricks/databricks-sdk-py/pull/809)).
+
+
+### API Changes:
+
+ * Added [w.aibi_dashboard_embedding_access_policy](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_access_policy.html) workspace-level service and [w.aibi_dashboard_embedding_approved_domains](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/settings/aibi_dashboard_embedding_approved_domains.html) workspace-level service.
+ * Added [w.credentials](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/credentials.html) workspace-level service.
+ * Added `app_deployment` field for `databricks.sdk.service.apps.CreateAppDeploymentRequest`.
+ * Added `app` field for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Added `app` field for `databricks.sdk.service.apps.UpdateAppRequest`.
+ * Added `table` field for `databricks.sdk.service.catalog.CreateOnlineTableRequest`.
+ * Added `azure_aad` field for `databricks.sdk.service.catalog.GenerateTemporaryTableCredentialResponse`.
+ * Added `full_name` field for `databricks.sdk.service.catalog.StorageCredentialInfo`.
+ * Added `dashboard` field for `databricks.sdk.service.dashboards.CreateDashboardRequest`.
+ * Added `schedule` field for `databricks.sdk.service.dashboards.CreateScheduleRequest`.
+ * Added `subscription` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
+ * Added `warehouse_id` field for `databricks.sdk.service.dashboards.Schedule`.
+ * Added `dashboard` field for `databricks.sdk.service.dashboards.UpdateDashboardRequest`.
+ * Added `schedule` field for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.
+ * Added `page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsRequest`.
+ * Added `next_page_token` field for `databricks.sdk.service.oauth2.ListServicePrincipalSecretsResponse`.
+ * Added `connection_name` field for `databricks.sdk.service.pipelines.IngestionGatewayPipelineDefinition`.
+ * Added `is_no_public_ip_enabled` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
+ * Added `external_customer_info` and `is_no_public_ip_enabled` fields for `databricks.sdk.service.provisioning.Workspace`.
+ * Added `last_used_day` field for `databricks.sdk.service.settings.TokenInfo`.
+ * Changed `create()` method for [w.apps](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/apps.html) workspace-level service with new required argument order.
+ * Changed `execute_message_query()` method for [w.genie](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/genie.html) workspace-level service. New request type is `databricks.sdk.service.dashboards.GenieExecuteMessageQueryRequest` dataclass.
+ * Changed `create()`, `create_schedule()`, `create_subscription()` and `update_schedule()` methods for [w.lakeview](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/lakeview.html) workspace-level service with new required argument order.
+ * Removed [w.clean_rooms](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/clean_rooms.html) workspace-level service.
+ * Removed `deployment_id`, `mode` and `source_code_path` fields for `databricks.sdk.service.apps.CreateAppDeploymentRequest`.
+ * Removed `description`, `name` and `resources` fields for `databricks.sdk.service.apps.CreateAppRequest`.
+ * Removed `description` and `resources` fields for `databricks.sdk.service.apps.UpdateAppRequest`.
+ * Removed `name` and `spec` fields for `databricks.sdk.service.catalog.CreateOnlineTableRequest`.
+ * Removed `display_name`, `parent_path`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.CreateDashboardRequest`.
+ * Removed `cron_schedule`, `display_name` and `pause_status` fields for `databricks.sdk.service.dashboards.CreateScheduleRequest`.
+ * Removed `subscriber` field for `databricks.sdk.service.dashboards.CreateSubscriptionRequest`.
+ * Removed `display_name`, `etag`, `serialized_dashboard` and `warehouse_id` fields for `databricks.sdk.service.dashboards.UpdateDashboardRequest`.
+ * Removed `cron_schedule`, `display_name`, `etag` and `pause_status` fields for `databricks.sdk.service.dashboards.UpdateScheduleRequest`.
+ * Removed `prev_page_token` field for `databricks.sdk.service.jobs.Run`.
+
+OpenAPI SHA: 5285ce76f81314f342c1702d5c2ad4ef42488781, Date: 2024-11-04
+
 ## [Release] Release v0.36.0
 
 ### Breaking Changes
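
Several entries above concern the Files API client: 1 MB chunked reads became the default in v0.38.0, and download recovery landed in v0.41.0. Below is a minimal, hedged sketch of how the experimental Files API client wired up later in this patch is opted into — assuming `enable_experimental_files_api_client` is accepted as a `Config` attribute (as the `_make_files_client()` helper further down suggests), that `download()` exposes a binary stream via `.contents`, and that authentication is resolved from the environment; the volume path is a placeholder.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.core import Config

# Opt into the experimental Files API client (FilesExt). Without this flag,
# WorkspaceClient falls back to the plain generated FilesAPI.
config = Config(enable_experimental_files_api_client=True)
w = WorkspaceClient(config=config)

# Stream a Unity Catalog volume file in 1 MB chunks instead of reading it
# into memory at once; the chunk size mirrors the v0.38.0 default noted above.
stream = w.files.download('/Volumes/main/default/vol/data.bin').contents
total = 0
while True:
    chunk = stream.read(1024 * 1024)
    if not chunk:
        break
    total += len(chunk)
print(f'downloaded {total} bytes')
```
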
diff --git a/databricks/sdk/__init__.py b/databricks/sdk/__init__.py
index 159946461..79b1c3353 100755
--- a/databricks/sdk/__init__.py
+++ b/databricks/sdk/__init__.py
@@ -1,21 +1,27 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+import logging
 from typing import Optional
 
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
+import databricks.sdk.service as service
 from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
-from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.files import DbfsExt, FilesExt
+from databricks.sdk.mixins.jobs import JobsExt
 from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
-from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
-                                            LogDeliveryAPI, UsageDashboardsAPI)
+from databricks.sdk.service.billing import (BillableUsageAPI, BudgetPolicyAPI,
+                                            BudgetsAPI, LogDeliveryAPI,
+                                            UsageDashboardsAPI)
 from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             AccountMetastoresAPI,
                                             AccountStorageCredentialsAPI,
                                             ArtifactAllowlistsAPI, CatalogsAPI,
-                                            ConnectionsAPI,
+                                            ConnectionsAPI, CredentialsAPI,
                                             ExternalLocationsAPI, FunctionsAPI,
                                             GrantsAPI, MetastoresAPI,
                                             ModelVersionsAPI, OnlineTablesAPI,
@@ -27,6 +33,9 @@
                                             TableConstraintsAPI, TablesAPI,
                                             TemporaryTableCredentialsAPI,
                                             VolumesAPI, WorkspaceBindingsAPI)
+from databricks.sdk.service.cleanrooms import (CleanRoomAssetsAPI,
+                                               CleanRoomsAPI,
+                                               CleanRoomTaskRunsAPI)
 from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
                                             CommandExecutionAPI,
                                             GlobalInitScriptsAPI,
@@ -34,9 +43,12 @@
                                             InstanceProfilesAPI, LibrariesAPI,
                                             PolicyComplianceForClustersAPI,
                                             PolicyFamiliesAPI)
-from databricks.sdk.service.dashboards import GenieAPI, LakeviewAPI
+from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
+                                               LakeviewEmbeddedAPI,
+                                               QueryExecutionAPI)
 from databricks.sdk.service.files import DbfsAPI, FilesAPI
-from databricks.sdk.service.iam import (AccountAccessControlAPI,
+from databricks.sdk.service.iam import (AccessControlAPI,
+                                        AccountAccessControlAPI,
                                         AccountAccessControlProxyAPI,
                                         AccountGroupsAPI,
                                         AccountServicePrincipalsAPI,
@@ -52,9 +64,11 @@
     ProviderListingsAPI, ProviderPersonalizationRequestsAPI,
     ProviderProviderAnalyticsDashboardsAPI, ProviderProvidersAPI)
 from databricks.sdk.service.ml import ExperimentsAPI, ModelRegistryAPI
-from databricks.sdk.service.oauth2 import (CustomAppIntegrationAPI,
+from databricks.sdk.service.oauth2 import (AccountFederationPolicyAPI,
+                                           CustomAppIntegrationAPI,
                                            OAuthPublishedAppsAPI,
                                            PublishedAppIntegrationAPI,
+                                           ServicePrincipalFederationPolicyAPI,
                                            ServicePrincipalSecretsAPI)
 from databricks.sdk.service.pipelines import PipelinesAPI
 from databricks.sdk.service.provisioning import (CredentialsAPI,
@@ -64,26 +78,18 @@
                                                  Workspace, WorkspacesAPI)
 from databricks.sdk.service.serving import (ServingEndpointsAPI,
                                             ServingEndpointsDataPlaneAPI)
-from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
-                                             AccountSettingsAPI,
-                                             AutomaticClusterUpdateAPI,
-                                             ComplianceSecurityProfileAPI,
-                                             CredentialsManagerAPI,
-                                             CspEnablementAccountAPI,
-                                             DefaultNamespaceAPI,
-                                             DisableLegacyAccessAPI,
-                                             DisableLegacyDbfsAPI,
-                                             DisableLegacyFeaturesAPI,
-                                             EnhancedSecurityMonitoringAPI,
-                                             EsmEnablementAccountAPI,
-                                             IpAccessListsAPI,
-                                             NetworkConnectivityAPI,
-                                             NotificationDestinationsAPI,
-                                             PersonalComputeAPI,
-                                             RestrictWorkspaceAdminsAPI,
-                                             SettingsAPI, TokenManagementAPI,
-                                             TokensAPI, WorkspaceConfAPI)
-from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
+from databricks.sdk.service.settings import (
+    AccountIpAccessListsAPI, AccountSettingsAPI,
+    AibiDashboardEmbeddingAccessPolicyAPI,
+    AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
+    ComplianceSecurityProfileAPI, CredentialsManagerAPI,
+    CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
+    DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI, EnableIpAccessListsAPI,
+    EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
+    NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
+    RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
+    WorkspaceConfAPI)
+from databricks.sdk.service.sharing import (ProvidersAPI,
                                             RecipientActivationAPI,
                                             RecipientsAPI, SharesAPI)
 from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
@@ -93,12 +99,15 @@
                                         QueryHistoryAPI,
                                         QueryVisualizationsAPI,
                                         QueryVisualizationsLegacyAPI,
-                                        StatementExecutionAPI, WarehousesAPI)
+                                        RedashConfigAPI, StatementExecutionAPI,
+                                        WarehousesAPI)
 from databricks.sdk.service.vectorsearch import (VectorSearchEndpointsAPI,
                                                  VectorSearchIndexesAPI)
 from databricks.sdk.service.workspace import (GitCredentialsAPI, ReposAPI,
                                               SecretsAPI, WorkspaceAPI)
 
+_LOG = logging.getLogger(__name__)
+
 
 def _make_dbutils(config: client.Config):
     # We try to directly check if we are in runtime, instead of
@@ -116,6 +125,14 @@ def _make_dbutils(config: client.Config):
     return runtime_dbutils
 
 
+def _make_files_client(apiClient: client.ApiClient, config: client.Config):
+    if config.enable_experimental_files_api_client:
+        _LOG.info("Experimental Files API client is enabled")
+        return FilesExt(apiClient, config)
+    else:
+        return FilesAPI(apiClient)
+
+
 class WorkspaceClient:
     """
     The WorkspaceClient is a client for the workspace-level Databricks REST API.
@@ -177,98 +194,109 @@ def __init__(self,
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
         serving_endpoints = ServingEndpointsExt(self._api_client)
-        self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
-        self._alerts = AlertsAPI(self._api_client)
-        self._alerts_legacy = AlertsLegacyAPI(self._api_client)
-        self._apps = AppsAPI(self._api_client)
-        self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
-        self._catalogs = CatalogsAPI(self._api_client)
-        self._clean_rooms = CleanRoomsAPI(self._api_client)
-        self._cluster_policies = ClusterPoliciesAPI(self._api_client)
+        self._access_control = service.iam.AccessControlAPI(self._api_client)
+        self._account_access_control_proxy = service.iam.AccountAccessControlProxyAPI(self._api_client)
+        self._alerts = service.sql.AlertsAPI(self._api_client)
+        self._alerts_legacy = service.sql.AlertsLegacyAPI(self._api_client)
+        self._apps = service.apps.AppsAPI(self._api_client)
+        self._artifact_allowlists = service.catalog.ArtifactAllowlistsAPI(self._api_client)
+        self._catalogs = service.catalog.CatalogsAPI(self._api_client)
+        self._clean_room_assets = service.cleanrooms.CleanRoomAssetsAPI(self._api_client)
+        self._clean_room_task_runs = service.cleanrooms.CleanRoomTaskRunsAPI(self._api_client)
+        self._clean_rooms = service.cleanrooms.CleanRoomsAPI(self._api_client)
+        self._cluster_policies = service.compute.ClusterPoliciesAPI(self._api_client)
         self._clusters = ClustersExt(self._api_client)
-        self._command_execution = CommandExecutionAPI(self._api_client)
-        self._connections = ConnectionsAPI(self._api_client)
-        self._consumer_fulfillments = ConsumerFulfillmentsAPI(self._api_client)
-        self._consumer_installations = ConsumerInstallationsAPI(self._api_client)
-        self._consumer_listings = ConsumerListingsAPI(self._api_client)
-        self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
-        self._consumer_providers = ConsumerProvidersAPI(self._api_client)
-        self._credentials_manager = CredentialsManagerAPI(self._api_client)
-        self._current_user = CurrentUserAPI(self._api_client)
-        self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
-        self._dashboards = DashboardsAPI(self._api_client)
-        self._data_sources = DataSourcesAPI(self._api_client)
+        self._command_execution = service.compute.CommandExecutionAPI(self._api_client)
+        self._connections = service.catalog.ConnectionsAPI(self._api_client)
+        self._consumer_fulfillments = service.marketplace.ConsumerFulfillmentsAPI(self._api_client)
+        self._consumer_installations = service.marketplace.ConsumerInstallationsAPI(self._api_client)
+        self._consumer_listings = service.marketplace.ConsumerListingsAPI(self._api_client)
+        self._consumer_personalization_requests = service.marketplace.ConsumerPersonalizationRequestsAPI(
+            self._api_client)
+        self._consumer_providers = service.marketplace.ConsumerProvidersAPI(self._api_client)
+        self._credentials = service.catalog.CredentialsAPI(self._api_client)
+        self._credentials_manager = service.settings.CredentialsManagerAPI(self._api_client)
+        self._current_user = service.iam.CurrentUserAPI(self._api_client)
+        self._dashboard_widgets = service.sql.DashboardWidgetsAPI(self._api_client)
+        self._dashboards = service.sql.DashboardsAPI(self._api_client)
+        self._data_sources = service.sql.DataSourcesAPI(self._api_client)
         self._dbfs = DbfsExt(self._api_client)
-        self._dbsql_permissions = DbsqlPermissionsAPI(self._api_client)
-        self._experiments = ExperimentsAPI(self._api_client)
-        self._external_locations = ExternalLocationsAPI(self._api_client)
-        self._files = FilesAPI(self._api_client)
-        self._functions = FunctionsAPI(self._api_client)
-        self._genie = GenieAPI(self._api_client)
-        self._git_credentials = GitCredentialsAPI(self._api_client)
-        self._global_init_scripts = GlobalInitScriptsAPI(self._api_client)
-        self._grants = GrantsAPI(self._api_client)
-        self._groups = GroupsAPI(self._api_client)
-        self._instance_pools = InstancePoolsAPI(self._api_client)
-        self._instance_profiles = InstanceProfilesAPI(self._api_client)
-        self._ip_access_lists = IpAccessListsAPI(self._api_client)
-        self._jobs = JobsAPI(self._api_client)
-        self._lakeview = LakeviewAPI(self._api_client)
-        self._libraries = LibrariesAPI(self._api_client)
-        self._metastores = MetastoresAPI(self._api_client)
-        self._model_registry = ModelRegistryAPI(self._api_client)
-        self._model_versions = ModelVersionsAPI(self._api_client)
-        self._notification_destinations = NotificationDestinationsAPI(self._api_client)
-        self._online_tables = OnlineTablesAPI(self._api_client)
-        self._permission_migration = PermissionMigrationAPI(self._api_client)
-        self._permissions = PermissionsAPI(self._api_client)
-        self._pipelines = PipelinesAPI(self._api_client)
-        self._policy_compliance_for_clusters = PolicyComplianceForClustersAPI(self._api_client)
-        self._policy_compliance_for_jobs = PolicyComplianceForJobsAPI(self._api_client)
-        self._policy_families = PolicyFamiliesAPI(self._api_client)
-        self._provider_exchange_filters = ProviderExchangeFiltersAPI(self._api_client)
-        self._provider_exchanges = ProviderExchangesAPI(self._api_client)
-        self._provider_files = ProviderFilesAPI(self._api_client)
-        self._provider_listings = ProviderListingsAPI(self._api_client)
-        self._provider_personalization_requests = ProviderPersonalizationRequestsAPI(self._api_client)
-        self._provider_provider_analytics_dashboards = ProviderProviderAnalyticsDashboardsAPI(
+        self._dbsql_permissions = service.sql.DbsqlPermissionsAPI(self._api_client)
+        self._experiments = service.ml.ExperimentsAPI(self._api_client)
+        self._external_locations = service.catalog.ExternalLocationsAPI(self._api_client)
+        self._files = _make_files_client(self._api_client, self._config)
+        self._functions = service.catalog.FunctionsAPI(self._api_client)
+        self._genie = service.dashboards.GenieAPI(self._api_client)
+        self._git_credentials = service.workspace.GitCredentialsAPI(self._api_client)
+        self._global_init_scripts = service.compute.GlobalInitScriptsAPI(self._api_client)
+        self._grants = service.catalog.GrantsAPI(self._api_client)
+        self._groups = service.iam.GroupsAPI(self._api_client)
+        self._instance_pools = service.compute.InstancePoolsAPI(self._api_client)
+        self._instance_profiles = service.compute.InstanceProfilesAPI(self._api_client)
+        self._ip_access_lists = service.settings.IpAccessListsAPI(self._api_client)
+        self._jobs = JobsExt(self._api_client)
+        self._lakeview = service.dashboards.LakeviewAPI(self._api_client)
+        self._lakeview_embedded = service.dashboards.LakeviewEmbeddedAPI(self._api_client)
+        self._libraries = service.compute.LibrariesAPI(self._api_client)
+        self._metastores = service.catalog.MetastoresAPI(self._api_client)
+        self._model_registry = service.ml.ModelRegistryAPI(self._api_client)
+        self._model_versions = service.catalog.ModelVersionsAPI(self._api_client)
+        self._notification_destinations = service.settings.NotificationDestinationsAPI(self._api_client)
+        self._online_tables = service.catalog.OnlineTablesAPI(self._api_client)
+        self._permission_migration = service.iam.PermissionMigrationAPI(self._api_client)
+        self._permissions = service.iam.PermissionsAPI(self._api_client)
+        self._pipelines = service.pipelines.PipelinesAPI(self._api_client)
+        self._policy_compliance_for_clusters = service.compute.PolicyComplianceForClustersAPI(
+            self._api_client)
+        self._policy_compliance_for_jobs = service.jobs.PolicyComplianceForJobsAPI(self._api_client)
+        self._policy_families = service.compute.PolicyFamiliesAPI(self._api_client)
+        self._provider_exchange_filters = service.marketplace.ProviderExchangeFiltersAPI(self._api_client)
+        self._provider_exchanges = service.marketplace.ProviderExchangesAPI(self._api_client)
+        self._provider_files = service.marketplace.ProviderFilesAPI(self._api_client)
+        self._provider_listings = service.marketplace.ProviderListingsAPI(self._api_client)
+        self._provider_personalization_requests = service.marketplace.ProviderPersonalizationRequestsAPI(
             self._api_client)
-        self._provider_providers = ProviderProvidersAPI(self._api_client)
-        self._providers = ProvidersAPI(self._api_client)
-        self._quality_monitors = QualityMonitorsAPI(self._api_client)
-        self._queries = QueriesAPI(self._api_client)
-        self._queries_legacy = QueriesLegacyAPI(self._api_client)
-        self._query_history = QueryHistoryAPI(self._api_client)
-        self._query_visualizations = QueryVisualizationsAPI(self._api_client)
-        self._query_visualizations_legacy = QueryVisualizationsLegacyAPI(self._api_client)
-        self._recipient_activation = RecipientActivationAPI(self._api_client)
-        self._recipients = RecipientsAPI(self._api_client)
-        self._registered_models = RegisteredModelsAPI(self._api_client)
-        self._repos = ReposAPI(self._api_client)
-        self._resource_quotas = ResourceQuotasAPI(self._api_client)
-        self._schemas = SchemasAPI(self._api_client)
-        self._secrets = SecretsAPI(self._api_client)
-        self._service_principals = ServicePrincipalsAPI(self._api_client)
+        self._provider_provider_analytics_dashboards = service.marketplace.ProviderProviderAnalyticsDashboardsAPI(
+            self._api_client)
+        self._provider_providers = service.marketplace.ProviderProvidersAPI(self._api_client)
+        self._providers = service.sharing.ProvidersAPI(self._api_client)
+        self._quality_monitors = service.catalog.QualityMonitorsAPI(self._api_client)
+        self._queries = service.sql.QueriesAPI(self._api_client)
+        self._queries_legacy = service.sql.QueriesLegacyAPI(self._api_client)
+        self._query_execution = service.dashboards.QueryExecutionAPI(self._api_client)
+        self._query_history = service.sql.QueryHistoryAPI(self._api_client)
+        self._query_visualizations = service.sql.QueryVisualizationsAPI(self._api_client)
+        self._query_visualizations_legacy = service.sql.QueryVisualizationsLegacyAPI(self._api_client)
+        self._recipient_activation = service.sharing.RecipientActivationAPI(self._api_client)
+        self._recipients = service.sharing.RecipientsAPI(self._api_client)
+        self._redash_config = service.sql.RedashConfigAPI(self._api_client)
+        self._registered_models = service.catalog.RegisteredModelsAPI(self._api_client)
+        self._repos = service.workspace.ReposAPI(self._api_client)
+        self._resource_quotas = service.catalog.ResourceQuotasAPI(self._api_client)
+        self._schemas = service.catalog.SchemasAPI(self._api_client)
+        self._secrets = service.workspace.SecretsAPI(self._api_client)
+        self._service_principals = service.iam.ServicePrincipalsAPI(self._api_client)
         self._serving_endpoints = serving_endpoints
-        self._serving_endpoints_data_plane = ServingEndpointsDataPlaneAPI(self._api_client, serving_endpoints)
-        self._settings = SettingsAPI(self._api_client)
-        self._shares = SharesAPI(self._api_client)
-        self._statement_execution = StatementExecutionAPI(self._api_client)
-        self._storage_credentials = StorageCredentialsAPI(self._api_client)
-        self._system_schemas = SystemSchemasAPI(self._api_client)
-        self._table_constraints = TableConstraintsAPI(self._api_client)
-        self._tables = TablesAPI(self._api_client)
-        self._temporary_table_credentials = TemporaryTableCredentialsAPI(self._api_client)
-        self._token_management = TokenManagementAPI(self._api_client)
-        self._tokens = TokensAPI(self._api_client)
-        self._users = UsersAPI(self._api_client)
-        self._vector_search_endpoints = VectorSearchEndpointsAPI(self._api_client)
-        self._vector_search_indexes = VectorSearchIndexesAPI(self._api_client)
-        self._volumes = VolumesAPI(self._api_client)
-        self._warehouses = WarehousesAPI(self._api_client)
+        self._serving_endpoints_data_plane = service.serving.ServingEndpointsDataPlaneAPI(
+            self._api_client, serving_endpoints)
+        self._settings = service.settings.SettingsAPI(self._api_client)
+        self._shares = service.sharing.SharesAPI(self._api_client)
+        self._statement_execution = service.sql.StatementExecutionAPI(self._api_client)
+        self._storage_credentials = service.catalog.StorageCredentialsAPI(self._api_client)
+        self._system_schemas = service.catalog.SystemSchemasAPI(self._api_client)
+        self._table_constraints = service.catalog.TableConstraintsAPI(self._api_client)
+        self._tables = service.catalog.TablesAPI(self._api_client)
+        self._temporary_table_credentials = service.catalog.TemporaryTableCredentialsAPI(self._api_client)
+        self._token_management = service.settings.TokenManagementAPI(self._api_client)
+        self._tokens = service.settings.TokensAPI(self._api_client)
+        self._users = service.iam.UsersAPI(self._api_client)
+        self._vector_search_endpoints = service.vectorsearch.VectorSearchEndpointsAPI(self._api_client)
+        self._vector_search_indexes = service.vectorsearch.VectorSearchIndexesAPI(self._api_client)
+        self._volumes = service.catalog.VolumesAPI(self._api_client)
+        self._warehouses = service.sql.WarehousesAPI(self._api_client)
         self._workspace = WorkspaceExt(self._api_client)
-        self._workspace_bindings = WorkspaceBindingsAPI(self._api_client)
-        self._workspace_conf = WorkspaceConfAPI(self._api_client)
+        self._workspace_bindings = service.catalog.WorkspaceBindingsAPI(self._api_client)
+        self._workspace_conf = service.settings.WorkspaceConfAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -283,42 +311,57 @@ def dbutils(self) -> dbutils.RemoteDbUtils:
         return self._dbutils
 
     @property
-    def account_access_control_proxy(self) -> AccountAccessControlProxyAPI:
+    def access_control(self) -> service.iam.AccessControlAPI:
+        """Rule based Access Control for Databricks Resources."""
+        return self._access_control
+
+    @property
+    def account_access_control_proxy(self) -> service.iam.AccountAccessControlProxyAPI:
         """These APIs manage access rules on resources in an account."""
         return self._account_access_control_proxy
 
     @property
-    def alerts(self) -> AlertsAPI:
+    def alerts(self) -> service.sql.AlertsAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts
 
     @property
-    def alerts_legacy(self) -> AlertsLegacyAPI:
+    def alerts_legacy(self) -> service.sql.AlertsLegacyAPI:
         """The alerts API can be used to perform CRUD operations on alerts."""
         return self._alerts_legacy
 
     @property
-    def apps(self) -> AppsAPI:
+    def apps(self) -> service.apps.AppsAPI:
         """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend Databricks services, and enable users to interact through single sign-on."""
         return self._apps
 
     @property
-    def artifact_allowlists(self) -> ArtifactAllowlistsAPI:
+    def artifact_allowlists(self) -> service.catalog.ArtifactAllowlistsAPI:
         """In Databricks Runtime 13.3 and above, you can add libraries and init scripts to the `allowlist` in UC so that users can leverage these artifacts on compute configured with shared access mode."""
         return self._artifact_allowlists
 
     @property
-    def catalogs(self) -> CatalogsAPI:
+    def catalogs(self) -> service.catalog.CatalogsAPI:
         """A catalog is the first layer of Unity Catalog’s three-level namespace."""
         return self._catalogs
 
     @property
-    def clean_rooms(self) -> CleanRoomsAPI:
-        """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases."""
+    def clean_room_assets(self) -> service.cleanrooms.CleanRoomAssetsAPI:
+        """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the clean room."""
+        return self._clean_room_assets
+
+    @property
+    def clean_room_task_runs(self) -> service.cleanrooms.CleanRoomTaskRunsAPI:
+        """Clean room task runs are the executions of notebooks in a clean room."""
+        return self._clean_room_task_runs
+
+    @property
+    def clean_rooms(self) -> service.cleanrooms.CleanRoomsAPI:
+        """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting environment where multiple parties can work together on sensitive enterprise data without direct access to each other’s data."""
         return self._clean_rooms
 
     @property
-    def cluster_policies(self) -> ClusterPoliciesAPI:
+    def cluster_policies(self) -> service.compute.ClusterPoliciesAPI:
         """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
         return self._cluster_policies
 
@@ -328,62 +371,67 @@ def clusters(self) -> ClustersExt:
         return self._clusters
 
     @property
-    def command_execution(self) -> CommandExecutionAPI:
+    def command_execution(self) -> service.compute.CommandExecutionAPI:
         """This API allows execution of Python, Scala, SQL, or R commands on running Databricks Clusters."""
         return self._command_execution
 
     @property
-    def connections(self) -> ConnectionsAPI:
+    def connections(self) -> service.catalog.ConnectionsAPI:
         """Connections allow for creating a connection to an external data source."""
         return self._connections
 
     @property
-    def consumer_fulfillments(self) -> ConsumerFulfillmentsAPI:
+    def consumer_fulfillments(self) -> service.marketplace.ConsumerFulfillmentsAPI:
         """Fulfillments are entities that allow consumers to preview installations."""
         return self._consumer_fulfillments
 
     @property
-    def consumer_installations(self) -> ConsumerInstallationsAPI:
+    def consumer_installations(self) -> service.marketplace.ConsumerInstallationsAPI:
         """Installations are entities that allow consumers to interact with Databricks Marketplace listings."""
         return self._consumer_installations
 
     @property
-    def consumer_listings(self) -> ConsumerListingsAPI:
+    def consumer_listings(self) -> service.marketplace.ConsumerListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._consumer_listings
 
     @property
-    def consumer_personalization_requests(self) -> ConsumerPersonalizationRequestsAPI:
+    def consumer_personalization_requests(self) -> service.marketplace.ConsumerPersonalizationRequestsAPI:
         """Personalization Requests allow customers to interact with the individualized Marketplace listing flow."""
         return self._consumer_personalization_requests
 
     @property
-    def consumer_providers(self) -> ConsumerProvidersAPI:
+    def consumer_providers(self) -> service.marketplace.ConsumerProvidersAPI:
         """Providers are the entities that publish listings to the Marketplace."""
         return self._consumer_providers
 
     @property
-    def credentials_manager(self) -> CredentialsManagerAPI:
+    def credentials(self) -> service.catalog.CredentialsAPI:
+        """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
+        return self._credentials
+
+    @property
+    def credentials_manager(self) -> service.settings.CredentialsManagerAPI:
         """Credentials manager interacts with with Identity Providers to to perform token exchanges using stored credentials and refresh tokens."""
         return self._credentials_manager
 
     @property
-    def current_user(self) -> CurrentUserAPI:
+    def current_user(self) -> service.iam.CurrentUserAPI:
         """This API allows retrieving information about currently authenticated user or service principal."""
         return self._current_user
 
     @property
-    def dashboard_widgets(self) -> DashboardWidgetsAPI:
+    def dashboard_widgets(self) -> service.sql.DashboardWidgetsAPI:
         """This is an evolving API that facilitates the addition and removal of widgets from existing dashboards within the Databricks Workspace."""
         return self._dashboard_widgets
 
     @property
-    def dashboards(self) -> DashboardsAPI:
+    def dashboards(self) -> service.sql.DashboardsAPI:
         """In general, there is little need to modify dashboards using the API."""
         return self._dashboards
 
     @property
-    def data_sources(self) -> DataSourcesAPI:
+    def data_sources(self) -> service.sql.DataSourcesAPI:
         """This API is provided to assist you in making new query objects."""
         return self._data_sources
 
@@ -393,247 +441,263 @@ def dbfs(self) -> DbfsExt:
         return self._dbfs
 
     @property
-    def dbsql_permissions(self) -> DbsqlPermissionsAPI:
+    def dbsql_permissions(self) -> service.sql.DbsqlPermissionsAPI:
         """The SQL Permissions API is similar to the endpoints of the :method:permissions/set."""
         return self._dbsql_permissions
 
     @property
-    def experiments(self) -> ExperimentsAPI:
+    def experiments(self) -> service.ml.ExperimentsAPI:
         """Experiments are the primary unit of organization in MLflow; all MLflow runs belong to an experiment."""
         return self._experiments
 
     @property
-    def external_locations(self) -> ExternalLocationsAPI:
+    def external_locations(self) -> service.catalog.ExternalLocationsAPI:
         """An external location is an object that combines a cloud storage path with a storage credential that authorizes access to the cloud storage path."""
         return self._external_locations
 
     @property
-    def files(self) -> FilesAPI:
+    def files(self) -> service.files.FilesAPI:
         """The Files API is a standard HTTP API that allows you to read, write, list, and delete files and directories by referring to their URI."""
         return self._files
 
     @property
-    def functions(self) -> FunctionsAPI:
+    def functions(self) -> service.catalog.FunctionsAPI:
         """Functions implement User-Defined Functions (UDFs) in Unity Catalog."""
         return self._functions
 
     @property
-    def genie(self) -> GenieAPI:
+    def genie(self) -> service.dashboards.GenieAPI:
         """Genie provides a no-code experience for business users, powered by AI/BI."""
         return self._genie
 
     @property
-    def git_credentials(self) -> GitCredentialsAPI:
+    def git_credentials(self) -> service.workspace.GitCredentialsAPI:
         """Registers personal access token for Databricks to do operations on behalf of the user."""
         return self._git_credentials
 
     @property
-    def global_init_scripts(self) -> GlobalInitScriptsAPI:
+    def global_init_scripts(self) -> service.compute.GlobalInitScriptsAPI:
         """The Global Init Scripts API enables Workspace administrators to configure global initialization scripts for their workspace."""
         return self._global_init_scripts
 
     @property
-    def grants(self) -> GrantsAPI:
+    def grants(self) -> service.catalog.GrantsAPI:
         """In Unity Catalog, data is secure by default."""
         return self._grants
 
     @property
-    def groups(self) -> GroupsAPI:
+    def groups(self) -> service.iam.GroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks workspace, data, and other securable objects."""
         return self._groups
 
     @property
-    def instance_pools(self) -> InstancePoolsAPI:
+    def instance_pools(self) -> service.compute.InstancePoolsAPI:
         """Instance Pools API are used to create, edit, delete and list instance pools by using ready-to-use cloud instances which reduces a cluster start and auto-scaling times."""
         return self._instance_pools
 
     @property
-    def instance_profiles(self) -> InstanceProfilesAPI:
+    def instance_profiles(self) -> service.compute.InstanceProfilesAPI:
         """The Instance Profiles API allows admins to add, list, and remove instance profiles that users can launch clusters with."""
         return self._instance_profiles
 
     @property
-    def ip_access_lists(self) -> IpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.IpAccessListsAPI:
         """IP Access List enables admins to configure IP access lists."""
         return self._ip_access_lists
 
     @property
-    def jobs(self) -> JobsAPI:
+    def jobs(self) -> JobsExt:
         """The Jobs API allows you to create, edit, and delete jobs."""
         return self._jobs
 
     @property
-    def lakeview(self) -> LakeviewAPI:
+    def lakeview(self) -> service.dashboards.LakeviewAPI:
         """These APIs provide specific management operations for Lakeview dashboards."""
         return self._lakeview
 
     @property
-    def libraries(self) -> LibrariesAPI:
+    def lakeview_embedded(self) -> service.dashboards.LakeviewEmbeddedAPI:
+        """Token-based Lakeview APIs for embedding dashboards in external applications."""
+        return self._lakeview_embedded
+
+    @property
+    def libraries(self) -> service.compute.LibrariesAPI:
         """The Libraries API allows you to install and uninstall libraries and get the status of libraries on a cluster."""
         return self._libraries
 
     @property
-    def metastores(self) -> MetastoresAPI:
+    def metastores(self) -> service.catalog.MetastoresAPI:
         """A metastore is the top-level container of objects in Unity Catalog."""
         return self._metastores
 
     @property
-    def model_registry(self) -> ModelRegistryAPI:
+    def model_registry(self) -> service.ml.ModelRegistryAPI:
         """Note: This API reference documents APIs for the Workspace Model Registry."""
         return self._model_registry
 
     @property
-    def model_versions(self) -> ModelVersionsAPI:
+    def model_versions(self) -> service.catalog.ModelVersionsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._model_versions
 
     @property
-    def notification_destinations(self) -> NotificationDestinationsAPI:
+    def notification_destinations(self) -> service.settings.NotificationDestinationsAPI:
         """The notification destinations API lets you programmatically manage a workspace's notification destinations."""
         return self._notification_destinations
 
     @property
-    def online_tables(self) -> OnlineTablesAPI:
+    def online_tables(self) -> service.catalog.OnlineTablesAPI:
         """Online tables provide lower latency and higher QPS access to data from Delta tables."""
         return self._online_tables
 
     @property
-    def permission_migration(self) -> PermissionMigrationAPI:
+    def permission_migration(self) -> service.iam.PermissionMigrationAPI:
         """APIs for migrating acl permissions, used only by the ucx tool: https://github.com/databrickslabs/ucx."""
         return self._permission_migration
 
     @property
-    def permissions(self) -> PermissionsAPI:
+    def permissions(self) -> service.iam.PermissionsAPI:
         """Permissions API are used to create read, write, edit, update and manage access for various users on different objects and endpoints."""
         return self._permissions
 
     @property
-    def pipelines(self) -> PipelinesAPI:
+    def pipelines(self) -> service.pipelines.PipelinesAPI:
         """The Delta Live Tables API allows you to create, edit, delete, start, and view details about pipelines."""
         return self._pipelines
 
     @property
-    def policy_compliance_for_clusters(self) -> PolicyComplianceForClustersAPI:
+    def policy_compliance_for_clusters(self) -> service.compute.PolicyComplianceForClustersAPI:
         """The policy compliance APIs allow you to view and manage the policy compliance status of clusters in your workspace."""
         return self._policy_compliance_for_clusters
 
     @property
-    def policy_compliance_for_jobs(self) -> PolicyComplianceForJobsAPI:
+    def policy_compliance_for_jobs(self) -> service.jobs.PolicyComplianceForJobsAPI:
         """The compliance APIs allow you to view and manage the policy compliance status of jobs in your workspace."""
         return self._policy_compliance_for_jobs
 
     @property
-    def policy_families(self) -> PolicyFamiliesAPI:
+    def policy_families(self) -> service.compute.PolicyFamiliesAPI:
         """View available policy families."""
         return self._policy_families
 
     @property
-    def provider_exchange_filters(self) -> ProviderExchangeFiltersAPI:
+    def provider_exchange_filters(self) -> service.marketplace.ProviderExchangeFiltersAPI:
         """Marketplace exchanges filters curate which groups can access an exchange."""
         return self._provider_exchange_filters
 
     @property
-    def provider_exchanges(self) -> ProviderExchangesAPI:
+    def provider_exchanges(self) -> service.marketplace.ProviderExchangesAPI:
         """Marketplace exchanges allow providers to share their listings with a curated set of customers."""
         return self._provider_exchanges
 
     @property
-    def provider_files(self) -> ProviderFilesAPI:
+    def provider_files(self) -> service.marketplace.ProviderFilesAPI:
         """Marketplace offers a set of file APIs for various purposes such as preview notebooks and provider icons."""
         return self._provider_files
 
     @property
-    def provider_listings(self) -> ProviderListingsAPI:
+    def provider_listings(self) -> service.marketplace.ProviderListingsAPI:
         """Listings are the core entities in the Marketplace."""
         return self._provider_listings
 
     @property
-    def provider_personalization_requests(self) -> ProviderPersonalizationRequestsAPI:
+    def provider_personalization_requests(self) -> service.marketplace.ProviderPersonalizationRequestsAPI:
         """Personalization requests are an alternate to instantly available listings."""
         return self._provider_personalization_requests
 
     @property
-    def provider_provider_analytics_dashboards(self) -> ProviderProviderAnalyticsDashboardsAPI:
+    def provider_provider_analytics_dashboards(
+            self) -> service.marketplace.ProviderProviderAnalyticsDashboardsAPI:
         """Manage templated analytics solution for providers."""
         return self._provider_provider_analytics_dashboards
 
     @property
-    def provider_providers(self) -> ProviderProvidersAPI:
+    def provider_providers(self) -> service.marketplace.ProviderProvidersAPI:
         """Providers are entities that manage assets in Marketplace."""
         return self._provider_providers
 
     @property
-    def providers(self) -> ProvidersAPI:
+    def providers(self) -> service.sharing.ProvidersAPI:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers
 
     @property
-    def quality_monitors(self) -> QualityMonitorsAPI:
+    def quality_monitors(self) -> service.catalog.QualityMonitorsAPI:
         """A monitor computes and monitors data or model quality metrics for a table over time."""
         return self._quality_monitors
 
     @property
-    def queries(self) -> QueriesAPI:
+    def queries(self) -> service.sql.QueriesAPI:
         """The queries API can be used to perform CRUD operations on queries."""
         return self._queries
 
     @property
-    def queries_legacy(self) -> QueriesLegacyAPI:
+    def queries_legacy(self) -> service.sql.QueriesLegacyAPI:
         """These endpoints are used for CRUD operations on query definitions."""
         return self._queries_legacy
 
     @property
-    def query_history(self) -> QueryHistoryAPI:
+    def query_execution(self) -> service.dashboards.QueryExecutionAPI:
+        """Query execution APIs for AI / BI Dashboards."""
+        return self._query_execution
+
+    @property
+    def query_history(self) -> service.sql.QueryHistoryAPI:
         """A service responsible for storing and retrieving the list of queries run against SQL endpoints and serverless compute."""
         return self._query_history
 
     @property
-    def query_visualizations(self) -> QueryVisualizationsAPI:
+    def query_visualizations(self) -> service.sql.QueryVisualizationsAPI:
         """This is an evolving API that facilitates the addition and removal of visualizations from existing queries in the Databricks Workspace."""
         return self._query_visualizations
 
     @property
-    def query_visualizations_legacy(self) -> QueryVisualizationsLegacyAPI:
+    def query_visualizations_legacy(self) -> service.sql.QueryVisualizationsLegacyAPI:
         """This is an evolving API that facilitates the addition and removal of vizualisations from existing queries within the Databricks Workspace."""
         return self._query_visualizations_legacy
 
     @property
-    def recipient_activation(self) -> RecipientActivationAPI:
+    def recipient_activation(self) -> service.sharing.RecipientActivationAPI:
         """The Recipient Activation API is only applicable in the open sharing model where the recipient object has the authentication type of `TOKEN`."""
         return self._recipient_activation
 
     @property
-    def recipients(self) -> RecipientsAPI:
+    def recipients(self) -> service.sharing.RecipientsAPI:
         """A recipient is an object you create using :method:recipients/create to represent an organization which you want to allow access shares."""
         return self._recipients
 
     @property
-    def registered_models(self) -> RegisteredModelsAPI:
+    def redash_config(self) -> service.sql.RedashConfigAPI:
+        """Redash V2 service for workspace configurations (internal)."""
+        return self._redash_config
+
+    @property
+    def registered_models(self) -> service.catalog.RegisteredModelsAPI:
         """Databricks provides a hosted version of MLflow Model Registry in Unity Catalog."""
         return self._registered_models
 
     @property
-    def repos(self) -> ReposAPI:
+    def repos(self) -> service.workspace.ReposAPI:
         """The Repos API allows users to manage their git repos."""
         return self._repos
 
     @property
-    def resource_quotas(self) -> ResourceQuotasAPI:
+    def resource_quotas(self) -> service.catalog.ResourceQuotasAPI:
         """Unity Catalog enforces resource quotas on all securable objects, which limits the number of resources that can be created."""
         return self._resource_quotas
 
     @property
-    def schemas(self) -> SchemasAPI:
+    def schemas(self) -> service.catalog.SchemasAPI:
         """A schema (also called a database) is the second layer of Unity Catalog’s three-level namespace."""
         return self._schemas
 
     @property
-    def secrets(self) -> SecretsAPI:
+    def secrets(self) -> service.workspace.SecretsAPI:
         """The Secrets API allows you to manage secrets, secret scopes, and access permissions."""
         return self._secrets
 
     @property
-    def service_principals(self) -> ServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.ServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
@@ -643,82 +707,82 @@ def serving_endpoints(self) -> ServingEndpointsExt:
         return self._serving_endpoints
 
     @property
-    def serving_endpoints_data_plane(self) -> ServingEndpointsDataPlaneAPI:
+    def serving_endpoints_data_plane(self) -> service.serving.ServingEndpointsDataPlaneAPI:
         """Serving endpoints DataPlane provides a set of operations to interact with data plane endpoints for Serving endpoints service."""
         return self._serving_endpoints_data_plane
 
     @property
-    def settings(self) -> SettingsAPI:
+    def settings(self) -> service.settings.SettingsAPI:
         """Workspace Settings API allows users to manage settings at the workspace level."""
         return self._settings
 
     @property
-    def shares(self) -> SharesAPI:
+    def shares(self) -> service.sharing.SharesAPI:
         """A share is a container instantiated with :method:shares/create."""
         return self._shares
 
     @property
-    def statement_execution(self) -> StatementExecutionAPI:
+    def statement_execution(self) -> service.sql.StatementExecutionAPI:
         """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and fetch the result."""
         return self._statement_execution
 
     @property
-    def storage_credentials(self) -> StorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.StorageCredentialsAPI:
         """A storage credential represents an authentication and authorization mechanism for accessing data stored on your cloud tenant."""
         return self._storage_credentials
 
     @property
-    def system_schemas(self) -> SystemSchemasAPI:
+    def system_schemas(self) -> service.catalog.SystemSchemasAPI:
         """A system schema is a schema that lives within the system catalog."""
         return self._system_schemas
 
     @property
-    def table_constraints(self) -> TableConstraintsAPI:
+    def table_constraints(self) -> service.catalog.TableConstraintsAPI:
         """Primary key and foreign key constraints encode relationships between fields in tables."""
         return self._table_constraints
 
     @property
-    def tables(self) -> TablesAPI:
+    def tables(self) -> service.catalog.TablesAPI:
         """A table resides in the third layer of Unity Catalog’s three-level namespace."""
         return self._tables
 
     @property
-    def temporary_table_credentials(self) -> TemporaryTableCredentialsAPI:
+    def temporary_table_credentials(self) -> service.catalog.TemporaryTableCredentialsAPI:
         """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage locationswhere table data is stored in Databricks."""
         return self._temporary_table_credentials
 
     @property
-    def token_management(self) -> TokenManagementAPI:
+    def token_management(self) -> service.settings.TokenManagementAPI:
         """Enables administrators to get all tokens and delete tokens for other users."""
         return self._token_management
 
     @property
-    def tokens(self) -> TokensAPI:
+    def tokens(self) -> service.settings.TokensAPI:
         """The Token API allows you to create, list, and revoke tokens that can be used to authenticate and access Databricks REST APIs."""
         return self._tokens
 
     @property
-    def users(self) -> UsersAPI:
+    def users(self) -> service.iam.UsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vector_search_endpoints(self) -> VectorSearchEndpointsAPI:
+    def vector_search_endpoints(self) -> service.vectorsearch.VectorSearchEndpointsAPI:
         """**Endpoint**: Represents the compute resources to host vector search indexes."""
         return self._vector_search_endpoints
 
     @property
-    def vector_search_indexes(self) -> VectorSearchIndexesAPI:
+    def vector_search_indexes(self) -> service.vectorsearch.VectorSearchIndexesAPI:
         """**Index**: An efficient representation of your embedding vectors that supports real-time and efficient approximate nearest neighbor (ANN) search queries."""
         return self._vector_search_indexes
 
     @property
-    def volumes(self) -> VolumesAPI:
+    def volumes(self) -> service.catalog.VolumesAPI:
         """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing files."""
         return self._volumes
 
     @property
-    def warehouses(self) -> WarehousesAPI:
+    def warehouses(self) -> service.sql.WarehousesAPI:
         """A SQL warehouse is a compute resource that lets you run SQL commands on data objects within Databricks SQL."""
         return self._warehouses
 
@@ -728,12 +792,12 @@ def workspace(self) -> WorkspaceExt:
         return self._workspace
 
     @property
-    def workspace_bindings(self) -> WorkspaceBindingsAPI:
+    def workspace_bindings(self) -> service.catalog.WorkspaceBindingsAPI:
         """A securable in Databricks can be configured as __OPEN__ or __ISOLATED__."""
         return self._workspace_bindings
 
     @property
-    def workspace_conf(self) -> WorkspaceConfAPI:
+    def workspace_conf(self) -> service.settings.WorkspaceConfAPI:
         """This API allows updating known workspace settings for advanced users."""
         return self._workspace_conf
 
@@ -807,32 +871,36 @@ def __init__(self,
                                    product_version=product_version)
         self._config = config.copy()
         self._api_client = client.ApiClient(self._config)
-        self._access_control = AccountAccessControlAPI(self._api_client)
-        self._billable_usage = BillableUsageAPI(self._api_client)
-        self._credentials = CredentialsAPI(self._api_client)
-        self._custom_app_integration = CustomAppIntegrationAPI(self._api_client)
-        self._encryption_keys = EncryptionKeysAPI(self._api_client)
-        self._groups = AccountGroupsAPI(self._api_client)
-        self._ip_access_lists = AccountIpAccessListsAPI(self._api_client)
-        self._log_delivery = LogDeliveryAPI(self._api_client)
-        self._metastore_assignments = AccountMetastoreAssignmentsAPI(self._api_client)
-        self._metastores = AccountMetastoresAPI(self._api_client)
-        self._network_connectivity = NetworkConnectivityAPI(self._api_client)
-        self._networks = NetworksAPI(self._api_client)
-        self._o_auth_published_apps = OAuthPublishedAppsAPI(self._api_client)
-        self._private_access = PrivateAccessAPI(self._api_client)
-        self._published_app_integration = PublishedAppIntegrationAPI(self._api_client)
-        self._service_principal_secrets = ServicePrincipalSecretsAPI(self._api_client)
-        self._service_principals = AccountServicePrincipalsAPI(self._api_client)
-        self._settings = AccountSettingsAPI(self._api_client)
-        self._storage = StorageAPI(self._api_client)
-        self._storage_credentials = AccountStorageCredentialsAPI(self._api_client)
-        self._usage_dashboards = UsageDashboardsAPI(self._api_client)
-        self._users = AccountUsersAPI(self._api_client)
-        self._vpc_endpoints = VpcEndpointsAPI(self._api_client)
-        self._workspace_assignment = WorkspaceAssignmentAPI(self._api_client)
-        self._workspaces = WorkspacesAPI(self._api_client)
-        self._budgets = BudgetsAPI(self._api_client)
+        self._access_control = service.iam.AccountAccessControlAPI(self._api_client)
+        self._billable_usage = service.billing.BillableUsageAPI(self._api_client)
+        self._budget_policy = service.billing.BudgetPolicyAPI(self._api_client)
+        self._credentials = service.provisioning.CredentialsAPI(self._api_client)
+        self._custom_app_integration = service.oauth2.CustomAppIntegrationAPI(self._api_client)
+        self._encryption_keys = service.provisioning.EncryptionKeysAPI(self._api_client)
+        self._federation_policy = service.oauth2.AccountFederationPolicyAPI(self._api_client)
+        self._groups = service.iam.AccountGroupsAPI(self._api_client)
+        self._ip_access_lists = service.settings.AccountIpAccessListsAPI(self._api_client)
+        self._log_delivery = service.billing.LogDeliveryAPI(self._api_client)
+        self._metastore_assignments = service.catalog.AccountMetastoreAssignmentsAPI(self._api_client)
+        self._metastores = service.catalog.AccountMetastoresAPI(self._api_client)
+        self._network_connectivity = service.settings.NetworkConnectivityAPI(self._api_client)
+        self._networks = service.provisioning.NetworksAPI(self._api_client)
+        self._o_auth_published_apps = service.oauth2.OAuthPublishedAppsAPI(self._api_client)
+        self._private_access = service.provisioning.PrivateAccessAPI(self._api_client)
+        self._published_app_integration = service.oauth2.PublishedAppIntegrationAPI(self._api_client)
+        self._service_principal_federation_policy = service.oauth2.ServicePrincipalFederationPolicyAPI(
+            self._api_client)
+        self._service_principal_secrets = service.oauth2.ServicePrincipalSecretsAPI(self._api_client)
+        self._service_principals = service.iam.AccountServicePrincipalsAPI(self._api_client)
+        self._settings = service.settings.AccountSettingsAPI(self._api_client)
+        self._storage = service.provisioning.StorageAPI(self._api_client)
+        self._storage_credentials = service.catalog.AccountStorageCredentialsAPI(self._api_client)
+        self._usage_dashboards = service.billing.UsageDashboardsAPI(self._api_client)
+        self._users = service.iam.AccountUsersAPI(self._api_client)
+        self._vpc_endpoints = service.provisioning.VpcEndpointsAPI(self._api_client)
+        self._workspace_assignment = service.iam.WorkspaceAssignmentAPI(self._api_client)
+        self._workspaces = service.provisioning.WorkspacesAPI(self._api_client)
+        self._budgets = service.billing.BudgetsAPI(self._api_client)
 
     @property
     def config(self) -> client.Config:
@@ -843,132 +911,147 @@ def api_client(self) -> client.ApiClient:
         return self._api_client
 
     @property
-    def access_control(self) -> AccountAccessControlAPI:
+    def access_control(self) -> service.iam.AccountAccessControlAPI:
         """These APIs manage access rules on resources in an account."""
         return self._access_control
 
     @property
-    def billable_usage(self) -> BillableUsageAPI:
+    def billable_usage(self) -> service.billing.BillableUsageAPI:
         """This API allows you to download billable usage logs for the specified account and date range."""
         return self._billable_usage
 
     @property
-    def credentials(self) -> CredentialsAPI:
+    def budget_policy(self) -> service.billing.BudgetPolicyAPI:
+        """A service serves REST API about Budget policies."""
+        return self._budget_policy
+
+    @property
+    def credentials(self) -> service.provisioning.CredentialsAPI:
         """These APIs manage credential configurations for this workspace."""
         return self._credentials
 
     @property
-    def custom_app_integration(self) -> CustomAppIntegrationAPI:
+    def custom_app_integration(self) -> service.oauth2.CustomAppIntegrationAPI:
         """These APIs enable administrators to manage custom OAuth app integrations, which is required for adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
         return self._custom_app_integration
 
     @property
-    def encryption_keys(self) -> EncryptionKeysAPI:
+    def encryption_keys(self) -> service.provisioning.EncryptionKeysAPI:
         """These APIs manage encryption key configurations for this workspace (optional)."""
         return self._encryption_keys
 
     @property
-    def groups(self) -> AccountGroupsAPI:
+    def federation_policy(self) -> service.oauth2.AccountFederationPolicyAPI:
+        """These APIs manage account federation policies."""
+        return self._federation_policy
+
+    @property
+    def groups(self) -> service.iam.AccountGroupsAPI:
         """Groups simplify identity management, making it easier to assign access to Databricks account, data, and other securable objects."""
         return self._groups
 
     @property
-    def ip_access_lists(self) -> AccountIpAccessListsAPI:
+    def ip_access_lists(self) -> service.settings.AccountIpAccessListsAPI:
         """The Accounts IP Access List API enables account admins to configure IP access lists for access to the account console."""
         return self._ip_access_lists
 
     @property
-    def log_delivery(self) -> LogDeliveryAPI:
+    def log_delivery(self) -> service.billing.LogDeliveryAPI:
         """These APIs manage log delivery configurations for this account."""
         return self._log_delivery
 
     @property
-    def metastore_assignments(self) -> AccountMetastoreAssignmentsAPI:
+    def metastore_assignments(self) -> service.catalog.AccountMetastoreAssignmentsAPI:
         """These APIs manage metastore assignments to a workspace."""
         return self._metastore_assignments
 
     @property
-    def metastores(self) -> AccountMetastoresAPI:
+    def metastores(self) -> service.catalog.AccountMetastoresAPI:
         """These APIs manage Unity Catalog metastores for an account."""
         return self._metastores
 
     @property
-    def network_connectivity(self) -> NetworkConnectivityAPI:
+    def network_connectivity(self) -> service.settings.NetworkConnectivityAPI:
         """These APIs provide configurations for the network connectivity of your workspaces for serverless compute resources."""
         return self._network_connectivity
 
     @property
-    def networks(self) -> NetworksAPI:
+    def networks(self) -> service.provisioning.NetworksAPI:
         """These APIs manage network configurations for customer-managed VPCs (optional)."""
         return self._networks
 
     @property
-    def o_auth_published_apps(self) -> OAuthPublishedAppsAPI:
+    def o_auth_published_apps(self) -> service.oauth2.OAuthPublishedAppsAPI:
         """These APIs enable administrators to view all the available published OAuth applications in Databricks."""
         return self._o_auth_published_apps
 
     @property
-    def private_access(self) -> PrivateAccessAPI:
+    def private_access(self) -> service.provisioning.PrivateAccessAPI:
         """These APIs manage private access settings for this account."""
         return self._private_access
 
     @property
-    def published_app_integration(self) -> PublishedAppIntegrationAPI:
+    def published_app_integration(self) -> service.oauth2.PublishedAppIntegrationAPI:
         """These APIs enable administrators to manage published OAuth app integrations, which is required for adding/using Published OAuth App Integration like Tableau Desktop for Databricks in AWS cloud."""
         return self._published_app_integration
 
     @property
-    def service_principal_secrets(self) -> ServicePrincipalSecretsAPI:
+    def service_principal_federation_policy(self) -> service.oauth2.ServicePrincipalFederationPolicyAPI:
+        """These APIs manage service principal federation policies."""
+        return self._service_principal_federation_policy
+
+    @property
+    def service_principal_secrets(self) -> service.oauth2.ServicePrincipalSecretsAPI:
         """These APIs enable administrators to manage service principal secrets."""
         return self._service_principal_secrets
 
     @property
-    def service_principals(self) -> AccountServicePrincipalsAPI:
+    def service_principals(self) -> service.iam.AccountServicePrincipalsAPI:
         """Identities for use with jobs, automated tools, and systems such as scripts, apps, and CI/CD platforms."""
         return self._service_principals
 
     @property
-    def settings(self) -> AccountSettingsAPI:
+    def settings(self) -> service.settings.AccountSettingsAPI:
         """Accounts Settings API allows users to manage settings at the account level."""
         return self._settings
 
     @property
-    def storage(self) -> StorageAPI:
+    def storage(self) -> service.provisioning.StorageAPI:
         """These APIs manage storage configurations for this workspace."""
         return self._storage
 
     @property
-    def storage_credentials(self) -> AccountStorageCredentialsAPI:
+    def storage_credentials(self) -> service.catalog.AccountStorageCredentialsAPI:
         """These APIs manage storage credentials for a particular metastore."""
         return self._storage_credentials
 
     @property
-    def usage_dashboards(self) -> UsageDashboardsAPI:
+    def usage_dashboards(self) -> service.billing.UsageDashboardsAPI:
         """These APIs manage usage dashboards for this account."""
         return self._usage_dashboards
 
     @property
-    def users(self) -> AccountUsersAPI:
+    def users(self) -> service.iam.AccountUsersAPI:
         """User identities recognized by Databricks and represented by email addresses."""
         return self._users
 
     @property
-    def vpc_endpoints(self) -> VpcEndpointsAPI:
+    def vpc_endpoints(self) -> service.provisioning.VpcEndpointsAPI:
         """These APIs manage VPC endpoint configurations for this account."""
         return self._vpc_endpoints
 
     @property
-    def workspace_assignment(self) -> WorkspaceAssignmentAPI:
+    def workspace_assignment(self) -> service.iam.WorkspaceAssignmentAPI:
         """The Workspace Permission Assignment API allows you to manage workspace permissions for principals in your account."""
         return self._workspace_assignment
 
     @property
-    def workspaces(self) -> WorkspacesAPI:
+    def workspaces(self) -> service.provisioning.WorkspacesAPI:
         """These APIs manage workspaces for this account."""
         return self._workspaces
 
     @property
-    def budgets(self) -> BudgetsAPI:
+    def budgets(self) -> service.billing.BudgetsAPI:
         """These APIs manage budget configurations for this account."""
         return self._budgets
 
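For orientation, a minimal usage sketch (not part of this patch; assumes a reachable workspace with default auth) of how the generated, service-namespaced accessors above are consumed; `current_user` and `warehouses` are existing WorkspaceClient properties shown in this diff:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    me = w.current_user.me()            # backed by service.iam.CurrentUserAPI
    print(me.user_name)
    for wh in w.warehouses.list():      # backed by service.sql.WarehousesAPI
        print(wh.name)
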
diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 95ce39cbe..58fcb10a5 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -1,5 +1,7 @@
+import io
 import logging
 import urllib.parse
+from abc import ABC, abstractmethod
 from datetime import timedelta
 from types import TracebackType
 from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -50,7 +52,8 @@ def __init__(self,
                  http_timeout_seconds: float = None,
                  extra_error_customizers: List[_ErrorCustomizer] = None,
                  debug_headers: bool = False,
-                 clock: Clock = None):
+                 clock: Clock = None,
+                 streaming_buffer_size: int = 1024 * 1024): # 1MB
         """
         :param debug_truncate_bytes:
         :param retry_timeout_seconds:
@@ -68,6 +71,7 @@ def __init__(self,
         :param extra_error_customizers:
         :param debug_headers: Whether to include debug headers in the request log.
         :param clock: Clock object to use for time-related operations.
+        :param streaming_buffer_size: The size of the buffer to use for streaming responses.
         """
 
         self._debug_truncate_bytes = debug_truncate_bytes or 96
@@ -78,6 +82,7 @@ def __init__(self,
         self._clock = clock or RealClock()
         self._session = requests.Session()
         self._session.auth = self._authenticate
+        self._streaming_buffer_size = streaming_buffer_size
 
         # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
         # retry strategy established in the Databricks SDK for Go. See _is_retryable and
@@ -127,6 +132,14 @@ def flatten_dict(d: Dict[str, Any]) -> Dict[str, Any]:
         flattened = dict(flatten_dict(with_fixed_bools))
         return flattened
 
+    @staticmethod
+    def _is_seekable_stream(data) -> bool:
+        if data is None:
+            return False
+        if not isinstance(data, io.IOBase):
+            return False
+        return data.seekable()
+
     def do(self,
            method: str,
            url: str,
@@ -141,24 +154,52 @@ def do(self,
         if headers is None:
             headers = {}
         headers['User-Agent'] = self._user_agent_base
-        retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
-                            is_retryable=self._is_retryable,
-                            clock=self._clock)
-        response = retryable(self._perform)(method,
-                                            url,
-                                            query=query,
-                                            headers=headers,
-                                            body=body,
-                                            raw=raw,
-                                            files=files,
-                                            data=data,
-                                            auth=auth)
+
+        # Wrap strings and bytes in a seekable stream so that we can rewind them.
+        if isinstance(data, (str, bytes)):
+            data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
+
+        if not data:
+            # The request is not a stream.
+            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                           is_retryable=self._is_retryable,
+                           clock=self._clock)(self._perform)
+        elif self._is_seekable_stream(data):
+            # Keep track of the initial position of the stream so that we can rewind to it
+            # if we need to retry the request.
+            initial_data_position = data.tell()
+
+            def rewind():
+                logger.debug(f"Rewinding input data to offset {initial_data_position} before retry")
+                data.seek(initial_data_position)
+
+            call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
+                           is_retryable=self._is_retryable,
+                           clock=self._clock,
+                           before_retry=rewind)(self._perform)
+        else:
+            # Do not retry if the stream is not seekable. This is necessary to avoid bugs
+            # where the retry doesn't re-read already read data from the stream.
+            logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
+            call = self._perform
+
+        response = call(method,
+                        url,
+                        query=query,
+                        headers=headers,
+                        body=body,
+                        raw=raw,
+                        files=files,
+                        data=data,
+                        auth=auth)
 
         resp = dict()
         for header in response_headers if response_headers else []:
             resp[header] = response.headers.get(Casing.to_header_case(header))
         if raw:
-            resp["contents"] = _StreamingResponse(response)
+            streaming_response = _StreamingResponse(response)
+            streaming_response.set_chunk_size(self._streaming_buffer_size)
+            resp["contents"] = streaming_response
             return resp
         if not len(response.content):
             return resp
@@ -243,8 +284,20 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) ->
         logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())
 
 
+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
 class _StreamingResponse(BinaryIO):
-    _response: requests.Response
+    _response: _RawResponse
     _buffer: bytes
     _content: Union[Iterator[bytes], None]
     _chunk_size: Union[int, None]
@@ -256,7 +309,7 @@ def fileno(self) -> int:
     def flush(self) -> int:
         pass
 
-    def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+    def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
         self._response = response
         self._buffer = b''
         self._content = None
@@ -266,7 +319,7 @@ def _open(self) -> None:
         if self._closed:
             raise ValueError("I/O operation on closed file")
         if not self._content:
-            self._content = self._response.iter_content(chunk_size=self._chunk_size)
+            self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)
 
     def __enter__(self) -> BinaryIO:
         self._open()
@@ -283,6 +336,11 @@ def isatty(self) -> bool:
         return False
 
     def read(self, n: int = -1) -> bytes:
+        """
+        Read up to n bytes from the response stream. If n is negative, read 
+        until the end of the stream. 
+        """
+
         self._open()
         read_everything = n < 0
         remaining_bytes = n
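A standalone sketch of the rewind-on-retry technique that do() uses above (the `perform` callable is a hypothetical stand-in for `_perform`, not an SDK API):

    import io

    def upload_with_retry(perform, data, max_attempts=3):
        # Mirror do(): wrap str/bytes in BytesIO so the body is seekable.
        if isinstance(data, (str, bytes)):
            data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
        seekable = isinstance(data, io.IOBase) and data.seekable()
        attempts = max_attempts if seekable else 1  # non-seekable streams get no retries
        start = data.tell() if seekable else None
        for attempt in range(attempts):
            try:
                return perform(data)
            except IOError:
                if attempt == attempts - 1:
                    raise
                data.seek(start)  # rewind to the initial offset before retrying
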
diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index b4efdf603..a556b5988 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -92,15 +92,21 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
-    def __init__(self,
-                 *,
-                 # Deprecated. Use credentials_strategy instead.
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 product=None,
-                 product_version=None,
-                 clock: Optional[Clock] = None,
-                 **kwargs):
+    enable_experimental_files_api_client: bool = ConfigAttribute(
+        env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+    files_api_client_download_max_total_recovers = None
+    files_api_client_download_max_total_recovers_without_progressing = 1
+
+    def __init__(
+            self,
+            *,
+            # Deprecated. Use credentials_strategy instead.
+            credentials_provider: Optional[CredentialsStrategy] = None,
+            credentials_strategy: Optional[CredentialsStrategy] = None,
+            product=None,
+            product_version=None,
+            clock: Optional[Clock] = None,
+            **kwargs):
         self._header_factory = None
         self._inner = {}
         self._user_agent_other_info = []
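A sketch of toggling the new experimental Files API client (host and token are placeholders; the env var name comes from the ConfigAttribute above):

    import os
    from databricks.sdk.config import Config

    # Either via the environment...
    os.environ['DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT'] = 'true'
    # ...or via a keyword argument, like any other ConfigAttribute:
    cfg = Config(host='https://example.cloud.databricks.com',
                 token='dapi-placeholder',
                 enable_experimental_files_api_client=True)
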
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index a79151b5a..9a5b0748f 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -9,6 +9,7 @@
 import platform
 import subprocess
 import sys
+import threading
 import time
 from datetime import datetime
 from typing import Callable, Dict, List, Optional, Tuple, Union
@@ -167,6 +171,7 @@ def oauth_service_principal(cfg: 'Config') -> Optional[CredentialsProvider]:
     oidc = cfg.oidc_endpoints
     if oidc is None:
         return None
+
     token_source = ClientCredentials(client_id=cfg.client_id,
                                      client_secret=cfg.client_secret,
                                      token_url=oidc.token_endpoint,
@@ -187,6 +192,7 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -194,12 +203,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client
         client_secret = cfg.azure_client_secret
-
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    # Note that these are local to the Python SDK and not reused by other SDKs.
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -209,16 +226,24 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-        # Force a refresh in case the loaded credentials are expired.
-        credentials.token()
-    else:
-        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                   client_id=client_id,
-                                   redirect_url=redirect_url,
-                                   client_secret=client_secret)
-        consent = oauth_client.initiate_consent()
-        if not consent:
-            return None
-        credentials = consent.launch_external_browser()
+        try:
+            # Pro-actively refresh the loaded credentials. This is done
+            # to detect if the token is expired and needs to be refreshed
+            # by going through the OAuth login flow.
+            credentials.token()
+            return credentials(cfg)
+        # TODO: We should ideally use more specific exceptions.
+        except Exception as e:
+            logger.warning(f'Failed to refresh cached token: {e}. Initiating new OAuth login flow')
+
+    oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
+                               client_id=client_id,
+                               redirect_url=redirect_url,
+                               client_secret=client_secret)
+    consent = oauth_client.initiate_consent()
+    if not consent:
+        return None
+
+    credentials = consent.launch_external_browser()
     token_cache.save(credentials)
     return credentials(cfg)
 
@@ -304,11 +343,12 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]:
         # detect Azure AD Tenant ID if it's not specified directly
         token_endpoint = cfg.oidc_endpoints.token_endpoint
         cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, '').split('/')[0]
-    inner = ClientCredentials(client_id=cfg.azure_client_id,
-                              client_secret="", # we have no (rotatable) secrets in OIDC flow
-                              token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
-                              endpoint_params=params,
-                              use_params=True)
+    inner = ClientCredentials(
+        client_id=cfg.azure_client_id,
+        client_secret="", # we have no (rotatable) secrets in OIDC flow
+        token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
+        endpoint_params=params,
+        use_params=True)
 
     def refreshed_headers() -> Dict[str, str]:
         token = inner.token()
@@ -666,12 +706,18 @@ def __init__(self, cfg: 'Config'):
         self.host = cfg.host
 
     def refresh(self) -> Token:
-        resp = requests.get(self.url,
-                            timeout=self._metadata_service_timeout,
-                            headers={
-                                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
-                                self.METADATA_SERVICE_HOST_HEADER: self.host
-                            })
+        resp = requests.get(
+            self.url,
+            timeout=self._metadata_service_timeout,
+            headers={
+                self.METADATA_SERVICE_VERSION_HEADER: self.METADATA_SERVICE_VERSION,
+                self.METADATA_SERVICE_HOST_HEADER: self.host
+            },
+            proxies={
+                # Explicitly exclude localhost from being proxied. This is necessary
+                # for Metadata URLs which typically point to localhost.
+                "no_proxy": "localhost,127.0.0.1"
+            })
         json_resp: dict[str, Union[str, float]] = resp.json()
         access_token = json_resp.get("access_token", None)
         if access_token is None:
@@ -707,14 +753,17 @@ def inner() -> Dict[str, str]:
 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
 class ModelServingAuthProvider():
+    USER_CREDENTIALS = "user_credentials"
+
     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
 
-    def __init__(self):
+    def __init__(self, credential_type: Optional[str]):
         self.expiry_time = -1
         self.current_token = None
         self.refresh_duration = 300 # 300 Seconds
+        self.credential_type = credential_type
 
-    def should_fetch_model_serving_environment_oauth(self) -> bool:
+    def should_fetch_model_serving_environment_oauth() -> bool:
         """
         Check whether this is the model serving environment
         Additionally check if the oauth token file path exists
@@ -723,15 +783,15 @@ def should_fetch_model_serving_environment_oauth(self) -> bool:
         is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
                                    or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
         return (is_in_model_serving_env == "true"
-                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+                and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
 
-    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
+    def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
         # Use Cached value if it is valid
         if self.current_token is not None and self.expiry_time > time.time():
             return self.current_token
 
         try:
-            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
+            with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
                 oauth_dict = json.load(f)
                 self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
                 self.expiry_time = time.time() + self.refresh_duration
@@ -741,20 +811,42 @@ def get_model_dependency_oauth_token(self, should_retry=True) -> str:
                 logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
                                exc_info=e)
                 time.sleep(0.5)
+                return self._get_model_dependency_oauth_token(should_retry=False)
             else:
                 raise RuntimeError(
                     "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
                 ) from e
         return self.current_token
 
+    def _get_invokers_token(self):
+        current_thread = threading.current_thread()
+        thread_data = current_thread.__dict__
+        invokers_token = None
+        if "invokers_token" in thread_data:
+            invokers_token = thread_data["invokers_token"]
+
+        if invokers_token is None:
+            raise RuntimeError("Unable to read Invokers Token in Databricks Model Serving")
+
+        return invokers_token
+
+    def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
+        if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
             return None
 
         # read from DATABRICKS_MODEL_SERVING_HOST_URL if available, otherwise DB_MODEL_SERVING_HOST_URL
         host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
             "DB_MODEL_SERVING_HOST_URL")
+
+        if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
+            return (host, self._get_invokers_token())
+        else:
+            return (host, self._get_model_dependency_oauth_token())
+
+
+def model_serving_auth_visitor(cfg: 'Config',
+                               credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
+    try:
+        model_serving_auth_provider = ModelServingAuthProvider(credential_type)
         host, token = model_serving_auth_provider.get_databricks_host_token()
         if token is None:
             raise ValueError(
@@ -777,7 +882,10 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
     except Exception as e:
         logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
         return None
     logger.info("Using Databricks Model Serving Authentication")
 
     def inner() -> Dict[str, str]:
@@ -788,6 +896,18 @@ def inner() -> Dict[str, str]:
     return inner
 
 
+@credentials_strategy('model-serving', [])
+def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
+    if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+        logger.debug("model-serving: Not in Databricks Model Serving, skipping")
+        return None
+
+    return model_serving_auth_visitor(cfg)
+
+
 class DefaultCredentials:
     """ Select the first applicable credential provider from the chain """
 
@@ -830,3 +950,35 @@ def __call__(self, cfg: 'Config') -> CredentialsProvider:
         raise ValueError(
             f'cannot configure default credentials, please check {auth_flow_url} to configure credentials for your preferred authentication method.'
         )
+
+
+class ModelServingUserCredentials(CredentialsStrategy):
+    """
+    This credential strategy is designed for authenticating the Databricks SDK in the model serving environment using user-specific rights.
+    In the model serving environment, the strategy retrieves a downscoped user token from a thread-local variable.
+    In any other environment, the class falls back to the DefaultCredentials strategy.
+    To use this credential strategy, instantiate the WorkspaceClient with the ModelServingUserCredentials strategy as follows:
+
+    invokers_client = WorkspaceClient(credential_strategy = ModelServingUserCredentials())
+    """
+
+    def __init__(self):
+        self.credential_type = ModelServingAuthProvider.USER_CREDENTIALS
+        self.default_credentials = DefaultCredentials()
+
+    def auth_type(self):
+        if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+            return "model_serving_" + self.credential_type
+        else:
+            return self.default_credentials.auth_type()
+
+    def __call__(self, cfg: 'Config') -> CredentialsProvider:
+        if ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+            header_factory = model_serving_auth_visitor(cfg, self.credential_type)
+            if not header_factory:
+                raise ValueError(
+                    f"Unable to authenticate using {self.credential_type} in Databricks Model Serving Environment"
+                )
+            return header_factory
+        else:
+            return self.default_credentials(cfg)
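
A minimal usage sketch of the new strategy (not part of the patch), mirroring the usage line in the docstring above; the import path is an assumption based on where the class is defined:

```python
# Sketch, assuming ModelServingUserCredentials is importable from
# databricks.sdk.credentials_provider, where the class above is defined.
from databricks.sdk import WorkspaceClient
from databricks.sdk.credentials_provider import ModelServingUserCredentials

# Inside Databricks Model Serving this reads the downscoped user token from
# thread-local storage; in any other environment it falls back to the
# default credential chain.
invokers_client = WorkspaceClient(credential_strategy=ModelServingUserCredentials())
```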
diff --git a/databricks/sdk/data_plane.py b/databricks/sdk/data_plane.py
index 6f6ddf80c..5ad9b79ad 100644
--- a/databricks/sdk/data_plane.py
+++ b/databricks/sdk/data_plane.py
@@ -3,7 +3,6 @@
 from typing import Callable, List
 
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 
 @dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:
 
 class DataPlaneService:
     """Helper class to fetch and manage DataPlane details."""
+    from .service.serving import DataPlaneInfo
 
     def __init__(self):
         self._data_plane_info = {}
diff --git a/databricks/sdk/mixins/files.py b/databricks/sdk/mixins/files.py
index 1e109a1a7..678b4b630 100644
--- a/databricks/sdk/mixins/files.py
+++ b/databricks/sdk/mixins/files.py
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import base64
+import logging
 import os
 import pathlib
 import platform
@@ -8,19 +9,27 @@
 import sys
 from abc import ABC, abstractmethod
 from collections import deque
+from collections.abc import Iterator
 from io import BytesIO
 from types import TracebackType
 from typing import (TYPE_CHECKING, AnyStr, BinaryIO, Generator, Iterable,
-                    Iterator, Type, Union)
+                    Optional, Type, Union)
 from urllib import parse
 
+from requests import RequestException
+
+from .._base_client import _RawResponse, _StreamingResponse
 from .._property import _cached_property
 from ..errors import NotFound
 from ..service import files
+from ..service._internal import _escape_multi_segment_path_parameter
+from ..service.files import DownloadResponse
 
 if TYPE_CHECKING:
     from _typeshed import Self
 
+_LOG = logging.getLogger(__name__)
+
 
 class _DbfsIO(BinaryIO):
     MAX_CHUNK_SIZE = 1024 * 1024
@@ -636,3 +645,177 @@ def delete(self, path: str, *, recursive=False):
         if p.is_dir and not recursive:
             raise IOError('deleting directories requires recursive flag')
         p.delete(recursive=recursive)
+
+
+class FilesExt(files.FilesAPI):
+    __doc__ = files.FilesAPI.__doc__
+
+    def __init__(self, api_client, config: Config):
+        super().__init__(api_client)
+        self._config = config.copy()
+
+    def download(self, file_path: str) -> DownloadResponse:
+        """Download a file.
+
+        Downloads a file of any size. The file contents are the response body.
+        This is a standard HTTP file download, not a JSON RPC.
+
+        It is strongly recommended, for fault tolerance reasons,
+        to iteratively consume from the stream with a maximum read(size)
+        defined instead of using indefinite-size reads.
+
+        :param file_path: str
+          The remote path of the file, e.g. /Volumes/path/to/your/file
+
+        :returns: :class:`DownloadResponse`
+        """
+
+        initial_response: DownloadResponse = self._download_raw_stream(file_path=file_path,
+                                                                       start_byte_offset=0,
+                                                                       if_unmodified_since_timestamp=None)
+
+        wrapped_response = self._wrap_stream(file_path, initial_response)
+        initial_response.contents._response = wrapped_response
+        return initial_response
+
+    def _download_raw_stream(self,
+                             file_path: str,
+                             start_byte_offset: int,
+                             if_unmodified_since_timestamp: Optional[str] = None) -> DownloadResponse:
+        headers = {'Accept': 'application/octet-stream', }
+
+        if start_byte_offset and not if_unmodified_since_timestamp:
+            raise Exception("if_unmodified_since_timestamp is required if start_byte_offset is specified")
+
+        if start_byte_offset:
+            headers['Range'] = f'bytes={start_byte_offset}-'
+
+        if if_unmodified_since_timestamp:
+            headers['If-Unmodified-Since'] = if_unmodified_since_timestamp
+
+        response_headers = ['content-length', 'content-type', 'last-modified', ]
+        res = self._api.do('GET',
+                           f'/api/2.0/fs/files{_escape_multi_segment_path_parameter(file_path)}',
+                           headers=headers,
+                           response_headers=response_headers,
+                           raw=True)
+
+        result = DownloadResponse.from_dict(res)
+        if not isinstance(result.contents, _StreamingResponse):
+            raise Exception("Internal error: response contents is of unexpected type: " +
+                            type(result.contents).__name__)
+
+        return result
+
+    def _wrap_stream(self, file_path: str, downloadResponse: DownloadResponse):
+        underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+        return _ResilientResponse(self,
+                                  file_path,
+                                  downloadResponse.last_modified,
+                                  offset=0,
+                                  underlying_response=underlying_response)
+
+
+class _ResilientResponse(_RawResponse):
+
+    def __init__(self, api: FilesExt, file_path: str, file_last_modified: str, offset: int,
+                 underlying_response: _RawResponse):
+        self.api = api
+        self.file_path = file_path
+        self.underlying_response = underlying_response
+        self.offset = offset
+        self.file_last_modified = file_last_modified
+
+    def iter_content(self, chunk_size=1, decode_unicode=False):
+        if decode_unicode:
+            raise ValueError('Decode unicode is not supported')
+
+        iterator = self.underlying_response.iter_content(chunk_size=chunk_size, decode_unicode=False)
+        self.iterator = _ResilientIterator(iterator, self.file_path, self.file_last_modified, self.offset,
+                                           self.api, chunk_size)
+        return self.iterator
+
+    def close(self):
+        self.iterator.close()
+
+
+class _ResilientIterator(Iterator):
+    # This class tracks current offset (returned to the client code)
+    # and recovers from failures by requesting download from the current offset.
+
+    @staticmethod
+    def _extract_raw_response(download_response: DownloadResponse) -> _RawResponse:
+        streaming_response: _StreamingResponse = download_response.contents # this is an instance of _StreamingResponse
+        return streaming_response._response
+
+    def __init__(self, underlying_iterator, file_path: str, file_last_modified: str, offset: int,
+                 api: FilesExt, chunk_size: int):
+        self._underlying_iterator = underlying_iterator
+        self._api = api
+        self._file_path = file_path
+
+        # Absolute current offset (0-based), i.e. number of bytes from the beginning of the file
+        # that were so far returned to the caller code.
+        self._offset = offset
+        self._file_last_modified = file_last_modified
+        self._chunk_size = chunk_size
+
+        self._total_recovers_count: int = 0
+        self._recovers_without_progressing_count: int = 0
+        self._closed: bool = False
+
+    def _should_recover(self) -> bool:
+        if self._total_recovers_count == self._api._config.files_api_client_download_max_total_recovers:
+            _LOG.debug("Total recovers limit exceeded")
+            return False
+        if self._api._config.files_api_client_download_max_total_recovers_without_progressing is not None and self._recovers_without_progressing_count >= self._api._config.files_api_client_download_max_total_recovers_without_progressing:
+            _LOG.debug("No progression recovers limit exceeded")
+            return False
+        return True
+
+    def _recover(self) -> bool:
+        if not self._should_recover():
+            return False # recover suppressed, rethrow original exception
+
+        self._total_recovers_count += 1
+        self._recovers_without_progressing_count += 1
+
+        try:
+            self._underlying_iterator.close()
+
+            _LOG.debug("Trying to recover from offset " + str(self._offset))
+
+            # following call includes all the required network retries
+            downloadResponse = self._api._download_raw_stream(self._file_path, self._offset,
+                                                              self._file_last_modified)
+            underlying_response = _ResilientIterator._extract_raw_response(downloadResponse)
+            self._underlying_iterator = underlying_response.iter_content(chunk_size=self._chunk_size,
+                                                                         decode_unicode=False)
+            _LOG.debug("Recover succeeded")
+            return True
+        except Exception:
+            return False # recover failed, rethrow original exception
+
+    def __next__(self):
+        if self._closed:
+            # following _BaseClient
+            raise ValueError("I/O operation on closed file")
+
+        while True:
+            try:
+                returned_bytes = next(self._underlying_iterator)
+                self._offset += len(returned_bytes)
+                self._recovers_without_progressing_count = 0
+                return returned_bytes
+
+            except StopIteration:
+                raise
+
+            # https://requests.readthedocs.io/en/latest/user/quickstart/#errors-and-exceptions
+            except RequestException:
+                if not self._recover():
+                    raise
+
+    def close(self):
+        self._underlying_iterator.close()
+        self._closed = True
diff --git a/databricks/sdk/mixins/jobs.py b/databricks/sdk/mixins/jobs.py
new file mode 100644
index 000000000..d5e2a1728
--- /dev/null
+++ b/databricks/sdk/mixins/jobs.py
@@ -0,0 +1,84 @@
+from typing import Optional
+
+from databricks.sdk.service import jobs
+from databricks.sdk.service.jobs import Job
+
+
+class JobsExt(jobs.JobsAPI):
+
+    def get_run(self,
+                run_id: int,
+                *,
+                include_history: Optional[bool] = None,
+                include_resolved_values: Optional[bool] = None,
+                page_token: Optional[str] = None) -> jobs.Run:
+        """Get a single job run.
+
+        Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all pages of tasks, iterations, job_clusters, job_parameters, and repair history.
+
+        :param run_id: int
+          The canonical identifier of the run for which to retrieve the metadata. This field is required.
+        :param include_history: bool (optional)
+          Whether to include the repair history in the response.
+        :param include_resolved_values: bool (optional)
+          Whether to include resolved parameter values in the response.
+        :param page_token: str (optional)
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetRun response.
+
+        :returns: :class:`Run`
+        """
+        run = super().get_run(run_id,
+                              include_history=include_history,
+                              include_resolved_values=include_resolved_values,
+                              page_token=page_token)
+
+        # When querying a Job run, a page token is returned when there are more than 100 tasks. No iterations are defined for a Job run. Therefore, the next page in the response only includes the next page of tasks.
+        # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. Therefore, the client only reads the iterations from the next page and not the tasks.
+        is_paginating_iterations = run.iterations is not None and len(run.iterations) > 0
+
+        # runs/get response includes next_page_token as long as there are more pages to fetch.
+        while run.next_page_token is not None:
+            next_run = super().get_run(run_id,
+                                       include_history=include_history,
+                                       include_resolved_values=include_resolved_values,
+                                       page_token=run.next_page_token)
+            if is_paginating_iterations:
+                run.iterations.extend(next_run.iterations)
+            else:
+                run.tasks.extend(next_run.tasks)
+            # Each new page of runs/get response includes the next page of the job_clusters, job_parameters, and repair history.
+            run.job_clusters.extend(next_run.job_clusters)
+            run.job_parameters.extend(next_run.job_parameters)
+            run.repair_history.extend(next_run.repair_history)
+            run.next_page_token = next_run.next_page_token
+
+        return run
+
+    def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
+        """Get a single job.
+
+        Retrieves the details for a single job. If the job has multiple pages of tasks, job_clusters, parameters or environments,
+        it will paginate through all pages and aggregate the results.
+
+        :param job_id: int
+          The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
+
+        :returns: :class:`Job`
+        """
+        job = super().get(job_id, page_token=page_token)
+
+        # jobs/get response includes next_page_token as long as there are more pages to fetch.
+        while job.next_page_token is not None:
+            next_job = super().get(job_id, page_token=job.next_page_token)
+            # Each new page of jobs/get response includes the next page of the tasks, job_clusters, job_parameters, and environments.
+            job.settings.tasks.extend(next_job.settings.tasks)
+            job.settings.job_clusters.extend(next_job.settings.job_clusters)
+            job.settings.parameters.extend(next_job.settings.parameters)
+            job.settings.environments.extend(next_job.settings.environments)
+            job.next_page_token = next_job.next_page_token
+
+        return job
\ No newline at end of file
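
Callers keep the one-shot API while the mixin aggregates pages internally; a hypothetical sketch (the IDs are placeholders, and `w.jobs` is assumed to resolve to `JobsExt`):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
run = w.jobs.get_run(run_id=1234)     # placeholder run ID
print(len(run.tasks or []))           # all pages of tasks, not just the first 100

job = w.jobs.get(job_id=5678)         # placeholder job ID
print(len(job.settings.tasks or []))  # tasks aggregated across pages
```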
diff --git a/databricks/sdk/mixins/open_ai_client.py b/databricks/sdk/mixins/open_ai_client.py
index f7a8af02d..e5bea9607 100644
--- a/databricks/sdk/mixins/open_ai_client.py
+++ b/databricks/sdk/mixins/open_ai_client.py
@@ -1,4 +1,10 @@
-from databricks.sdk.service.serving import ServingEndpointsAPI
+import json as js
+from typing import Dict, Optional
+
+from requests import Response
+
+from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
+                                            ServingEndpointsAPI)
 
 
 class ServingEndpointsExt(ServingEndpointsAPI):
@@ -29,7 +35,7 @@ def get_open_ai_client(self):
             from openai import OpenAI
         except Exception:
             raise ImportError(
-                "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`"
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]`"
             )
 
         return OpenAI(
@@ -42,7 +48,7 @@ def get_langchain_chat_open_ai_client(self, model):
             from langchain_openai import ChatOpenAI
         except Exception:
             raise ImportError(
-                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7"
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip install databricks-sdk[openai]` and ensure you are using python>3.7"
             )
 
         return ChatOpenAI(
@@ -50,3 +56,51 @@ def get_langchain_chat_open_ai_client(self, model):
             openai_api_base=self._api._cfg.host + "/serving-endpoints",
             api_key="no-token", # Passing in a placeholder to pass validations, this will not be used
             http_client=self._get_authorized_http_client())
+
+    def http_request(self,
+                     conn: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[Dict[str, str]] = None,
+                     json: Optional[Dict[str, str]] = None,
+                     params: Optional[Dict[str, str]] = None) -> Response:
+        """Make external services call using the credentials stored in UC Connection.
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+        :returns: :class:`Response`
+        """
+        response = Response()
+        response.status_code = 200
+        server_response = super().http_request(connection_name=conn,
+                                               method=method,
+                                               path=path,
+                                               headers=js.dumps(headers) if headers is not None else None,
+                                               json=js.dumps(json) if json is not None else None,
+                                               params=js.dumps(params) if params is not None else None)
+
+        # Read the content from the HttpRequestResponse object
+        if hasattr(server_response, "contents") and hasattr(server_response.contents, "read"):
+            raw_content = server_response.contents.read() # Read the bytes
+        else:
+            raise ValueError("Invalid response from the server.")
+
+        # Set the raw content
+        if isinstance(raw_content, bytes):
+            response._content = raw_content
+        else:
+            raise ValueError("Contents must be bytes.")
+
+        return response
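
A hedged usage sketch for the new experimental helper; the connection name and path are placeholders for a Unity Catalog connection you have created:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

w = WorkspaceClient()
# Auth headers come from the UC connection; the result is rebuilt into a
# standard requests.Response with the body read from the server response.
resp = w.serving_endpoints.http_request(
    conn="my_connection",                         # placeholder connection name
    method=ExternalFunctionRequestHttpMethod.GET,
    path="/api/status",                           # placeholder relative path
    params={"verbose": "true"},
)
print(resp.status_code, resp.text)
```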
diff --git a/databricks/sdk/retries.py b/databricks/sdk/retries.py
index b98c54281..4f55087ea 100644
--- a/databricks/sdk/retries.py
+++ b/databricks/sdk/retries.py
@@ -13,7 +13,8 @@ def retried(*,
             on: Sequence[Type[BaseException]] = None,
             is_retryable: Callable[[BaseException], Optional[str]] = None,
             timeout=timedelta(minutes=20),
-            clock: Clock = None):
+            clock: Clock = None,
+            before_retry: Callable = None):
     has_allowlist = on is not None
     has_callback = is_retryable is not None
     if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
@@ -54,6 +55,9 @@ def wrapper(*args, **kwargs):
                         raise err
 
                     logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
+                    if before_retry:
+                        before_retry()
+
                     clock.sleep(sleep + random())
                     attempt += 1
             raise TimeoutError(f'Timed out after {timeout}') from last_err
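
The new `before_retry` hook fires after a retryable failure and just before the backoff sleep; a minimal sketch, assuming an operation that needs per-attempt cleanup:

```python
from datetime import timedelta

from databricks.sdk.retries import retried

def reset_state():
    # Placeholder cleanup, e.g. reopening a connection or rewinding a stream.
    print("resetting before next attempt")

@retried(on=[ConnectionError], timeout=timedelta(seconds=30), before_retry=reset_state)
def flaky_operation():
    ...  # placeholder body that may raise ConnectionError
```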
diff --git a/databricks/sdk/service/apps.py b/databricks/sdk/service/apps.py
index 52796d0e8..d15a6bef2 100755
--- a/databricks/sdk/service/apps.py
+++ b/databricks/sdk/service/apps.py
@@ -45,6 +45,9 @@ class App:
     description: Optional[str] = None
     """The description of the app."""
 
+    id: Optional[str] = None
+    """The unique identifier of the app."""
+
     pending_deployment: Optional[AppDeployment] = None
     """The pending deployment of the app. A deployment is considered pending when it is being prepared
     for deployment to the app compute."""
@@ -52,6 +55,8 @@ class App:
     resources: Optional[List[AppResource]] = None
     """Resources for the app."""
 
+    service_principal_client_id: Optional[str] = None
+
     service_principal_id: Optional[int] = None
 
     service_principal_name: Optional[str] = None
@@ -76,9 +81,37 @@ def as_dict(self) -> dict:
         if self.default_source_code_path is not None:
             body['default_source_code_path'] = self.default_source_code_path
         if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
         if self.name is not None: body['name'] = self.name
         if self.pending_deployment: body['pending_deployment'] = self.pending_deployment.as_dict()
         if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
+        if self.service_principal_client_id is not None:
+            body['service_principal_client_id'] = self.service_principal_client_id
+        if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.updater is not None: body['updater'] = self.updater
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the App into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active_deployment: body['active_deployment'] = self.active_deployment
+        if self.app_status: body['app_status'] = self.app_status
+        if self.compute_status: body['compute_status'] = self.compute_status
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.default_source_code_path is not None:
+            body['default_source_code_path'] = self.default_source_code_path
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.pending_deployment: body['pending_deployment'] = self.pending_deployment
+        if self.resources: body['resources'] = self.resources
+        if self.service_principal_client_id is not None:
+            body['service_principal_client_id'] = self.service_principal_client_id
         if self.service_principal_id is not None: body['service_principal_id'] = self.service_principal_id
         if self.service_principal_name is not None:
             body['service_principal_name'] = self.service_principal_name
@@ -97,9 +130,11 @@ def from_dict(cls, d: Dict[str, any]) -> App:
                    creator=d.get('creator', None),
                    default_source_code_path=d.get('default_source_code_path', None),
                    description=d.get('description', None),
+                   id=d.get('id', None),
                    name=d.get('name', None),
                    pending_deployment=_from_dict(d, 'pending_deployment', AppDeployment),
                    resources=_repeated_dict(d, 'resources', AppResource),
+                   service_principal_client_id=d.get('service_principal_client_id', None),
                    service_principal_id=d.get('service_principal_id', None),
                    service_principal_name=d.get('service_principal_name', None),
                    update_time=d.get('update_time', None),
@@ -131,6 +166,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlRequest:
         """Deserializes the AppAccessControlRequest from a dictionary."""
@@ -168,6 +213,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppAccessControlResponse:
         """Deserializes the AppAccessControlResponse from a dictionary."""
@@ -221,6 +277,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.creator is not None: body['creator'] = self.creator
+        if self.deployment_artifacts: body['deployment_artifacts'] = self.deployment_artifacts
+        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
+        if self.mode is not None: body['mode'] = self.mode
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        if self.status: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeployment:
         """Deserializes the AppDeployment from a dictionary."""
@@ -245,6 +314,12 @@ def as_dict(self) -> dict:
         if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeploymentArtifacts into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentArtifacts:
         """Deserializes the AppDeploymentArtifacts from a dictionary."""
@@ -280,6 +355,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppDeploymentStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppDeploymentStatus:
         """Deserializes the AppDeploymentStatus from a dictionary."""
@@ -303,6 +385,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermission:
         """Deserializes the AppPermission from a dictionary."""
@@ -335,6 +425,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissions:
         """Deserializes the AppPermissions from a dictionary."""
@@ -357,6 +455,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsDescription:
         """Deserializes the AppPermissionsDescription from a dictionary."""
@@ -379,6 +484,13 @@ def as_dict(self) -> dict:
         if self.app_name is not None: body['app_name'] = self.app_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.app_name is not None: body['app_name'] = self.app_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppPermissionsRequest:
         """Deserializes the AppPermissionsRequest from a dictionary."""
@@ -413,6 +525,17 @@ def as_dict(self) -> dict:
         if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.job: body['job'] = self.job
+        if self.name is not None: body['name'] = self.name
+        if self.secret: body['secret'] = self.secret
+        if self.serving_endpoint: body['serving_endpoint'] = self.serving_endpoint
+        if self.sql_warehouse: body['sql_warehouse'] = self.sql_warehouse
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResource:
         """Deserializes the AppResource from a dictionary."""
@@ -440,6 +563,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceJob:
         """Deserializes the AppResourceJob from a dictionary."""
@@ -474,6 +604,14 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.permission is not None: body['permission'] = self.permission
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSecret:
         """Deserializes the AppResourceSecret from a dictionary."""
@@ -506,6 +644,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceServingEndpoint:
         """Deserializes the AppResourceServingEndpoint from a dictionary."""
@@ -536,6 +681,13 @@ def as_dict(self) -> dict:
         if self.permission is not None: body['permission'] = self.permission.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AppResourceSqlWarehouse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.permission is not None: body['permission'] = self.permission
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AppResourceSqlWarehouse:
         """Deserializes the AppResourceSqlWarehouse from a dictionary."""
@@ -573,6 +725,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApplicationStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApplicationStatus:
         """Deserializes the ApplicationStatus from a dictionary."""
@@ -605,74 +764,17 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
-        """Deserializes the ComputeStatus from a dictionary."""
-        return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState))
-
-
-@dataclass
-class CreateAppDeploymentRequest:
-    app_name: Optional[str] = None
-    """The name of the app."""
-
-    deployment_id: Optional[str] = None
-    """The unique id of the deployment."""
-
-    mode: Optional[AppDeploymentMode] = None
-    """The mode of which the deployment will manage the source code."""
-
-    source_code_path: Optional[str] = None
-    """The workspace file system path of the source code used to create the app deployment. This is
-    different from `deployment_artifacts.source_code_path`, which is the path used by the deployed
-    app. The former refers to the original source code location of the app in the workspace during
-    deployment creation, whereas the latter provides a system generated stable snapshotted source
-    code path used by the deployment."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppDeploymentRequest into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComputeStatus into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.app_name is not None: body['app_name'] = self.app_name
-        if self.deployment_id is not None: body['deployment_id'] = self.deployment_id
-        if self.mode is not None: body['mode'] = self.mode.value
-        if self.source_code_path is not None: body['source_code_path'] = self.source_code_path
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppDeploymentRequest:
-        """Deserializes the CreateAppDeploymentRequest from a dictionary."""
-        return cls(app_name=d.get('app_name', None),
-                   deployment_id=d.get('deployment_id', None),
-                   mode=_enum(d, 'mode', AppDeploymentMode),
-                   source_code_path=d.get('source_code_path', None))
-
-
-@dataclass
-class CreateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    description: Optional[str] = None
-    """The description of the app."""
-
-    resources: Optional[List[AppResource]] = None
-    """Resources for the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateAppRequest:
-        """Deserializes the CreateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   resources=_repeated_dict(d, 'resources', AppResource))
+    def from_dict(cls, d: Dict[str, any]) -> ComputeStatus:
+        """Deserializes the ComputeStatus from a dictionary."""
+        return cls(message=d.get('message', None), state=_enum(d, 'state', ComputeState))
 
 
 @dataclass
@@ -686,6 +788,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetAppPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAppPermissionLevelsResponse:
         """Deserializes the GetAppPermissionLevelsResponse from a dictionary."""
@@ -707,6 +815,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAppDeploymentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_deployments: body['app_deployments'] = self.app_deployments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppDeploymentsResponse:
         """Deserializes the ListAppDeploymentsResponse from a dictionary."""
@@ -728,6 +843,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAppsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAppsResponse:
         """Deserializes the ListAppsResponse from a dictionary."""
@@ -746,34 +868,6 @@ class StopAppRequest:
     """The name of the app."""
 
 
-@dataclass
-class UpdateAppRequest:
-    name: str
-    """The name of the app. The name must contain only lowercase alphanumeric characters and hyphens.
-    It must be unique within the workspace."""
-
-    description: Optional[str] = None
-    """The description of the app."""
-
-    resources: Optional[List[AppResource]] = None
-    """Resources for the app."""
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateAppRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.description is not None: body['description'] = self.description
-        if self.name is not None: body['name'] = self.name
-        if self.resources: body['resources'] = [v.as_dict() for v in self.resources]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateAppRequest:
-        """Deserializes the UpdateAppRequest from a dictionary."""
-        return cls(description=d.get('description', None),
-                   name=d.get('name', None),
-                   resources=_repeated_dict(d, 'resources', AppResource))
-
-
 class AppsAPI:
     """Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on."""
@@ -813,29 +907,31 @@ def wait_get_app_active(self,
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_app_stopped(self,
-                             name: str,
-                             timeout=timedelta(minutes=20),
-                             callback: Optional[Callable[[App], None]] = None) -> App:
+    def wait_get_deployment_app_succeeded(
+            self,
+            app_name: str,
+            deployment_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ComputeState.STOPPED, )
-        failure_states = (ComputeState.ERROR, )
+        target_states = (AppDeploymentState.SUCCEEDED, )
+        failure_states = (AppDeploymentState.FAILED, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.get(name=name)
-            status = poll.compute_status.state
+            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
+            status = poll.status.state
             status_message = f'current status: {status}'
-            if poll.compute_status:
-                status_message = poll.compute_status.message
+            if poll.status:
+                status_message = poll.status.message
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach STOPPED, got {status}: {status_message}'
+                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"name={name}"
+            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -845,31 +941,29 @@ def wait_get_app_stopped(self,
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_deployment_app_succeeded(
-            self,
-            app_name: str,
-            deployment_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[AppDeployment], None]] = None) -> AppDeployment:
+    def wait_get_app_stopped(self,
+                             name: str,
+                             timeout=timedelta(minutes=20),
+                             callback: Optional[Callable[[App], None]] = None) -> App:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (AppDeploymentState.SUCCEEDED, )
-        failure_states = (AppDeploymentState.FAILED, )
+        target_states = (ComputeState.STOPPED, )
+        failure_states = (ComputeState.ERROR, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.get_deployment(app_name=app_name, deployment_id=deployment_id)
-            status = poll.status.state
+            poll = self.get(name=name)
+            status = poll.compute_status.state
             status_message = f'current status: {status}'
-            if poll.status:
-                status_message = poll.status.message
+            if poll.compute_status:
+                status_message = poll.compute_status.message
             if status in target_states:
                 return poll
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach SUCCEEDED, got {status}: {status_message}'
+                msg = f'failed to reach STOPPED, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"app_name={app_name}, deployment_id={deployment_id}"
+            prefix = f"name={name}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -879,43 +973,33 @@ def wait_get_deployment_app_succeeded(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def create(self,
-               name: str,
-               *,
-               description: Optional[str] = None,
-               resources: Optional[List[AppResource]] = None) -> Wait[App]:
+    def create(self, *, app: Optional[App] = None, no_compute: Optional[bool] = None) -> Wait[App]:
         """Create an app.
         
         Creates a new app.
         
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
         
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         """
-        body = {}
-        if description is not None: body['description'] = description
-        if name is not None: body['name'] = name
-        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        body = app.as_dict()
+        query = {}
+        if no_compute is not None: query['no_compute'] = no_compute
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        op_response = self._api.do('POST', '/api/2.0/apps', body=body, headers=headers)
+        op_response = self._api.do('POST', '/api/2.0/apps', query=query, body=body, headers=headers)
         return Wait(self.wait_get_app_active, response=App.from_dict(op_response), name=op_response['name'])
 
     def create_and_wait(self,
-                        name: str,
                         *,
-                        description: Optional[str] = None,
-                        resources: Optional[List[AppResource]] = None,
+                        app: Optional[App] = None,
+                        no_compute: Optional[bool] = None,
                         timeout=timedelta(minutes=20)) -> App:
-        return self.create(description=description, name=name, resources=resources).result(timeout=timeout)
+        return self.create(app=app, no_compute=no_compute).result(timeout=timeout)
 
     def delete(self, name: str) -> App:
         """Delete an app.
@@ -933,37 +1017,20 @@ def delete(self, name: str) -> App:
         res = self._api.do('DELETE', f'/api/2.0/apps/{name}', headers=headers)
         return App.from_dict(res)
 
-    def deploy(self,
-               app_name: str,
-               *,
-               deployment_id: Optional[str] = None,
-               mode: Optional[AppDeploymentMode] = None,
-               source_code_path: Optional[str] = None) -> Wait[AppDeployment]:
+    def deploy(self, app_name: str, *, app_deployment: Optional[AppDeployment] = None) -> Wait[AppDeployment]:
         """Create an app deployment.
         
         Creates an app deployment for the app with the supplied name.
         
         :param app_name: str
           The name of the app.
-        :param deployment_id: str (optional)
-          The unique id of the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
-        :param source_code_path: str (optional)
-          The workspace file system path of the source code used to create the app deployment. This is
-          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
-          The former refers to the original source code location of the app in the workspace during deployment
-          creation, whereas the latter provides a system generated stable snapshotted source code path used by
-          the deployment.
+        :param app_deployment: :class:`AppDeployment` (optional)
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         """
-        body = {}
-        if deployment_id is not None: body['deployment_id'] = deployment_id
-        if mode is not None: body['mode'] = mode.value
-        if source_code_path is not None: body['source_code_path'] = source_code_path
+        body = app_deployment.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         op_response = self._api.do('POST',
@@ -975,18 +1042,12 @@ def deploy(self,
                     app_name=app_name,
                     deployment_id=op_response['deployment_id'])
 
-    def deploy_and_wait(
-        self,
-        app_name: str,
-        *,
-        deployment_id: Optional[str] = None,
-        mode: Optional[AppDeploymentMode] = None,
-        source_code_path: Optional[str] = None,
-        timeout=timedelta(minutes=20)) -> AppDeployment:
-        return self.deploy(app_name=app_name,
-                           deployment_id=deployment_id,
-                           mode=mode,
-                           source_code_path=source_code_path).result(timeout=timeout)
+    def deploy_and_wait(self,
+                        app_name: str,
+                        *,
+                        app_deployment: Optional[AppDeployment] = None,
+                        timeout=timedelta(minutes=20)) -> AppDeployment:
+        return self.deploy(app_deployment=app_deployment, app_name=app_name).result(timeout=timeout)
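Correspondingly, a sketch of the reworked deploy call, with the former `deployment_id`/`mode`/`source_code_path` keywords folded into an `AppDeployment` object (field names taken from the parameters being removed; accessor and module path assumed as above):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import AppDeployment

w = WorkspaceClient()
# deployment_id, mode, and source_code_path are now fields of AppDeployment.
deployment = w.apps.deploy_and_wait(
    app_name='my-app',
    app_deployment=AppDeployment(source_code_path='/Workspace/Users/someone@example.com/my-app'))
```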
 
     def get(self, name: str) -> App:
         """Get an app.
@@ -1121,7 +1182,8 @@ def set_permissions(
             access_control_list: Optional[List[AppAccessControlRequest]] = None) -> AppPermissions:
         """Set app permissions.
         
-        Sets permissions on an app. Apps can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param app_name: str
           The app for which to get or manage permissions.
@@ -1179,11 +1241,7 @@ def stop(self, name: str) -> Wait[App]:
     def stop_and_wait(self, name: str, timeout=timedelta(minutes=20)) -> App:
         return self.stop(name=name).result(timeout=timeout)
 
-    def update(self,
-               name: str,
-               *,
-               description: Optional[str] = None,
-               resources: Optional[List[AppResource]] = None) -> App:
+    def update(self, name: str, *, app: Optional[App] = None) -> App:
         """Update an app.
         
         Updates the app with the supplied name.
@@ -1191,16 +1249,11 @@ def update(self,
         :param name: str
           The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
           must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
         
         :returns: :class:`App`
         """
-        body = {}
-        if description is not None: body['description'] = description
-        if resources is not None: body['resources'] = [v.as_dict() for v in resources]
+        body = app.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/apps/{name}', body=body, headers=headers)
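A matching sketch for the reworked update. Note that `app` is typed `Optional`, but the body is built via `app.as_dict()`, so in practice callers must pass an `App` object (names assumed as in the earlier sketches):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
# Passing app=None would raise AttributeError on app.as_dict(), so always supply an App.
updated = w.apps.update('my-app', app=App(name='my-app', description='new description'))
```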
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index cfb7ba0b4..e23e676fe 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -11,6 +11,8 @@
 
 _LOG = logging.getLogger('databricks.sdk')
 
+from databricks.sdk.service import compute
+
 # all definitions in this file are in alphabetical order
 
 
@@ -34,6 +36,15 @@ def as_dict(self) -> dict:
         if self.target is not None: body['target'] = self.target
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ActionConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configuration_id is not None:
+            body['action_configuration_id'] = self.action_configuration_id
+        if self.action_type is not None: body['action_type'] = self.action_type
+        if self.target is not None: body['target'] = self.target
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ActionConfiguration:
         """Deserializes the ActionConfiguration from a dictionary."""
@@ -83,6 +94,18 @@ def as_dict(self) -> dict:
         if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configurations: body['action_configurations'] = self.action_configurations
+        if self.alert_configuration_id is not None:
+            body['alert_configuration_id'] = self.alert_configuration_id
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
+        if self.time_period is not None: body['time_period'] = self.time_period
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConfiguration:
         """Deserializes the AlertConfiguration from a dictionary."""
@@ -149,6 +172,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfiguration:
         """Deserializes the BudgetConfiguration from a dictionary."""
@@ -178,6 +214,13 @@ def as_dict(self) -> dict:
         if self.workspace_id: body['workspace_id'] = self.workspace_id.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tags: body['tags'] = self.tags
+        if self.workspace_id: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilter:
         """Deserializes the BudgetConfigurationFilter from a dictionary."""
@@ -198,6 +241,13 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operator is not None: body['operator'] = self.operator
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterClause:
         """Deserializes the BudgetConfigurationFilterClause from a dictionary."""
@@ -223,6 +273,13 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterTagClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterTagClause:
         """Deserializes the BudgetConfigurationFilterTagClause from a dictionary."""
@@ -242,6 +299,13 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetConfigurationFilterWorkspaceIdClause into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operator is not None: body['operator'] = self.operator
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdClause:
         """Deserializes the BudgetConfigurationFilterWorkspaceIdClause from a dictionary."""
@@ -249,6 +313,44 @@ def from_dict(cls, d: Dict[str, any]) -> BudgetConfigurationFilterWorkspaceIdCla
                    values=d.get('values', None))
 
 
+@dataclass
+class BudgetPolicy:
+    """Contains the BudgetPolicy details."""
+
+    policy_id: str
+    """The Id of the policy. This field is generated by Databricks and globally unique."""
+
+    custom_tags: Optional[List[compute.CustomPolicyTag]] = None
+    """A list of tags defined by the customer. At most 20 entries are allowed per policy."""
+
+    policy_name: Optional[str] = None
+    """The name of the policy. - Must be unique among active policies. - Can contain only characters
+    from the ISO 8859-1 (latin1) set."""
+
+    def as_dict(self) -> dict:
+        """Serializes the BudgetPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BudgetPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> BudgetPolicy:
+        """Deserializes the BudgetPolicy from a dictionary."""
+        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
+                   policy_id=d.get('policy_id', None),
+                   policy_name=d.get('policy_name', None))
+
+
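A round-trip sketch for the new dataclass; the `key`/`value` fields on `compute.CustomPolicyTag` are an assumption for illustration, not taken from this patch.

```python
from databricks.sdk.service import compute
from databricks.sdk.service.billing import BudgetPolicy

# key/value are assumed field names on CustomPolicyTag.
policy = BudgetPolicy(policy_id='abc-123',
                      policy_name='team-ml',
                      custom_tags=[compute.CustomPolicyTag(key='team', value='ml')])
# as_dict serializes nested tags; from_dict reconstructs them, so the round trip is lossless.
assert BudgetPolicy.from_dict(policy.as_dict()) == policy
```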
 @dataclass
 class CreateBillingUsageDashboardRequest:
     dashboard_type: Optional[UsageDashboardType] = None
@@ -265,6 +367,13 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_type is not None: body['dashboard_type'] = self.dashboard_type
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardRequest:
         """Deserializes the CreateBillingUsageDashboardRequest from a dictionary."""
@@ -283,6 +392,12 @@ def as_dict(self) -> dict:
         if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBillingUsageDashboardResponse:
         """Deserializes the CreateBillingUsageDashboardResponse from a dictionary."""
@@ -316,6 +431,15 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudget:
         """Deserializes the CreateBudgetConfigurationBudget from a dictionary."""
@@ -341,6 +465,13 @@ def as_dict(self) -> dict:
         if self.target is not None: body['target'] = self.target
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudgetActionConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_type is not None: body['action_type'] = self.action_type
+        if self.target is not None: body['target'] = self.target
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetActionConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetActionConfigurations from a dictionary."""
@@ -378,6 +509,16 @@ def as_dict(self) -> dict:
         if self.trigger_type is not None: body['trigger_type'] = self.trigger_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationBudgetAlertConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action_configurations: body['action_configurations'] = self.action_configurations
+        if self.quantity_threshold is not None: body['quantity_threshold'] = self.quantity_threshold
+        if self.quantity_type is not None: body['quantity_type'] = self.quantity_type
+        if self.time_period is not None: body['time_period'] = self.time_period
+        if self.trigger_type is not None: body['trigger_type'] = self.trigger_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationBudgetAlertConfigurations:
         """Deserializes the CreateBudgetConfigurationBudgetAlertConfigurations from a dictionary."""
@@ -400,6 +541,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationRequest:
         """Deserializes the CreateBudgetConfigurationRequest from a dictionary."""
@@ -417,12 +564,57 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateBudgetConfigurationResponse:
         """Deserializes the CreateBudgetConfigurationResponse from a dictionary."""
         return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
+@dataclass
+class CreateBudgetPolicyRequest:
+    """A request to create a BudgetPolicy."""
+
+    custom_tags: Optional[List[compute.CustomPolicyTag]] = None
+    """A list of tags defined by the customer. At most 40 entries are allowed per policy."""
+
+    policy_name: Optional[str] = None
+    """The name of the policy. - Must be unique among active policies. - Can contain only characters of
+    0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace."""
+
+    request_id: Optional[str] = None
+    """A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+    recommended. This request is only idempotent if a `request_id` is provided."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateBudgetPolicyRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.request_id is not None: body['request_id'] = self.request_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateBudgetPolicyRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        if self.request_id is not None: body['request_id'] = self.request_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateBudgetPolicyRequest:
+        """Deserializes the CreateBudgetPolicyRequest from a dictionary."""
+        return cls(custom_tags=_repeated_dict(d, 'custom_tags', compute.CustomPolicyTag),
+                   policy_name=d.get('policy_name', None),
+                   request_id=d.get('request_id', None))
+
+
 @dataclass
 class CreateLogDeliveryConfigurationParams:
     log_type: LogType
@@ -509,6 +701,21 @@ def as_dict(self) -> dict:
         if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateLogDeliveryConfigurationParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_name is not None: body['config_name'] = self.config_name
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
+        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
+        if self.log_type is not None: body['log_type'] = self.log_type
+        if self.output_format is not None: body['output_format'] = self.output_format
+        if self.status is not None: body['status'] = self.status
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateLogDeliveryConfigurationParams:
         """Deserializes the CreateLogDeliveryConfigurationParams from a dictionary."""
@@ -531,6 +738,30 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
+        """Deserializes the DeleteBudgetConfigurationResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
         """Deserializes the DeleteBudgetConfigurationResponse from a dictionary."""
@@ -563,12 +794,56 @@ def as_dict(self) -> dict:
         if self.contents: body['contents'] = self.contents
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
         return cls(contents=d.get('contents', None))
 
 
+@dataclass
+class Filter:
+    """Structured representation of a filter to be applied to a list of policies. All specified filters
+    will be applied in conjunction."""
+
+    creator_user_id: Optional[int] = None
+    """The policy creator user id to be filtered on. If unspecified, all policies will be returned."""
+
+    creator_user_name: Optional[str] = None
+    """The policy creator user name to be filtered on. If unspecified, all policies will be returned."""
+
+    policy_name: Optional[str] = None
+    """The partial name of policies to be filtered on. If unspecified, all policies will be returned."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Filter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Filter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator_user_id is not None: body['creator_user_id'] = self.creator_user_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.policy_name is not None: body['policy_name'] = self.policy_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Filter:
+        """Deserializes the Filter from a dictionary."""
+        return cls(creator_user_id=d.get('creator_user_id', None),
+                   creator_user_name=d.get('creator_user_name', None),
+                   policy_name=d.get('policy_name', None))
+
+
 @dataclass
 class GetBillingUsageDashboardResponse:
     dashboard_id: Optional[str] = None
@@ -584,6 +859,13 @@ def as_dict(self) -> dict:
         if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetBillingUsageDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.dashboard_url is not None: body['dashboard_url'] = self.dashboard_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBillingUsageDashboardResponse:
         """Deserializes the GetBillingUsageDashboardResponse from a dictionary."""
@@ -600,12 +882,39 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetBudgetConfigurationResponse:
         """Deserializes the GetBudgetConfigurationResponse from a dictionary."""
         return cls(budget=_from_dict(d, 'budget', BudgetConfiguration))
 
 
+@dataclass
+class LimitConfig:
+    """The limit configuration of the policy. Limit configuration provide a budget policy level cost
+    control by enforcing the limit."""
+
+    def as_dict(self) -> dict:
+        """Serializes the LimitConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LimitConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> LimitConfig:
+        """Deserializes the LimitConfig from a dictionary."""
+        return cls()
+
+
 @dataclass
 class ListBudgetConfigurationsResponse:
     budgets: Optional[List[BudgetConfiguration]] = None
@@ -621,6 +930,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListBudgetConfigurationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budgets: body['budgets'] = self.budgets
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
         """Deserializes the ListBudgetConfigurationsResponse from a dictionary."""
@@ -628,6 +944,44 @@ def from_dict(cls, d: Dict[str, any]) -> ListBudgetConfigurationsResponse:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListBudgetPoliciesResponse:
+    """A list of policies."""
+
+    next_page_token: Optional[str] = None
+    """A token that can be sent as `page_token` to retrieve the next page. If this field is omitted,
+    there are no subsequent pages."""
+
+    policies: Optional[List[BudgetPolicy]] = None
+
+    previous_page_token: Optional[str] = None
+    """A token that can be sent as `page_token` to retrieve the previous page. In this field is
+    omitted, there are no previous pages."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListBudgetPoliciesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListBudgetPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = self.policies
+        if self.previous_page_token is not None: body['previous_page_token'] = self.previous_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListBudgetPoliciesResponse:
+        """Deserializes the ListBudgetPoliciesResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   policies=_repeated_dict(d, 'policies', BudgetPolicy),
+                   previous_page_token=d.get('previous_page_token', None))
+
+
 class LogDeliveryConfigStatus(Enum):
     """Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled).
     Defaults to `ENABLED`. You can [enable or disable the
@@ -744,6 +1098,26 @@ def as_dict(self) -> dict:
         if self.workspace_ids_filter: body['workspace_ids_filter'] = [v for v in self.workspace_ids_filter]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.config_id is not None: body['config_id'] = self.config_id
+        if self.config_name is not None: body['config_name'] = self.config_name
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.delivery_path_prefix is not None: body['delivery_path_prefix'] = self.delivery_path_prefix
+        if self.delivery_start_time is not None: body['delivery_start_time'] = self.delivery_start_time
+        if self.log_delivery_status: body['log_delivery_status'] = self.log_delivery_status
+        if self.log_type is not None: body['log_type'] = self.log_type
+        if self.output_format is not None: body['output_format'] = self.output_format
+        if self.status is not None: body['status'] = self.status
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.workspace_ids_filter: body['workspace_ids_filter'] = self.workspace_ids_filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryConfiguration:
         """Deserializes the LogDeliveryConfiguration from a dictionary."""
@@ -796,6 +1170,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogDeliveryStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_attempt_time is not None: body['last_attempt_time'] = self.last_attempt_time
+        if self.last_successful_attempt_time is not None:
+            body['last_successful_attempt_time'] = self.last_successful_attempt_time
+        if self.message is not None: body['message'] = self.message
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogDeliveryStatus:
         """Deserializes the LogDeliveryStatus from a dictionary."""
@@ -846,12 +1230,50 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchStatusResponse:
         """Deserializes the PatchStatusResponse from a dictionary."""
         return cls()
 
 
+@dataclass
+class SortSpec:
+    descending: Optional[bool] = None
+    """Whether to sort in descending order."""
+
+    field: Optional[SortSpecField] = None
+    """The filed to sort by"""
+
+    def as_dict(self) -> dict:
+        """Serializes the SortSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SortSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.descending is not None: body['descending'] = self.descending
+        if self.field is not None: body['field'] = self.field
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> SortSpec:
+        """Deserializes the SortSpec from a dictionary."""
+        return cls(descending=d.get('descending', None), field=_enum(d, 'field', SortSpecField))
+
+
+class SortSpecField(Enum):
+
+    POLICY_NAME = 'POLICY_NAME'
+
+
 @dataclass
 class UpdateBudgetConfigurationBudget:
     account_id: Optional[str] = None
@@ -884,6 +1306,17 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationBudget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.alert_configurations: body['alert_configurations'] = self.alert_configurations
+        if self.budget_configuration_id is not None:
+            body['budget_configuration_id'] = self.budget_configuration_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationBudget:
         """Deserializes the UpdateBudgetConfigurationBudget from a dictionary."""
@@ -909,6 +1342,13 @@ def as_dict(self) -> dict:
         if self.budget_id is not None: body['budget_id'] = self.budget_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        if self.budget_id is not None: body['budget_id'] = self.budget_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationRequest:
         """Deserializes the UpdateBudgetConfigurationRequest from a dictionary."""
@@ -927,6 +1367,12 @@ def as_dict(self) -> dict:
         if self.budget: body['budget'] = self.budget.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateBudgetConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget: body['budget'] = self.budget
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateBudgetConfigurationResponse:
         """Deserializes the UpdateBudgetConfigurationResponse from a dictionary."""
@@ -952,6 +1398,14 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateLogDeliveryConfigurationStatusRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration_id is not None:
+            body['log_delivery_configuration_id'] = self.log_delivery_configuration_id
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateLogDeliveryConfigurationStatusRequest:
         """Deserializes the UpdateLogDeliveryConfigurationStatusRequest from a dictionary."""
@@ -976,6 +1430,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedCreateLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration:
+            body['log_delivery_configuration'] = self.log_delivery_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedCreateLogDeliveryConfiguration:
         """Deserializes the WrappedCreateLogDeliveryConfiguration from a dictionary."""
@@ -994,6 +1455,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configuration'] = self.log_delivery_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedLogDeliveryConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configuration:
+            body['log_delivery_configuration'] = self.log_delivery_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfiguration:
         """Deserializes the WrappedLogDeliveryConfiguration from a dictionary."""
@@ -1012,6 +1480,13 @@ def as_dict(self) -> dict:
             body['log_delivery_configurations'] = [v.as_dict() for v in self.log_delivery_configurations]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WrappedLogDeliveryConfigurations into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_delivery_configurations:
+            body['log_delivery_configurations'] = self.log_delivery_configurations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WrappedLogDeliveryConfigurations:
         """Deserializes the WrappedLogDeliveryConfigurations from a dictionary."""
@@ -1068,6 +1543,156 @@ def download(self,
         return DownloadResponse.from_dict(res)
 
 
+class BudgetPolicyAPI:
+    """A service serves REST API about Budget policies"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               custom_tags: Optional[List[compute.CustomPolicyTag]] = None,
+               policy_name: Optional[str] = None,
+               request_id: Optional[str] = None) -> BudgetPolicy:
+        """Create a budget policy.
+        
+        Creates a new policy.
+        
+        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+          A list of tags defined by the customer. At most 40 entries are allowed per policy.
+        :param policy_name: str (optional)
+          The name of the policy. - Must be unique among active policies. - Can contain only the
+          characters 0-9, a-z, A-Z, -, =, ., :, /, @, _, + and whitespace.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+          recommended. This request is only idempotent if a `request_id` is provided.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+        body = {}
+        if custom_tags is not None: body['custom_tags'] = [v.as_dict() for v in custom_tags]
+        if policy_name is not None: body['policy_name'] = policy_name
+        if request_id is not None: body['request_id'] = request_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
+                           body=body,
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
+
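A hedged usage sketch for create; the `budget_policy` accessor name on `AccountClient` is an assumption, while the keyword arguments match the signature above.

```python
import uuid

from databricks.sdk import AccountClient

a = AccountClient()
# Supplying a request_id makes retries idempotent: a retried call with the
# same UUID will not create a second policy.
policy = a.budget_policy.create(policy_name='team-ml-budget',
                                request_id=str(uuid.uuid4()))
print(policy.policy_id)
```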
+    def delete(self, policy_id: str):
+        """Delete a budget policy.
+        
+        Deletes a policy.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                     headers=headers)
+
+    def get(self, policy_id: str) -> BudgetPolicy:
+        """Get a budget policy.
+        
+        Retrieves a policy by its ID.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
+
+    def list(self,
+             *,
+             filter_by: Optional[Filter] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None,
+             sort_spec: Optional[SortSpec] = None) -> Iterator[BudgetPolicy]:
+        """List policies.
+        
+        Lists all policies. Policies are returned in ascending alphabetical order of their names.
+        
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+          subsequent page. If unspecified, the first page will be returned.
+          
+          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+          call that provided the page token.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+        
+        :returns: Iterator over :class:`BudgetPolicy`
+        """
+
+        query = {}
+        if filter_by is not None: query['filter_by'] = filter_by.as_dict()
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        if sort_spec is not None: query['sort_spec'] = sort_spec.as_dict()
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/accounts/{self._api.account_id}/budget-policies',
+                                query=query,
+                                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield BudgetPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
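The generator above handles the `page_token` plumbing itself, so callers never paginate by hand. A sketch combining the `Filter` and `SortSpec` types defined earlier (accessor name assumed as in the previous sketch):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import Filter, SortSpec, SortSpecField

a = AccountClient()
# Iteration transparently follows next_page_token until the service stops returning one.
for policy in a.budget_policy.list(filter_by=Filter(policy_name='team-'),
                                   sort_spec=SortSpec(field=SortSpecField.POLICY_NAME)):
    print(policy.policy_id, policy.policy_name)
```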
+    def update(self,
+               policy_id: str,
+               *,
+               limit_config: Optional[LimitConfig] = None,
+               policy: Optional[BudgetPolicy] = None) -> BudgetPolicy:
+        """Update a budget policy.
+        
+        Updates a policy.
+        
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This field is redundant, as LimitConfig is already part of the BudgetPolicy.
+        :param policy: :class:`BudgetPolicy` (optional)
+          Contains the BudgetPolicy details.
+        
+        :returns: :class:`BudgetPolicy`
+        """
+        body = policy.as_dict()
+        query = {}
+        if limit_config is not None: query['limit_config'] = limit_config.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.1/accounts/{self._api.account_id}/budget-policies/{policy_id}',
+                           query=query,
+                           body=body,
+                           headers=headers)
+        return BudgetPolicy.from_dict(res)
+
+
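And the update counterpart. As with `apps.update` above, `policy` is typed `Optional` but `policy.as_dict()` is called unconditionally, so a `BudgetPolicy` must be supplied (accessor name assumed as before):

```python
from databricks.sdk import AccountClient
from databricks.sdk.service.billing import BudgetPolicy

a = AccountClient()
renamed = a.budget_policy.update(
    'abc-123',
    policy=BudgetPolicy(policy_id='abc-123', policy_name='team-ml-renamed'))
```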
 class BudgetsAPI:
     """These APIs manage budget configurations for this account. Budgets enable you to monitor usage across your
     account. You can set up budgets to either track account-wide spending, or apply filters to track the
@@ -1121,7 +1746,7 @@ def get(self, budget_id: str) -> GetBudgetConfigurationResponse:
         Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          The Databricks budget configuration ID.
+          The budget configuration ID.
         
         :returns: :class:`GetBudgetConfigurationResponse`
         """
diff --git a/databricks/sdk/service/catalog.py b/databricks/sdk/service/catalog.py
index b149dbbaa..83d7de4e8 100755
--- a/databricks/sdk/service/catalog.py
+++ b/databricks/sdk/service/catalog.py
@@ -3,11 +3,15 @@
 from __future__ import annotations
 
 import logging
+import random
+import time
 from dataclasses import dataclass
+from datetime import timedelta
 from enum import Enum
-from typing import Dict, Iterator, List, Optional
+from typing import Callable, Dict, Iterator, List, Optional
 
-from ._internal import _enum, _from_dict, _repeated_dict, _repeated_enum
+from ..errors import OperationFailed
+from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
 _LOG = logging.getLogger('databricks.sdk')
 
@@ -24,6 +28,12 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastore:
         """Deserializes the AccountsCreateMetastore from a dictionary."""
@@ -48,6 +58,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateMetastoreAssignment:
         """Deserializes the AccountsCreateMetastoreAssignment from a dictionary."""
@@ -70,6 +88,13 @@ def as_dict(self) -> dict:
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsCreateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsCreateStorageCredential:
         """Deserializes the AccountsCreateStorageCredential from a dictionary."""
@@ -87,6 +112,12 @@ def as_dict(self) -> dict:
         if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreAssignment:
         """Deserializes the AccountsMetastoreAssignment from a dictionary."""
@@ -103,6 +134,12 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsMetastoreInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsMetastoreInfo:
         """Deserializes the AccountsMetastoreInfo from a dictionary."""
@@ -119,6 +156,12 @@ def as_dict(self) -> dict:
         if self.credential_info: body['credential_info'] = self.credential_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsStorageCredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsStorageCredentialInfo:
         """Deserializes the AccountsStorageCredentialInfo from a dictionary."""
@@ -139,6 +182,13 @@ def as_dict(self) -> dict:
         if self.metastore_info: body['metastore_info'] = self.metastore_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.metastore_info: body['metastore_info'] = self.metastore_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastore:
         """Deserializes the AccountsUpdateMetastore from a dictionary."""
@@ -164,6 +214,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastore_assignment: body['metastore_assignment'] = self.metastore_assignment
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateMetastoreAssignment:
         """Deserializes the AccountsUpdateMetastoreAssignment from a dictionary."""
@@ -191,6 +249,15 @@ def as_dict(self) -> dict:
             body['storage_credential_name'] = self.storage_credential_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountsUpdateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_info: body['credential_info'] = self.credential_info
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccountsUpdateStorageCredential:
         """Deserializes the AccountsUpdateStorageCredential from a dictionary."""
@@ -222,6 +289,15 @@ def as_dict(self) -> dict:
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ArtifactAllowlistInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactAllowlistInfo:
         """Deserializes the ArtifactAllowlistInfo from a dictionary."""
@@ -246,6 +322,13 @@ def as_dict(self) -> dict:
         if self.match_type is not None: body['match_type'] = self.match_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ArtifactMatcher into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact is not None: body['artifact'] = self.artifact
+        if self.match_type is not None: body['match_type'] = self.match_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ArtifactMatcher:
         """Deserializes the ArtifactMatcher from a dictionary."""
@@ -268,6 +351,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AssignResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AssignResponse:
         """Deserializes the AssignResponse from a dictionary."""
@@ -301,6 +389,15 @@ def as_dict(self) -> dict:
         if self.session_token is not None: body['session_token'] = self.session_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
@@ -310,6 +407,44 @@ def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
                    session_token=d.get('session_token', None))
 
 
+@dataclass
+class AwsIamRole:
+    """The AWS IAM role configuration"""
+
+    external_id: Optional[str] = None
+    """The external ID used in role assumption to prevent the confused deputy problem."""
+
+    role_arn: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the AWS IAM role used to vend temporary credentials."""
+
+    unity_catalog_iam_arn: Optional[str] = None
+    """The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity
+    that is going to assume the AWS IAM role."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsIamRole into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AwsIamRole:
+        """Deserializes the AwsIamRole from a dictionary."""
+        return cls(external_id=d.get('external_id', None),
+                   role_arn=d.get('role_arn', None),
+                   unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
+
+
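A minimal construction sketch for the new unified `AwsIamRole` (placeholder ARN; `external_id` and `unity_catalog_iam_arn` are described above as Databricks-managed, so they are left unset here):

```python
from databricks.sdk.service.catalog import AwsIamRole

role = AwsIamRole(role_arn='arn:aws:iam::123456789012:role/uc-access')
# as_dict drops unset optional fields, so only role_arn appears in the body.
assert role.as_dict() == {'role_arn': 'arn:aws:iam::123456789012:role/uc-access'}
```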
 @dataclass
 class AwsIamRoleRequest:
     role_arn: str
@@ -321,6 +456,12 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRoleRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleRequest:
         """Deserializes the AwsIamRoleRequest from a dictionary."""
@@ -347,6 +488,14 @@ def as_dict(self) -> dict:
         if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsIamRoleResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        if self.unity_catalog_iam_arn is not None: body['unity_catalog_iam_arn'] = self.unity_catalog_iam_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleResponse:
         """Deserializes the AwsIamRoleResponse from a dictionary."""
@@ -355,6 +504,78 @@ def from_dict(cls, d: Dict[str, any]) -> AwsIamRoleResponse:
                    unity_catalog_iam_arn=d.get('unity_catalog_iam_arn', None))
 
 
+@dataclass
+class AzureActiveDirectoryToken:
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
+    aad_token: Optional[str] = None
+    """Opaque token that contains claims that you can use in Azure Active Directory to access cloud
+    services."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureActiveDirectoryToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureActiveDirectoryToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aad_token is not None: body['aad_token'] = self.aad_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureActiveDirectoryToken:
+        """Deserializes the AzureActiveDirectoryToken from a dictionary."""
+        return cls(aad_token=d.get('aad_token', None))
+
+
+@dataclass
+class AzureManagedIdentity:
+    """The Azure managed identity configuration."""
+
+    access_connector_id: str
+    """The Azure resource ID of the Azure Databricks Access Connector. Use the format
+    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.Databricks/accessConnectors/{connector-name}`."""
+
+    credential_id: Optional[str] = None
+    """The Databricks internal ID that represents this managed identity. This field is only used to
+    persist the credential_id once it is fetched from the credentials manager; because we only use
+    the protobuf serializer to store credentials, this ID gets persisted to the database."""
+
+    managed_identity_id: Optional[str] = None
+    """The Azure resource ID of the managed identity. Use the format,
+    `/subscriptions/{guid}/resourceGroups/{rg-name}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identity-name}`
+    This is only available for user-assigned identities. For system-assigned identities, the
+    access_connector_id is used to identify the identity. If this field is not provided, then we
+    assume the AzureManagedIdentity is using the system-assigned identity."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureManagedIdentity into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentity into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentity:
+        """Deserializes the AzureManagedIdentity from a dictionary."""
+        return cls(access_connector_id=d.get('access_connector_id', None),
+                   credential_id=d.get('credential_id', None),
+                   managed_identity_id=d.get('managed_identity_id', None))
+
+
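+# Illustrative sketch (editor's note): constructing an AzureManagedIdentity using
+# the documented resource-ID format; the subscription GUID and names are hypothetical.
+#
+#   mi = AzureManagedIdentity(
+#       access_connector_id='/subscriptions/00000000-0000-0000-0000-000000000000'
+#                           '/resourceGroups/my-rg/providers/Microsoft.Databricks'
+#                           '/accessConnectors/my-connector')
+#   # managed_identity_id is omitted, so this models a system-assigned identity
+#   # and the key is absent from the serialized body:
+#   assert 'managed_identity_id' not in mi.as_dict()
+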
 @dataclass
 class AzureManagedIdentityRequest:
     access_connector_id: str
@@ -375,6 +596,13 @@ def as_dict(self) -> dict:
         if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentityRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityRequest:
         """Deserializes the AzureManagedIdentityRequest from a dictionary."""
@@ -406,6 +634,14 @@ def as_dict(self) -> dict:
         if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureManagedIdentityResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_connector_id is not None: body['access_connector_id'] = self.access_connector_id
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.managed_identity_id is not None: body['managed_identity_id'] = self.managed_identity_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
         """Deserializes the AzureManagedIdentityResponse from a dictionary."""
@@ -416,6 +652,8 @@ def from_dict(cls, d: Dict[str, any]) -> AzureManagedIdentityResponse:
 
 @dataclass
 class AzureServicePrincipal:
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
+
     directory_id: str
     """The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application."""
 
@@ -433,6 +671,14 @@ def as_dict(self) -> dict:
         if self.directory_id is not None: body['directory_id'] = self.directory_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureServicePrincipal into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.application_id is not None: body['application_id'] = self.application_id
+        if self.client_secret is not None: body['client_secret'] = self.client_secret
+        if self.directory_id is not None: body['directory_id'] = self.directory_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureServicePrincipal:
         """Deserializes the AzureServicePrincipal from a dictionary."""
@@ -455,6 +701,12 @@ def as_dict(self) -> dict:
         if self.sas_token is not None: body['sas_token'] = self.sas_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureUserDelegationSas into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sas_token is not None: body['sas_token'] = self.sas_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
         """Deserializes the AzureUserDelegationSas from a dictionary."""
@@ -469,6 +721,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRefreshResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRefreshResponse:
         """Deserializes the CancelRefreshResponse from a dictionary."""
@@ -530,9 +787,6 @@ class CatalogInfo:
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
 
-    securable_kind: Optional[CatalogInfoSecurableKind] = None
-    """Kind of catalog securable."""
-
     securable_type: Optional[str] = None
 
     share_name: Optional[str] = None
@@ -574,7 +828,36 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provider_name is not None: body['provider_name'] = self.provider_name
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CatalogInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.share_name is not None: body['share_name'] = self.share_name
         if self.storage_location is not None: body['storage_location'] = self.storage_location
@@ -605,7 +888,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
                    properties=d.get('properties', None),
                    provider_name=d.get('provider_name', None),
                    provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
-                   securable_kind=_enum(d, 'securable_kind', CatalogInfoSecurableKind),
                    securable_type=d.get('securable_type', None),
                    share_name=d.get('share_name', None),
                    storage_location=d.get('storage_location', None),
@@ -614,24 +896,6 @@ def from_dict(cls, d: Dict[str, any]) -> CatalogInfo:
                    updated_by=d.get('updated_by', None))
 
 
-class CatalogInfoSecurableKind(Enum):
-    """Kind of catalog securable."""
-
-    CATALOG_DELTASHARING = 'CATALOG_DELTASHARING'
-    CATALOG_FOREIGN_BIGQUERY = 'CATALOG_FOREIGN_BIGQUERY'
-    CATALOG_FOREIGN_DATABRICKS = 'CATALOG_FOREIGN_DATABRICKS'
-    CATALOG_FOREIGN_MYSQL = 'CATALOG_FOREIGN_MYSQL'
-    CATALOG_FOREIGN_POSTGRESQL = 'CATALOG_FOREIGN_POSTGRESQL'
-    CATALOG_FOREIGN_REDSHIFT = 'CATALOG_FOREIGN_REDSHIFT'
-    CATALOG_FOREIGN_SNOWFLAKE = 'CATALOG_FOREIGN_SNOWFLAKE'
-    CATALOG_FOREIGN_SQLDW = 'CATALOG_FOREIGN_SQLDW'
-    CATALOG_FOREIGN_SQLSERVER = 'CATALOG_FOREIGN_SQLSERVER'
-    CATALOG_INTERNAL = 'CATALOG_INTERNAL'
-    CATALOG_STANDARD = 'CATALOG_STANDARD'
-    CATALOG_SYSTEM = 'CATALOG_SYSTEM'
-    CATALOG_SYSTEM_DELTASHARING = 'CATALOG_SYSTEM_DELTASHARING'
-
-
 class CatalogIsolationMode(Enum):
     """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
@@ -666,6 +930,14 @@ def as_dict(self) -> dict:
         if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudflareApiToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudflareApiToken:
         """Deserializes the CloudflareApiToken from a dictionary."""
@@ -700,7 +972,6 @@ class ColumnInfo:
     """Full data type specification, JSON-serialized."""
 
     type_name: Optional[ColumnTypeName] = None
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     type_precision: Optional[int] = None
     """Digits of precision; required for DecimalTypes."""
@@ -728,6 +999,23 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.mask: body['mask'] = self.mask
+        if self.name is not None: body['name'] = self.name
+        if self.nullable is not None: body['nullable'] = self.nullable
+        if self.partition_index is not None: body['partition_index'] = self.partition_index
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -762,6 +1050,13 @@ def as_dict(self) -> dict:
         if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnMask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.using_column_names: body['using_column_names'] = self.using_column_names
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
         """Deserializes the ColumnMask from a dictionary."""
@@ -770,7 +1065,6 @@ def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
 
 
 class ColumnTypeName(Enum):
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     ARRAY = 'ARRAY'
     BINARY = 'BINARY'
@@ -793,6 +1087,7 @@ class ColumnTypeName(Enum):
     TIMESTAMP = 'TIMESTAMP'
     TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
     USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
+    VARIANT = 'VARIANT'
 
 
 @dataclass
@@ -839,9 +1134,6 @@ class ConnectionInfo:
     read_only: Optional[bool] = None
     """If the connection is read only."""
 
-    securable_kind: Optional[ConnectionInfoSecurableKind] = None
-    """Kind of connection securable."""
-
     securable_type: Optional[str] = None
 
     updated_at: Optional[int] = None
@@ -870,7 +1162,29 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         if self.provisioning_info: body['provisioning_info'] = self.provisioning_info.as_dict()
         if self.read_only is not None: body['read_only'] = self.read_only
-        if self.securable_kind is not None: body['securable_kind'] = self.securable_kind.value
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConnectionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_type is not None: body['credential_type'] = self.credential_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.provisioning_info: body['provisioning_info'] = self.provisioning_info
+        if self.read_only is not None: body['read_only'] = self.read_only
         if self.securable_type is not None: body['securable_type'] = self.securable_type
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         if self.updated_by is not None: body['updated_by'] = self.updated_by
@@ -894,31 +1208,12 @@ def from_dict(cls, d: Dict[str, any]) -> ConnectionInfo:
                    properties=d.get('properties', None),
                    provisioning_info=_from_dict(d, 'provisioning_info', ProvisioningInfo),
                    read_only=d.get('read_only', None),
-                   securable_kind=_enum(d, 'securable_kind', ConnectionInfoSecurableKind),
                    securable_type=d.get('securable_type', None),
                    updated_at=d.get('updated_at', None),
                    updated_by=d.get('updated_by', None),
                    url=d.get('url', None))
 
 
-class ConnectionInfoSecurableKind(Enum):
-    """Kind of connection securable."""
-
-    CONNECTION_BIGQUERY = 'CONNECTION_BIGQUERY'
-    CONNECTION_BUILTIN_HIVE_METASTORE = 'CONNECTION_BUILTIN_HIVE_METASTORE'
-    CONNECTION_DATABRICKS = 'CONNECTION_DATABRICKS'
-    CONNECTION_EXTERNAL_HIVE_METASTORE = 'CONNECTION_EXTERNAL_HIVE_METASTORE'
-    CONNECTION_GLUE = 'CONNECTION_GLUE'
-    CONNECTION_HTTP_BEARER = 'CONNECTION_HTTP_BEARER'
-    CONNECTION_MYSQL = 'CONNECTION_MYSQL'
-    CONNECTION_ONLINE_CATALOG = 'CONNECTION_ONLINE_CATALOG'
-    CONNECTION_POSTGRESQL = 'CONNECTION_POSTGRESQL'
-    CONNECTION_REDSHIFT = 'CONNECTION_REDSHIFT'
-    CONNECTION_SNOWFLAKE = 'CONNECTION_SNOWFLAKE'
-    CONNECTION_SQLDW = 'CONNECTION_SQLDW'
-    CONNECTION_SQLSERVER = 'CONNECTION_SQLSERVER'
-
-
 class ConnectionType(Enum):
     """The type of connection."""
 
@@ -961,6 +1256,16 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContinuousUpdateStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContinuousUpdateStatus:
         """Deserializes the ContinuousUpdateStatus from a dictionary."""
@@ -1011,6 +1316,19 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.provider_name is not None: body['provider_name'] = self.provider_name
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCatalog:
         """Deserializes the CreateCatalog from a dictionary."""
@@ -1055,8 +1373,19 @@ def as_dict(self) -> dict:
         if self.read_only is not None: body['read_only'] = self.read_only
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateConnection into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.connection_type is not None: body['connection_type'] = self.connection_type
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.properties: body['properties'] = self.properties
+        if self.read_only is not None: body['read_only'] = self.read_only
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
         """Deserializes the CreateConnection from a dictionary."""
         return cls(comment=d.get('comment', None),
                    connection_type=_enum(d, 'connection_type', ConnectionType),
@@ -1066,6 +1395,83 @@ def from_dict(cls, d: Dict[str, any]) -> CreateConnection:
                    read_only=d.get('read_only', None))
 
 
+@dataclass
+class CreateCredentialRequest:
+    name: str
+    """The credential name. The name must be unique among storage and service credentials within the
+    metastore."""
+
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
+
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
+
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
+    purpose: Optional[CredentialPurpose] = None
+    """Indicates the purpose of the credential."""
+
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
+
+    skip_validation: Optional[bool] = None
+    """Optional. Supplying true to this argument skips validation of the created set of credentials."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
+        """Deserializes the CreateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
+                   comment=d.get('comment', None),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
+                   name=d.get('name', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
+                   skip_validation=d.get('skip_validation', None))
+
+
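+# Illustrative sketch (editor's note): how the two serializers treat a nested
+# field on this request. All values are hypothetical.
+#
+#   req = CreateCredentialRequest(
+#       name='my-credential',
+#       aws_iam_role=AwsIamRole(role_arn='arn:aws:iam::123456789012:role/example'))
+#   req.as_dict()['aws_iam_role']          # plain dict: {'role_arn': '...'}
+#   req.as_shallow_dict()['aws_iam_role']  # the AwsIamRole instance itself
+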
 @dataclass
 class CreateExternalLocation:
     name: str
@@ -1111,6 +1517,20 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
@@ -1218,6 +1638,32 @@ def as_dict(self) -> dict:
         if self.sql_path is not None: body['sql_path'] = self.sql_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFunction into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.input_params: body['input_params'] = self.input_params
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params
+        if self.routine_body is not None: body['routine_body'] = self.routine_body
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunction:
         """Deserializes the CreateFunction from a dictionary."""
@@ -1261,6 +1707,12 @@ def as_dict(self) -> dict:
         if self.function_info: body['function_info'] = self.function_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFunctionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_info: body['function_info'] = self.function_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFunctionRequest:
         """Deserializes the CreateFunctionRequest from a dictionary."""
@@ -1278,7 +1730,7 @@ class CreateFunctionRoutineBody(Enum):
 
 
 class CreateFunctionSecurityType(Enum):
-    """Function security type."""
+    """The security type of the function."""
 
     DEFINER = 'DEFINER'
 
@@ -1312,6 +1764,14 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastore:
         """Deserializes the CreateMetastore from a dictionary."""
@@ -1340,6 +1800,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMetastoreAssignment:
         """Deserializes the CreateMetastoreAssignment from a dictionary."""
@@ -1419,6 +1887,27 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateMonitor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.skip_builtin_dashboard is not None:
+            body['skip_builtin_dashboard'] = self.skip_builtin_dashboard
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
         """Deserializes the CreateMonitor from a dictionary."""
@@ -1439,29 +1928,6 @@ def from_dict(cls, d: Dict[str, any]) -> CreateMonitor:
                    warehouse_id=d.get('warehouse_id', None))
 
 
-@dataclass
-class CreateOnlineTableRequest:
-    """Online Table information."""
-
-    name: Optional[str] = None
-    """Full three-part (catalog, schema, table) name of the table."""
-
-    spec: Optional[OnlineTableSpec] = None
-    """Specification of the online table."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateOnlineTableRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.name is not None: body['name'] = self.name
-        if self.spec: body['spec'] = self.spec.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateOnlineTableRequest:
-        """Deserializes the CreateOnlineTableRequest from a dictionary."""
-        return cls(name=d.get('name', None), spec=_from_dict(d, 'spec', OnlineTableSpec))
-
-
 @dataclass
 class CreateRegisteredModelRequest:
     catalog_name: str
@@ -1489,6 +1955,16 @@ def as_dict(self) -> dict:
         if self.storage_location is not None: body['storage_location'] = self.storage_location
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegisteredModelRequest:
         """Deserializes the CreateRegisteredModelRequest from a dictionary."""
@@ -1507,6 +1983,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -1540,6 +2021,16 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.properties: body['properties'] = self.properties
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateSchema:
         """Deserializes the CreateSchema from a dictionary."""
@@ -1595,6 +2086,21 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.name is not None: body['name'] = self.name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageCredential:
         """Deserializes the CreateStorageCredential from a dictionary."""
@@ -1627,6 +2133,13 @@ def as_dict(self) -> dict:
         if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.constraint: body['constraint'] = self.constraint
+        if self.full_name_arg is not None: body['full_name_arg'] = self.full_name_arg
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTableConstraint:
         """Deserializes the CreateTableConstraint from a dictionary."""
@@ -1664,6 +2177,17 @@ def as_dict(self) -> dict:
         if self.volume_type is not None: body['volume_type'] = self.volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
         """Deserializes the CreateVolumeRequestContent from a dictionary."""
@@ -1675,88 +2199,313 @@ def from_dict(cls, d: Dict[str, any]) -> CreateVolumeRequestContent:
                    volume_type=_enum(d, 'volume_type', VolumeType))
 
 
-class CredentialType(Enum):
-    """The type of credential."""
+@dataclass
+class CredentialInfo:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
 
-    BEARER_TOKEN = 'BEARER_TOKEN'
-    USERNAME_PASSWORD = 'USERNAME_PASSWORD'
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
 
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
 
-@dataclass
-class CurrentWorkspaceBindings:
-    """Currently assigned workspaces"""
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
 
-    workspaces: Optional[List[int]] = None
-    """A list of workspace IDs."""
+    created_at: Optional[int] = None
+    """Time at which this credential was created, in epoch milliseconds."""
 
-    def as_dict(self) -> dict:
-        """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.workspaces: body['workspaces'] = [v for v in self.workspaces]
-        return body
+    created_by: Optional[str] = None
+    """Username of credential creator."""
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings:
-        """Deserializes the CurrentWorkspaceBindings from a dictionary."""
-        return cls(workspaces=d.get('workspaces', None))
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
 
+    full_name: Optional[str] = None
+    """The full name of the credential."""
 
-class DataSourceFormat(Enum):
-    """Data source format"""
+    id: Optional[str] = None
+    """The unique identifier of the credential."""
 
-    AVRO = 'AVRO'
-    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
-    CSV = 'CSV'
-    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
-    DELTA = 'DELTA'
-    DELTASHARING = 'DELTASHARING'
-    HIVE_CUSTOM = 'HIVE_CUSTOM'
-    HIVE_SERDE = 'HIVE_SERDE'
-    JSON = 'JSON'
-    MYSQL_FORMAT = 'MYSQL_FORMAT'
-    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
-    ORC = 'ORC'
-    PARQUET = 'PARQUET'
-    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
-    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
-    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
-    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
-    SQLDW_FORMAT = 'SQLDW_FORMAT'
-    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
-    TEXT = 'TEXT'
-    UNITY_CATALOG = 'UNITY_CATALOG'
-    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
-    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'
+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
+    metastore_id: Optional[str] = None
+    """Unique identifier of the parent metastore."""
 
-@dataclass
-class DatabricksGcpServiceAccountRequest:
+    name: Optional[str] = None
+    """The credential name. The name must be unique among storage and service credentials within the
+    metastore."""
 
-    def as_dict(self) -> dict:
-        """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
+    owner: Optional[str] = None
+    """Username of current owner of credential."""
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest:
-        """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
-        return cls()
+    purpose: Optional[CredentialPurpose] = None
+    """Indicates the purpose of the credential."""
+
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
 
+    updated_at: Optional[int] = None
+    """Time at which this credential was last modified, in epoch milliseconds."""
 
-@dataclass
-class DatabricksGcpServiceAccountResponse:
-    credential_id: Optional[str] = None
-    """The Databricks internal ID that represents this service account. This is an output-only field."""
+    updated_by: Optional[str] = None
+    """Username of user who last modified the credential."""
 
-    email: Optional[str] = None
-    """The email of the service account. This is an output-only field."""
+    used_for_managed_storage: Optional[bool] = None
+    """Whether this credential is the current metastore's root storage credential. Only applicable when
+    purpose is **STORAGE**."""
 
     def as_dict(self) -> dict:
-        """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.credential_id is not None: body['credential_id'] = self.credential_id
-        if self.email is not None: body['email'] = self.email
-        return body
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
+        """Deserializes the CredentialInfo from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
+                   comment=d.get('comment', None),
+                   created_at=d.get('created_at', None),
+                   created_by=d.get('created_by', None),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
+                   full_name=d.get('full_name', None),
+                   id=d.get('id', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
+                   metastore_id=d.get('metastore_id', None),
+                   name=d.get('name', None),
+                   owner=d.get('owner', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
+                   updated_at=d.get('updated_at', None),
+                   updated_by=d.get('updated_by', None),
+                   used_for_managed_storage=d.get('used_for_managed_storage', None))
+
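+# Illustrative sketch (editor's note): enum fields also differ between the two
+# serializers. Hypothetical values shown.
+#
+#   info = CredentialInfo(name='cred', purpose=CredentialPurpose.STORAGE)
+#   info.as_dict()['purpose']          # 'STORAGE' (the enum's .value)
+#   info.as_shallow_dict()['purpose']  # CredentialPurpose.STORAGE (the member)
+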
+
+class CredentialPurpose(Enum):
+
+    SERVICE = 'SERVICE'
+    STORAGE = 'STORAGE'
+
+
+class CredentialType(Enum):
+    """The type of credential."""
+
+    BEARER_TOKEN = 'BEARER_TOKEN'
+    USERNAME_PASSWORD = 'USERNAME_PASSWORD'
+
+
+@dataclass
+class CredentialValidationResult:
+    message: Optional[str] = None
+    """Error message would exist when the result does not equal to **PASS**."""
+
+    result: Optional[ValidateCredentialResult] = None
+    """The results of the tested operation."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CredentialValidationResult into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialValidationResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.result is not None: body['result'] = self.result
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CredentialValidationResult:
+        """Deserializes the CredentialValidationResult from a dictionary."""
+        return cls(message=d.get('message', None), result=_enum(d, 'result', ValidateCredentialResult))
+
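+# Illustrative sketch (editor's note): deserializing a validation result from a
+# hypothetical response dict and surfacing the failure message.
+#
+#   r = CredentialValidationResult.from_dict({'result': 'PASS'})
+#   if r.result is not ValidateCredentialResult.PASS:
+#       raise RuntimeError(r.message)
+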
+
+@dataclass
+class CurrentWorkspaceBindings:
+    """Currently assigned workspaces"""
+
+    workspaces: Optional[List[int]] = None
+    """A list of workspace IDs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.workspaces: body['workspaces'] = [v for v in self.workspaces]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CurrentWorkspaceBindings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspaces: body['workspaces'] = self.workspaces
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CurrentWorkspaceBindings:
+        """Deserializes the CurrentWorkspaceBindings from a dictionary."""
+        return cls(workspaces=d.get('workspaces', None))
+
+
+class DataSourceFormat(Enum):
+    """Data source format"""
+
+    AVRO = 'AVRO'
+    BIGQUERY_FORMAT = 'BIGQUERY_FORMAT'
+    CSV = 'CSV'
+    DATABRICKS_FORMAT = 'DATABRICKS_FORMAT'
+    DELTA = 'DELTA'
+    DELTASHARING = 'DELTASHARING'
+    HIVE_CUSTOM = 'HIVE_CUSTOM'
+    HIVE_SERDE = 'HIVE_SERDE'
+    JSON = 'JSON'
+    MYSQL_FORMAT = 'MYSQL_FORMAT'
+    NETSUITE_FORMAT = 'NETSUITE_FORMAT'
+    ORC = 'ORC'
+    PARQUET = 'PARQUET'
+    POSTGRESQL_FORMAT = 'POSTGRESQL_FORMAT'
+    REDSHIFT_FORMAT = 'REDSHIFT_FORMAT'
+    SALESFORCE_FORMAT = 'SALESFORCE_FORMAT'
+    SNOWFLAKE_FORMAT = 'SNOWFLAKE_FORMAT'
+    SQLDW_FORMAT = 'SQLDW_FORMAT'
+    SQLSERVER_FORMAT = 'SQLSERVER_FORMAT'
+    TEXT = 'TEXT'
+    UNITY_CATALOG = 'UNITY_CATALOG'
+    VECTOR_INDEX_FORMAT = 'VECTOR_INDEX_FORMAT'
+    WORKDAY_RAAS_FORMAT = 'WORKDAY_RAAS_FORMAT'
+
+
+@dataclass
+class DatabricksGcpServiceAccount:
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
+    credential_id: Optional[str] = None
+    """The Databricks internal ID that represents this managed identity. This field is only used to
+    persist the credential_id once it is fetched from the credentials manager; because we only use
+    the protobuf serializer to store credentials, this ID gets persisted to the database."""
+
+    email: Optional[str] = None
+    """The email of the service account."""
+
+    private_key_id: Optional[str] = None
+    """The ID that represents the private key for this Service Account"""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccount into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        if self.private_key_id is not None: body['private_key_id'] = self.private_key_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccount:
+        """Deserializes the DatabricksGcpServiceAccount from a dictionary."""
+        return cls(credential_id=d.get('credential_id', None),
+                   email=d.get('email', None),
+                   private_key_id=d.get('private_key_id', None))
+
+
+@dataclass
+class DatabricksGcpServiceAccountRequest:
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountRequest:
+        """Deserializes the DatabricksGcpServiceAccountRequest from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DatabricksGcpServiceAccountResponse:
+    credential_id: Optional[str] = None
+    """The Databricks internal ID that represents this service account. This is an output-only field."""
+
+    email: Optional[str] = None
+    """The email of the service account. This is an output-only field."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksGcpServiceAccountResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.email is not None: body['email'] = self.email
+        return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksGcpServiceAccountResponse:
@@ -1772,12 +2521,36 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAliasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAliasResponse:
         """Deserializes the DeleteAliasResponse from a dictionary."""
         return cls()
 
 
+@dataclass
+class DeleteCredentialResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCredentialResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialResponse:
+        """Deserializes the DeleteCredentialResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeleteResponse:
 
@@ -1786,6 +2559,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -1806,6 +2584,12 @@ def as_dict(self) -> dict:
         if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaRuntimePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_runtime_properties: body['delta_runtime_properties'] = self.delta_runtime_properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaRuntimePropertiesKvPairs:
         """Deserializes the DeltaRuntimePropertiesKvPairs from a dictionary."""
@@ -1830,6 +2614,13 @@ def as_dict(self) -> dict:
         if self.table: body['table'] = self.table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function: body['function'] = self.function
+        if self.table: body['table'] = self.table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dependency:
         """Deserializes the Dependency from a dictionary."""
@@ -1850,6 +2641,12 @@ def as_dict(self) -> dict:
         if self.dependencies: body['dependencies'] = [v.as_dict() for v in self.dependencies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DependencyList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dependencies: body['dependencies'] = self.dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DependencyList:
         """Deserializes the DependencyList from a dictionary."""
@@ -1864,6 +2661,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableResponse:
         """Deserializes the DisableResponse from a dictionary."""
@@ -1882,6 +2684,12 @@ def as_dict(self) -> dict:
             body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePermissionsList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePermissionsList:
         """Deserializes the EffectivePermissionsList from a dictionary."""
@@ -1910,6 +2718,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePredictiveOptimizationFlag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
+        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePredictiveOptimizationFlag:
         """Deserializes the EffectivePredictiveOptimizationFlag from a dictionary."""
@@ -1948,6 +2764,14 @@ def as_dict(self) -> dict:
         if self.privilege is not None: body['privilege'] = self.privilege.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePrivilege into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited_from_name is not None: body['inherited_from_name'] = self.inherited_from_name
+        if self.inherited_from_type is not None: body['inherited_from_type'] = self.inherited_from_type
+        if self.privilege is not None: body['privilege'] = self.privilege
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilege:
         """Deserializes the EffectivePrivilege from a dictionary."""
@@ -1971,6 +2795,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.as_dict() for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EffectivePrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EffectivePrivilegeAssignment:
         """Deserializes the EffectivePrivilegeAssignment from a dictionary."""
@@ -1994,6 +2825,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnableResponse:
         """Deserializes the EnableResponse from a dictionary."""
@@ -2013,6 +2849,12 @@ def as_dict(self) -> dict:
         if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EncryptionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sse_encryption_details: body['sse_encryption_details'] = self.sse_encryption_details
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EncryptionDetails:
         """Deserializes the EncryptionDetails from a dictionary."""
@@ -2052,7 +2894,6 @@ class ExternalLocationInfo:
     sufficient."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     metastore_id: Optional[str] = None
     """Unique identifier of metastore hosting the external location."""
@@ -2097,6 +2938,28 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLocationInfo:
         """Deserializes the ExternalLocationInfo from a dictionary."""
@@ -2141,6 +3004,14 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FailedStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FailedStatus:
         """Deserializes the FailedStatus from a dictionary."""
@@ -2171,6 +3042,15 @@ def as_dict(self) -> dict:
         if self.parent_table is not None: body['parent_table'] = self.parent_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForeignKeyConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.child_columns: body['child_columns'] = self.child_columns
+        if self.name is not None: body['name'] = self.name
+        if self.parent_columns: body['parent_columns'] = self.parent_columns
+        if self.parent_table is not None: body['parent_table'] = self.parent_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForeignKeyConstraint:
         """Deserializes the ForeignKeyConstraint from a dictionary."""
@@ -2194,6 +3074,12 @@ def as_dict(self) -> dict:
         if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_full_name is not None: body['function_full_name'] = self.function_full_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionDependency:
         """Deserializes the FunctionDependency from a dictionary."""
@@ -2330,6 +3216,41 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.external_language is not None: body['external_language'] = self.external_language
+        if self.external_name is not None: body['external_name'] = self.external_name
+        if self.full_data_type is not None: body['full_data_type'] = self.full_data_type
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.function_id is not None: body['function_id'] = self.function_id
+        if self.input_params: body['input_params'] = self.input_params
+        if self.is_deterministic is not None: body['is_deterministic'] = self.is_deterministic
+        if self.is_null_call is not None: body['is_null_call'] = self.is_null_call
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.parameter_style is not None: body['parameter_style'] = self.parameter_style
+        if self.properties is not None: body['properties'] = self.properties
+        if self.return_params: body['return_params'] = self.return_params
+        if self.routine_body is not None: body['routine_body'] = self.routine_body
+        if self.routine_definition is not None: body['routine_definition'] = self.routine_definition
+        if self.routine_dependencies: body['routine_dependencies'] = self.routine_dependencies
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.security_type is not None: body['security_type'] = self.security_type
+        if self.specific_name is not None: body['specific_name'] = self.specific_name
+        if self.sql_data_access is not None: body['sql_data_access'] = self.sql_data_access
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionInfo:
         """Deserializes the FunctionInfo from a dictionary."""
@@ -2382,7 +3303,7 @@ class FunctionInfoRoutineBody(Enum):
 
 
 class FunctionInfoSecurityType(Enum):
-    """Function security type."""
+    """The security type of the function."""
 
     DEFINER = 'DEFINER'
 
@@ -2404,7 +3325,6 @@ class FunctionParameterInfo:
     """Full data type spec, SQL/catalogString text."""
 
     type_name: ColumnTypeName
-    """Name of type (INT, STRUCT, MAP, etc.)."""
 
     position: int
     """Ordinal position of column (starting at position 0)."""
@@ -2450,6 +3370,23 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionParameterInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.parameter_default is not None: body['parameter_default'] = self.parameter_default
+        if self.parameter_mode is not None: body['parameter_mode'] = self.parameter_mode
+        if self.parameter_type is not None: body['parameter_type'] = self.parameter_type
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_json is not None: body['type_json'] = self.type_json
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfo:
         """Deserializes the FunctionParameterInfo from a dictionary."""
@@ -2478,6 +3415,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FunctionParameterInfos into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FunctionParameterInfos:
         """Deserializes the FunctionParameterInfos from a dictionary."""
@@ -2510,10 +3453,106 @@ def as_dict(self) -> dict:
         if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpOauthToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
+        """Deserializes the GcpOauthToken from a dictionary."""
+        return cls(oauth_token=d.get('oauth_token', None))
+
+
+@dataclass
+class GenerateTemporaryServiceCredentialAzureOptions:
+    """The Azure cloud options to customize the requested temporary credential"""
+
+    resources: Optional[List[str]] = None
+    """The resources to which the temporary Azure credential should apply. These resources are the
+    scopes that are passed to the token provider (see
+    https://learn.microsoft.com/python/api/azure-core/azure.core.credentials.tokencredential?view=azure-python)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.resources: body['resources'] = [v for v in self.resources]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialAzureOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resources: body['resources'] = self.resources
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialAzureOptions:
+        """Deserializes the GenerateTemporaryServiceCredentialAzureOptions from a dictionary."""
+        return cls(resources=d.get('resources', None))
+
+
+@dataclass
+class GenerateTemporaryServiceCredentialGcpOptions:
+    """The GCP cloud options to customize the requested temporary credential"""
+
+    scopes: Optional[List[str]] = None
+    """The scopes to which the temporary GCP credential should apply. These resources are the scopes
+    that are passed to the token provider (see
+    https://google-auth.readthedocs.io/en/latest/reference/google.auth.html#google.auth.credentials.Credentials)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.scopes: body['scopes'] = [v for v in self.scopes]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialGcpOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialGcpOptions:
+        """Deserializes the GenerateTemporaryServiceCredentialGcpOptions from a dictionary."""
+        return cls(scopes=d.get('scopes', None))
+
+
+@dataclass
+class GenerateTemporaryServiceCredentialRequest:
+    credential_name: str
+    """The name of the service credential used to generate a temporary credential"""
+
+    azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None
+    """The Azure cloud options to customize the requested temporary credential"""
+
+    gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
+    """The GCP cloud options to customize the requested temporary credential"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.azure_options: body['azure_options'] = self.azure_options.as_dict()
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.gcp_options: body['gcp_options'] = self.gcp_options.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryServiceCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_options: body['azure_options'] = self.azure_options
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.gcp_options: body['gcp_options'] = self.gcp_options
+        return body
+
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
-        """Deserializes the GcpOauthToken from a dictionary."""
-        return cls(oauth_token=d.get('oauth_token', None))
+    def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryServiceCredentialRequest:
+        """Deserializes the GenerateTemporaryServiceCredentialRequest from a dictionary."""
+        return cls(azure_options=_from_dict(d, 'azure_options',
+                                            GenerateTemporaryServiceCredentialAzureOptions),
+                   credential_name=d.get('credential_name', None),
+                   gcp_options=_from_dict(d, 'gcp_options', GenerateTemporaryServiceCredentialGcpOptions))
 
 
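A sketch of building and serializing the new request type above; the credential name and Azure resource URI are illustrative, and gcp_options works analogously:

    req = GenerateTemporaryServiceCredentialRequest(
        credential_name='my-service-credential',
        azure_options=GenerateTemporaryServiceCredentialAzureOptions(
            resources=['https://storage.azure.com/']))
    assert req.as_dict() == {
        'azure_options': {'resources': ['https://storage.azure.com/']},
        'credential_name': 'my-service-credential',
    }
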
 @dataclass
@@ -2533,6 +3572,13 @@ def as_dict(self) -> dict:
         if self.table_id is not None: body['table_id'] = self.table_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.operation is not None: body['operation'] = self.operation
+        if self.table_id is not None: body['table_id'] = self.table_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
         """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
@@ -2545,6 +3591,11 @@ class GenerateTemporaryTableCredentialResponse:
     """AWS temporary credentials for API authentication. Read more at
     https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
 
+    azure_aad: Optional[AzureActiveDirectoryToken] = None
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
     azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
     """Azure temporary credentials for API authentication. Read more at
     https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
@@ -2568,6 +3619,7 @@ def as_dict(self) -> dict:
         """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
         if self.azure_user_delegation_sas:
             body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
         if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
@@ -2576,10 +3628,23 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenerateTemporaryTableCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.azure_user_delegation_sas: body['azure_user_delegation_sas'] = self.azure_user_delegation_sas
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
         """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
         return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
                    azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
                                                         AzureUserDelegationSas),
                    expiration_time=d.get('expiration_time', None),
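The new azure_aad field deserializes like the other per-cloud credentials in this response. A sketch, assuming AzureActiveDirectoryToken's aad_token field as defined earlier in this module (the token and timestamp values are illustrative):

    resp = GenerateTemporaryTableCredentialResponse.from_dict(
        {'expiration_time': 1700000000000,
         'azure_aad': {'aad_token': 'eyJ0eXAi...'}})
    assert isinstance(resp.azure_aad, AzureActiveDirectoryToken)
    assert resp.aws_temp_credentials is None
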
@@ -2591,6 +3656,7 @@ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRespons
 class GetBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
     STORAGE_CREDENTIAL = 'storage_credential'
 
@@ -2687,6 +3753,38 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetastoreSummaryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.default_data_access_config_id is not None:
+            body['default_data_access_config_id'] = self.default_data_access_config_id
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        if self.storage_root_credential_name is not None:
+            body['storage_root_credential_name'] = self.storage_root_credential_name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetastoreSummaryResponse:
         """Deserializes the GetMetastoreSummaryResponse from a dictionary."""
@@ -2731,6 +3829,12 @@ def as_dict(self) -> dict:
         if self.quota_info: body['quota_info'] = self.quota_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetQuotaResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quota_info: body['quota_info'] = self.quota_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
         """Deserializes the GetQuotaResponse from a dictionary."""
@@ -2738,7 +3842,6 @@ def from_dict(cls, d: Dict[str, any]) -> GetQuotaResponse:
 
 
 class IsolationMode(Enum):
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     ISOLATION_MODE_ISOLATED = 'ISOLATION_MODE_ISOLATED'
     ISOLATION_MODE_OPEN = 'ISOLATION_MODE_OPEN'
@@ -2756,6 +3859,12 @@ def as_dict(self) -> dict:
         if self.workspace_ids: body['workspace_ids'] = [v for v in self.workspace_ids]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAccountMetastoreAssignmentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspace_ids: body['workspace_ids'] = self.workspace_ids
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountMetastoreAssignmentsResponse:
         """Deserializes the ListAccountMetastoreAssignmentsResponse from a dictionary."""
@@ -2774,6 +3883,12 @@ def as_dict(self) -> dict:
             body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAccountStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAccountStorageCredentialsResponse:
         """Deserializes the ListAccountStorageCredentialsResponse from a dictionary."""
@@ -2796,6 +3911,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCatalogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalogs: body['catalogs'] = self.catalogs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCatalogsResponse:
         """Deserializes the ListCatalogsResponse from a dictionary."""
@@ -2819,6 +3941,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListConnectionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connections: body['connections'] = self.connections
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse:
         """Deserializes the ListConnectionsResponse from a dictionary."""
@@ -2826,6 +3955,35 @@ def from_dict(cls, d: Dict[str, any]) -> ListConnectionsResponse:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListCredentialsResponse:
+    credentials: Optional[List[CredentialInfo]] = None
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credentials: body['credentials'] = self.credentials
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
+        """Deserializes the ListCredentialsResponse from a dictionary."""
+        return cls(credentials=_repeated_dict(d, 'credentials', CredentialInfo),
+                   next_page_token=d.get('next_page_token', None))
+
+
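The next_page_token contract documented above is the usual pagination loop. A sketch, where fetch_page is a hypothetical callable that takes a page token and returns a ListCredentialsResponse:

    def iter_credentials(fetch_page):
        page_token = None
        while True:
            resp = fetch_page(page_token)
            for cred in resp.credentials or []:
                yield cred
            page_token = resp.next_page_token
            if not page_token:
                return
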
 @dataclass
 class ListExternalLocationsResponse:
     external_locations: Optional[List[ExternalLocationInfo]] = None
@@ -2843,6 +4001,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExternalLocationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_locations: body['external_locations'] = self.external_locations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExternalLocationsResponse:
         """Deserializes the ListExternalLocationsResponse from a dictionary."""
@@ -2866,6 +4031,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFunctionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.functions: body['functions'] = self.functions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFunctionsResponse:
         """Deserializes the ListFunctionsResponse from a dictionary."""
@@ -2884,6 +4056,12 @@ def as_dict(self) -> dict:
         if self.metastores: body['metastores'] = [v.as_dict() for v in self.metastores]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListMetastoresResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metastores: body['metastores'] = self.metastores
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListMetastoresResponse:
         """Deserializes the ListMetastoresResponse from a dictionary."""
@@ -2905,6 +4083,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListModelVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelVersionsResponse:
         """Deserializes the ListModelVersionsResponse from a dictionary."""
@@ -2928,6 +4113,13 @@ def as_dict(self) -> dict:
         if self.quotas: body['quotas'] = [v.as_dict() for v in self.quotas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQuotasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.quotas: body['quotas'] = self.quotas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQuotasResponse:
         """Deserializes the ListQuotasResponse from a dictionary."""
@@ -2950,6 +4142,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRegisteredModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegisteredModelsResponse:
         """Deserializes the ListRegisteredModelsResponse from a dictionary."""
@@ -2973,6 +4172,13 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSchemasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchemasResponse:
         """Deserializes the ListSchemasResponse from a dictionary."""
@@ -2996,6 +4202,13 @@ def as_dict(self) -> dict:
             body['storage_credentials'] = [v.as_dict() for v in self.storage_credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListStorageCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.storage_credentials: body['storage_credentials'] = self.storage_credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStorageCredentialsResponse:
         """Deserializes the ListStorageCredentialsResponse from a dictionary."""
@@ -3019,6 +4232,13 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.as_dict() for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSystemSchemasResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSystemSchemasResponse:
         """Deserializes the ListSystemSchemasResponse from a dictionary."""
@@ -3042,6 +4262,13 @@ def as_dict(self) -> dict:
         if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTableSummariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.tables: body['tables'] = self.tables
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTableSummariesResponse:
         """Deserializes the ListTableSummariesResponse from a dictionary."""
@@ -3065,6 +4292,13 @@ def as_dict(self) -> dict:
         if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTablesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.tables: body['tables'] = self.tables
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTablesResponse:
         """Deserializes the ListTablesResponse from a dictionary."""
@@ -3088,6 +4322,13 @@ def as_dict(self) -> dict:
         if self.volumes: body['volumes'] = [v.as_dict() for v in self.volumes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVolumesResponseContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.volumes: body['volumes'] = self.volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVolumesResponseContent:
         """Deserializes the ListVolumesResponseContent from a dictionary."""
@@ -3120,6 +4361,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MetastoreAssignment:
         """Deserializes the MetastoreAssignment from a dictionary."""
@@ -3220,6 +4469,38 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MetastoreInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.default_data_access_config_id is not None:
+            body['default_data_access_config_id'] = self.default_data_access_config_id
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+        if self.external_access_enabled is not None:
+            body['external_access_enabled'] = self.external_access_enabled
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.region is not None: body['region'] = self.region
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        if self.storage_root_credential_name is not None:
+            body['storage_root_credential_name'] = self.storage_root_credential_name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MetastoreInfo:
         """Deserializes the MetastoreInfo from a dictionary."""
@@ -3339,6 +4620,31 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aliases: body['aliases'] = self.aliases
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.id is not None: body['id'] = self.id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version_dependencies:
+            body['model_version_dependencies'] = self.model_version_dependencies
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_workspace_id is not None: body['run_workspace_id'] = self.run_workspace_id
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionInfo:
         """Deserializes the ModelVersionInfo from a dictionary."""
@@ -3395,6 +4701,15 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorCronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorCronSchedule:
         """Deserializes the MonitorCronSchedule from a dictionary."""
@@ -3421,6 +4736,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorDataClassificationConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDataClassificationConfig:
         """Deserializes the MonitorDataClassificationConfig from a dictionary."""
@@ -3439,6 +4760,12 @@ def as_dict(self) -> dict:
         if self.email_addresses: body['email_addresses'] = [v for v in self.email_addresses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email_addresses: body['email_addresses'] = self.email_addresses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorDestination:
         """Deserializes the MonitorDestination from a dictionary."""
@@ -3490,6 +4817,18 @@ def as_dict(self) -> dict:
         if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorInferenceLog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.granularities: body['granularities'] = self.granularities
+        if self.label_col is not None: body['label_col'] = self.label_col
+        if self.model_id_col is not None: body['model_id_col'] = self.model_id_col
+        if self.prediction_col is not None: body['prediction_col'] = self.prediction_col
+        if self.prediction_proba_col is not None: body['prediction_proba_col'] = self.prediction_proba_col
+        if self.problem_type is not None: body['problem_type'] = self.problem_type
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInferenceLog:
         """Deserializes the MonitorInferenceLog from a dictionary."""
@@ -3601,6 +4940,33 @@ def as_dict(self) -> dict:
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets_dir is not None: body['assets_dir'] = self.assets_dir
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.drift_metrics_table_name is not None:
+            body['drift_metrics_table_name'] = self.drift_metrics_table_name
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.latest_monitor_failure_msg is not None:
+            body['latest_monitor_failure_msg'] = self.latest_monitor_failure_msg
+        if self.monitor_version is not None: body['monitor_version'] = self.monitor_version
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.profile_metrics_table_name is not None:
+            body['profile_metrics_table_name'] = self.profile_metrics_table_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.status is not None: body['status'] = self.status
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorInfo:
         """Deserializes the MonitorInfo from a dictionary."""
@@ -3672,6 +5038,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorMetric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.input_columns: body['input_columns'] = self.input_columns
+        if self.name is not None: body['name'] = self.name
+        if self.output_data_type is not None: body['output_data_type'] = self.output_data_type
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorMetric:
         """Deserializes the MonitorMetric from a dictionary."""
@@ -3712,6 +5088,14 @@ def as_dict(self) -> dict:
             body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_new_classification_tag_detected:
+            body['on_new_classification_tag_detected'] = self.on_new_classification_tag_detected
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorNotifications:
         """Deserializes the MonitorNotifications from a dictionary."""
@@ -3751,6 +5135,17 @@ def as_dict(self) -> dict:
         if self.trigger is not None: body['trigger'] = self.trigger.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorRefreshInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.message is not None: body['message'] = self.message
+        if self.refresh_id is not None: body['refresh_id'] = self.refresh_id
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        if self.state is not None: body['state'] = self.state
+        if self.trigger is not None: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshInfo:
         """Deserializes the MonitorRefreshInfo from a dictionary."""
@@ -3790,6 +5185,12 @@ def as_dict(self) -> dict:
         if self.refreshes: body['refreshes'] = [v.as_dict() for v in self.refreshes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorRefreshListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.refreshes: body['refreshes'] = self.refreshes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorRefreshListResponse:
         """Deserializes the MonitorRefreshListResponse from a dictionary."""
@@ -3804,6 +5205,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorSnapshot into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorSnapshot:
         """Deserializes the MonitorSnapshot from a dictionary."""
@@ -3831,6 +5237,13 @@ def as_dict(self) -> dict:
         if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MonitorTimeSeries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.granularities: body['granularities'] = self.granularities
+        if self.timestamp_col is not None: body['timestamp_col'] = self.timestamp_col
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MonitorTimeSeries:
         """Deserializes the MonitorTimeSeries from a dictionary."""
@@ -3848,6 +5261,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NamedTableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NamedTableConstraint:
         """Deserializes the NamedTableConstraint from a dictionary."""
@@ -3883,7 +5302,18 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
         if self.unity_catalog_provisioning_state is not None:
-            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.spec: body['spec'] = self.spec
+        if self.status: body['status'] = self.status
+        if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state
         return body
 
     @classmethod
@@ -3941,6 +5371,19 @@ def as_dict(self) -> dict:
         if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.perform_full_copy is not None: body['perform_full_copy'] = self.perform_full_copy
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.primary_key_columns: body['primary_key_columns'] = self.primary_key_columns
+        if self.run_continuously: body['run_continuously'] = self.run_continuously
+        if self.run_triggered: body['run_triggered'] = self.run_triggered
+        if self.source_table_full_name is not None:
+            body['source_table_full_name'] = self.source_table_full_name
+        if self.timeseries_key is not None: body['timeseries_key'] = self.timeseries_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpec:
         """Deserializes the OnlineTableSpec from a dictionary."""
@@ -3962,6 +5405,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpecContinuousSchedulingPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecContinuousSchedulingPolicy:
         """Deserializes the OnlineTableSpecContinuousSchedulingPolicy from a dictionary."""
@@ -3976,6 +5424,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableSpecTriggeredSchedulingPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableSpecTriggeredSchedulingPolicy:
         """Deserializes the OnlineTableSpecTriggeredSchedulingPolicy from a dictionary."""
@@ -4037,6 +5490,17 @@ def as_dict(self) -> dict:
             body['triggered_update_status'] = self.triggered_update_status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OnlineTableStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.continuous_update_status: body['continuous_update_status'] = self.continuous_update_status
+        if self.detailed_state is not None: body['detailed_state'] = self.detailed_state
+        if self.failed_status: body['failed_status'] = self.failed_status
+        if self.message is not None: body['message'] = self.message
+        if self.provisioning_status: body['provisioning_status'] = self.provisioning_status
+        if self.triggered_update_status: body['triggered_update_status'] = self.triggered_update_status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OnlineTableStatus:
         """Deserializes the OnlineTableStatus from a dictionary."""
@@ -4067,6 +5531,14 @@ def as_dict(self) -> dict:
         if self.remove: body['remove'] = [v.value for v in self.remove]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add: body['add'] = self.add
+        if self.principal is not None: body['principal'] = self.principal
+        if self.remove: body['remove'] = self.remove
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsChange:
         """Deserializes the PermissionsChange from a dictionary."""
@@ -4087,6 +5559,12 @@ def as_dict(self) -> dict:
             body['privilege_assignments'] = [v.as_dict() for v in self.privilege_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsList:
         """Deserializes the PermissionsList from a dictionary."""
@@ -4126,6 +5604,19 @@ def as_dict(self) -> dict:
         if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineProgress into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.estimated_completion_time_seconds is not None:
+            body['estimated_completion_time_seconds'] = self.estimated_completion_time_seconds
+        if self.latest_version_currently_processing is not None:
+            body['latest_version_currently_processing'] = self.latest_version_currently_processing
+        if self.sync_progress_completion is not None:
+            body['sync_progress_completion'] = self.sync_progress_completion
+        if self.synced_row_count is not None: body['synced_row_count'] = self.synced_row_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineProgress:
         """Deserializes the PipelineProgress from a dictionary."""
@@ -4151,6 +5642,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrimaryKeyConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.child_columns: body['child_columns'] = self.child_columns
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrimaryKeyConstraint:
         """Deserializes the PrimaryKeyConstraint from a dictionary."""
@@ -4169,6 +5667,7 @@ class Privilege(Enum):
     CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
     CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
     CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
     CREATE_FUNCTION = 'CREATE_FUNCTION'
     CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
     CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
@@ -4220,6 +5719,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.value for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
@@ -4241,6 +5747,12 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProvisioningInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo:
         """Deserializes the ProvisioningInfo from a dictionary."""
@@ -4250,6 +5762,7 @@ def from_dict(cls, d: Dict[str, any]) -> ProvisioningInfo:
 class ProvisioningInfoState(Enum):
 
     ACTIVE = 'ACTIVE'
+    DEGRADED = 'DEGRADED'
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
@@ -4272,6 +5785,13 @@ def as_dict(self) -> dict:
             body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProvisioningStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.initial_pipeline_sync_progress:
+            body['initial_pipeline_sync_progress'] = self.initial_pipeline_sync_progress
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProvisioningStatus:
         """Deserializes the ProvisioningStatus from a dictionary."""
@@ -4311,6 +5831,17 @@ def as_dict(self) -> dict:
         if self.quota_name is not None: body['quota_name'] = self.quota_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuotaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_refreshed_at is not None: body['last_refreshed_at'] = self.last_refreshed_at
+        if self.parent_full_name is not None: body['parent_full_name'] = self.parent_full_name
+        if self.parent_securable_type is not None: body['parent_securable_type'] = self.parent_securable_type
+        if self.quota_count is not None: body['quota_count'] = self.quota_count
+        if self.quota_limit is not None: body['quota_limit'] = self.quota_limit
+        if self.quota_name is not None: body['quota_name'] = self.quota_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QuotaInfo:
         """Deserializes the QuotaInfo from a dictionary."""
@@ -4344,6 +5875,14 @@ def as_dict(self) -> dict:
         if self.session_token is not None: body['session_token'] = self.session_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the R2Credentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
+        if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
+        if self.session_token is not None: body['session_token'] = self.session_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
         """Deserializes the R2Credentials from a dictionary."""
@@ -4368,6 +5907,13 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegenerateDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardRequest:
         """Deserializes the RegenerateDashboardRequest from a dictionary."""
@@ -4389,6 +5935,13 @@ def as_dict(self) -> dict:
         if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegenerateDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.parent_folder is not None: body['parent_folder'] = self.parent_folder
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegenerateDashboardResponse:
         """Deserializes the RegenerateDashboardResponse from a dictionary."""
@@ -4412,6 +5965,13 @@ def as_dict(self) -> dict:
         if self.version_num is not None: body['version_num'] = self.version_num
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAlias into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alias_name is not None: body['alias_name'] = self.alias_name
+        if self.version_num is not None: body['version_num'] = self.version_num
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAlias:
         """Deserializes the RegisteredModelAlias from a dictionary."""
@@ -4482,6 +6042,25 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aliases: body['aliases'] = self.aliases
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelInfo:
         """Deserializes the RegisteredModelInfo from a dictionary."""
@@ -4584,6 +6163,31 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.catalog_type is not None: body['catalog_type'] = self.catalog_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        if self.schema_id is not None: body['schema_id'] = self.schema_id
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
         """Deserializes the SchemaInfo from a dictionary."""
@@ -4617,19 +6221,21 @@ def from_dict(cls, d: Dict[str, any]) -> SchemaInfo:
 class SecurableType(Enum):
     """The type of Unity Catalog securable"""
 
-    CATALOG = 'catalog'
-    CONNECTION = 'connection'
-    EXTERNAL_LOCATION = 'external_location'
-    FUNCTION = 'function'
-    METASTORE = 'metastore'
-    PIPELINE = 'pipeline'
-    PROVIDER = 'provider'
-    RECIPIENT = 'recipient'
-    SCHEMA = 'schema'
-    SHARE = 'share'
-    STORAGE_CREDENTIAL = 'storage_credential'
-    TABLE = 'table'
-    VOLUME = 'volume'
+    CATALOG = 'CATALOG'
+    CLEAN_ROOM = 'CLEAN_ROOM'
+    CONNECTION = 'CONNECTION'
+    CREDENTIAL = 'CREDENTIAL'
+    EXTERNAL_LOCATION = 'EXTERNAL_LOCATION'
+    FUNCTION = 'FUNCTION'
+    METASTORE = 'METASTORE'
+    PIPELINE = 'PIPELINE'
+    PROVIDER = 'PROVIDER'
+    RECIPIENT = 'RECIPIENT'
+    SCHEMA = 'SCHEMA'
+    SHARE = 'SHARE'
+    STORAGE_CREDENTIAL = 'STORAGE_CREDENTIAL'
+    TABLE = 'TABLE'
+    VOLUME = 'VOLUME'
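
Note that the `SecurableType` wire values above switch from lowercase to uppercase strings. Any caller comparing the raw `.value` against a lowercase literal would silently stop matching; a quick sketch of the new behavior (import path as used elsewhere in this SDK):

```python
from databricks.sdk.service.catalog import SecurableType

assert SecurableType.TABLE.value == 'TABLE'
# Before this change the same expression yielded 'table', so string
# comparisons against lowercase literals no longer match:
assert SecurableType.TABLE.value != 'table'
```
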
 
 
 @dataclass
@@ -4647,6 +6253,13 @@ def as_dict(self) -> dict:
         if self.artifact_type is not None: body['artifact_type'] = self.artifact_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetArtifactAllowlist into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_matchers: body['artifact_matchers'] = self.artifact_matchers
+        if self.artifact_type is not None: body['artifact_type'] = self.artifact_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetArtifactAllowlist:
         """Deserializes the SetArtifactAllowlist from a dictionary."""
@@ -4673,6 +6286,14 @@ def as_dict(self) -> dict:
         if self.version_num is not None: body['version_num'] = self.version_num
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetRegisteredModelAliasRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alias is not None: body['alias'] = self.alias
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version_num is not None: body['version_num'] = self.version_num
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetRegisteredModelAliasRequest:
         """Deserializes the SetRegisteredModelAliasRequest from a dictionary."""
@@ -4698,6 +6319,13 @@ def as_dict(self) -> dict:
         if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SseEncryptionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.algorithm is not None: body['algorithm'] = self.algorithm
+        if self.aws_kms_key_arn is not None: body['aws_kms_key_arn'] = self.aws_kms_key_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SseEncryptionDetails:
         """Deserializes the SseEncryptionDetails from a dictionary."""
@@ -4738,11 +6366,13 @@ class StorageCredentialInfo:
     databricks_gcp_service_account: Optional[DatabricksGcpServiceAccountResponse] = None
     """The Databricks managed GCP service account configuration."""
 
+    full_name: Optional[str] = None
+    """The full name of the credential."""
+
     id: Optional[str] = None
     """The unique identifier of the credential."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     metastore_id: Optional[str] = None
     """Unique identifier of parent metastore."""
@@ -4778,6 +6408,7 @@ def as_dict(self) -> dict:
         if self.created_by is not None: body['created_by'] = self.created_by
         if self.databricks_gcp_service_account:
             body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.full_name is not None: body['full_name'] = self.full_name
         if self.id is not None: body['id'] = self.id
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
@@ -4790,6 +6421,31 @@ def as_dict(self) -> dict:
             body['used_for_managed_storage'] = self.used_for_managed_storage
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StorageCredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.id is not None: body['id'] = self.id
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.used_for_managed_storage is not None:
+            body['used_for_managed_storage'] = self.used_for_managed_storage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo:
         """Deserializes the StorageCredentialInfo from a dictionary."""
@@ -4803,6 +6459,7 @@ def from_dict(cls, d: Dict[str, any]) -> StorageCredentialInfo:
                    created_by=d.get('created_by', None),
                    databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
                                                              DatabricksGcpServiceAccountResponse),
+                   full_name=d.get('full_name', None),
                    id=d.get('id', None),
                    isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
                    metastore_id=d.get('metastore_id', None),
@@ -4830,6 +6487,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SystemSchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.schema is not None: body['schema'] = self.schema
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SystemSchemaInfo:
         """Deserializes the SystemSchemaInfo from a dictionary."""
@@ -4866,6 +6530,14 @@ def as_dict(self) -> dict:
         if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableConstraint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.foreign_key_constraint: body['foreign_key_constraint'] = self.foreign_key_constraint
+        if self.named_table_constraint: body['named_table_constraint'] = self.named_table_constraint
+        if self.primary_key_constraint: body['primary_key_constraint'] = self.primary_key_constraint
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableConstraint:
         """Deserializes the TableConstraint from a dictionary."""
@@ -4888,6 +6560,12 @@ def as_dict(self) -> dict:
         if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_full_name is not None: body['table_full_name'] = self.table_full_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableDependency:
         """Deserializes the TableDependency from a dictionary."""
@@ -4905,6 +6583,12 @@ def as_dict(self) -> dict:
         if self.table_exists is not None: body['table_exists'] = self.table_exists
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableExistsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.table_exists is not None: body['table_exists'] = self.table_exists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableExistsResponse:
         """Deserializes the TableExistsResponse from a dictionary."""
@@ -5057,6 +6741,48 @@ def as_dict(self) -> dict:
         if self.view_dependencies: body['view_dependencies'] = self.view_dependencies.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.columns: body['columns'] = self.columns
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_access_configuration_id is not None:
+            body['data_access_configuration_id'] = self.data_access_configuration_id
+        if self.data_source_format is not None: body['data_source_format'] = self.data_source_format
+        if self.deleted_at is not None: body['deleted_at'] = self.deleted_at
+        if self.delta_runtime_properties_kvpairs:
+            body['delta_runtime_properties_kvpairs'] = self.delta_runtime_properties_kvpairs
+        if self.effective_predictive_optimization_flag:
+            body['effective_predictive_optimization_flag'] = self.effective_predictive_optimization_flag
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.properties: body['properties'] = self.properties
+        if self.row_filter: body['row_filter'] = self.row_filter
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.sql_path is not None: body['sql_path'] = self.sql_path
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.table_constraints: body['table_constraints'] = self.table_constraints
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.table_type is not None: body['table_type'] = self.table_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.view_definition is not None: body['view_definition'] = self.view_definition
+        if self.view_dependencies: body['view_dependencies'] = self.view_dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableInfo:
         """Deserializes the TableInfo from a dictionary."""
@@ -5119,6 +6845,13 @@ def as_dict(self) -> dict:
         if self.input_column_names: body['input_column_names'] = [v for v in self.input_column_names]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableRowFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.function_name is not None: body['function_name'] = self.function_name
+        if self.input_column_names: body['input_column_names'] = self.input_column_names
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableRowFilter:
         """Deserializes the TableRowFilter from a dictionary."""
@@ -5140,6 +6873,13 @@ def as_dict(self) -> dict:
         if self.table_type is not None: body['table_type'] = self.table_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.table_type is not None: body['table_type'] = self.table_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSummary:
         """Deserializes the TableSummary from a dictionary."""
@@ -5158,6 +6898,52 @@ class TableType(Enum):
     VIEW = 'VIEW'
 
 
+@dataclass
+class TemporaryCredentials:
+    aws_temp_credentials: Optional[AwsCredentials] = None
+    """AWS temporary credentials for API authentication. Read more at
+    https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
+
+    azure_aad: Optional[AzureActiveDirectoryToken] = None
+    """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
+    Identity. Read more at
+    https://learn.microsoft.com/en-us/azure/databricks/dev-tools/api/latest/aad/service-prin-aad-token"""
+
+    expiration_time: Optional[int] = None
+    """Server time when the credential will expire, in epoch milliseconds. The API client is advised to
+    cache the credential given this expiration time."""
+
+    gcp_oauth_token: Optional[GcpOauthToken] = None
+    """GCP temporary credentials for API authentication. Read more at
+    https://developers.google.com/identity/protocols/oauth2/service-account"""
+
+    def as_dict(self) -> dict:
+        """Serializes the TemporaryCredentials into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
+        if self.azure_aad: body['azure_aad'] = self.azure_aad.as_dict()
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TemporaryCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials
+        if self.azure_aad: body['azure_aad'] = self.azure_aad
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> TemporaryCredentials:
+        """Deserializes the TemporaryCredentials from a dictionary."""
+        return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
+                   azure_aad=_from_dict(d, 'azure_aad', AzureActiveDirectoryToken),
+                   expiration_time=d.get('expiration_time', None),
+                   gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken))
+
+
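Since `expiration_time` is documented as a server-side epoch-millisecond expiry that clients are advised to cache against, a caching client might gate reuse on it. A hypothetical helper (`is_still_valid` and the one-minute skew are illustrative choices, not SDK API):

```python
import time

from databricks.sdk.service.catalog import TemporaryCredentials

def is_still_valid(creds: TemporaryCredentials, skew_ms: int = 60_000) -> bool:
    """Treat cached credentials as expired a minute early to absorb clock skew."""
    if creds.expiration_time is None:
        return False
    return creds.expiration_time - skew_ms > int(time.time() * 1000)

# expiration_time one hour from now -> still usable
cached = TemporaryCredentials.from_dict(
    {'expiration_time': int(time.time() * 1000) + 3_600_000})
assert is_still_valid(cached)
```
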
 @dataclass
 class TriggeredUpdateStatus:
     """Detailed status of an online table. Shown if the online table is in the ONLINE_TRIGGERED_UPDATE
@@ -5184,6 +6970,15 @@ def as_dict(self) -> dict:
             body['triggered_update_progress'] = self.triggered_update_progress.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggeredUpdateStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_processed_commit_version is not None:
+            body['last_processed_commit_version'] = self.last_processed_commit_version
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.triggered_update_progress: body['triggered_update_progress'] = self.triggered_update_progress
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggeredUpdateStatus:
         """Deserializes the TriggeredUpdateStatus from a dictionary."""
@@ -5200,6 +6995,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnassignResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnassignResponse:
         """Deserializes the UnassignResponse from a dictionary."""
@@ -5214,6 +7014,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAssignmentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
         """Deserializes the UpdateAssignmentResponse from a dictionary."""
@@ -5223,6 +7028,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAssignmentResponse:
 class UpdateBindingsSecurableType(Enum):
 
     CATALOG = 'catalog'
+    CREDENTIAL = 'credential'
     EXTERNAL_LOCATION = 'external_location'
     STORAGE_CREDENTIAL = 'storage_credential'
 
@@ -5244,6 +7050,9 @@ class UpdateCatalog:
     new_name: Optional[str] = None
     """New name for the catalog."""
 
+    options: Optional[Dict[str, str]] = None
+    """A map of key-value properties attached to the securable."""
+
     owner: Optional[str] = None
     """Username of current owner of catalog."""
 
@@ -5259,6 +7068,21 @@ def as_dict(self) -> dict:
         if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
         if self.name is not None: body['name'] = self.name
         if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
         if self.owner is not None: body['owner'] = self.owner
         if self.properties: body['properties'] = self.properties
         return body
@@ -5272,6 +7096,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCatalog:
                    isolation_mode=_enum(d, 'isolation_mode', CatalogIsolationMode),
                    name=d.get('name', None),
                    new_name=d.get('new_name', None),
+                   options=d.get('options', None),
                    owner=d.get('owner', None),
                    properties=d.get('properties', None))
 
@@ -5299,6 +7124,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateConnection into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.options: body['options'] = self.options
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
         """Deserializes the UpdateConnection from a dictionary."""
@@ -5308,6 +7142,101 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateConnection:
                    owner=d.get('owner', None))
 
 
+@dataclass
+class UpdateCredentialRequest:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    azure_service_principal: Optional[AzureServicePrincipal] = None
+    """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
+
+    comment: Optional[str] = None
+    """Comment associated with the credential."""
+
+    databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None
+    """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
+
+    force: Optional[bool] = None
+    """Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+    external locations and external tables (when purpose is **STORAGE**)."""
+
+    isolation_mode: Optional[IsolationMode] = None
+    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
+
+    name_arg: Optional[str] = None
+    """Name of the credential."""
+
+    new_name: Optional[str] = None
+    """New name of credential."""
+
+    owner: Optional[str] = None
+    """Username of current owner of credential."""
+
+    read_only: Optional[bool] = None
+    """Whether the credential is usable only for read operations. Only applicable when purpose is
+    **STORAGE**."""
+
+    skip_validation: Optional[bool] = None
+    """Supply true to this argument to skip validation of the updated credential."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.azure_service_principal:
+            body['azure_service_principal'] = self.azure_service_principal.as_dict()
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account.as_dict()
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode.value
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name_arg is not None: body['name_arg'] = self.name_arg
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialRequest:
+        """Deserializes the UpdateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   azure_service_principal=_from_dict(d, 'azure_service_principal', AzureServicePrincipal),
+                   comment=d.get('comment', None),
+                   databricks_gcp_service_account=_from_dict(d, 'databricks_gcp_service_account',
+                                                             DatabricksGcpServiceAccount),
+                   force=d.get('force', None),
+                   isolation_mode=_enum(d, 'isolation_mode', IsolationMode),
+                   name_arg=d.get('name_arg', None),
+                   new_name=d.get('new_name', None),
+                   owner=d.get('owner', None),
+                   read_only=d.get('read_only', None),
+                   skip_validation=d.get('skip_validation', None))
+
+
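A sketch of building an update request for the new credentials API; it also shows the enum handling difference between the two serializers (member names assume the `ISOLATION_MODE_*` variant of `IsolationMode` referenced by this request's `from_dict`):

```python
from databricks.sdk.service.catalog import IsolationMode, UpdateCredentialRequest

req = UpdateCredentialRequest(name_arg='my_cred',
                              new_name='my_cred_v2',
                              owner='alice@example.com',
                              isolation_mode=IsolationMode.ISOLATION_MODE_ISOLATED)
# as_dict flattens the enum to its wire value; as_shallow_dict keeps the member itself
assert req.as_dict()['isolation_mode'] == 'ISOLATION_MODE_ISOLATED'
assert req.as_shallow_dict()['isolation_mode'] is IsolationMode.ISOLATION_MODE_ISOLATED
```
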
 @dataclass
 class UpdateExternalLocation:
     access_point: Optional[str] = None
@@ -5331,7 +7260,6 @@ class UpdateExternalLocation:
     """Force update even if changing url invalidates dependent external tables or mounts."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     name: Optional[str] = None
     """Name of the external location."""
@@ -5369,6 +7297,24 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.comment is not None: body['comment'] = self.comment
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.fallback is not None: body['fallback'] = self.fallback
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExternalLocation:
         """Deserializes the UpdateExternalLocation from a dictionary."""
@@ -5403,6 +7349,13 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateFunction into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateFunction:
         """Deserializes the UpdateFunction from a dictionary."""
@@ -5454,6 +7407,24 @@ def as_dict(self) -> dict:
             body['storage_root_credential_id'] = self.storage_root_credential_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMetastore into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_sharing_organization_name is not None:
+            body['delta_sharing_organization_name'] = self.delta_sharing_organization_name
+        if self.delta_sharing_recipient_token_lifetime_in_seconds is not None:
+            body[
+                'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
+        if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope
+        if self.id is not None: body['id'] = self.id
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.privilege_model_version is not None:
+            body['privilege_model_version'] = self.privilege_model_version
+        if self.storage_root_credential_id is not None:
+            body['storage_root_credential_id'] = self.storage_root_credential_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastore:
         """Deserializes the UpdateMetastore from a dictionary."""
@@ -5488,6 +7459,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMetastoreAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_catalog_name is not None: body['default_catalog_name'] = self.default_catalog_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMetastoreAssignment:
         """Deserializes the UpdateMetastoreAssignment from a dictionary."""
@@ -5522,6 +7501,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
@@ -5592,6 +7579,24 @@ def as_dict(self) -> dict:
         if self.time_series: body['time_series'] = self.time_series.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateMonitor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.baseline_table_name is not None: body['baseline_table_name'] = self.baseline_table_name
+        if self.custom_metrics: body['custom_metrics'] = self.custom_metrics
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.data_classification_config:
+            body['data_classification_config'] = self.data_classification_config
+        if self.inference_log: body['inference_log'] = self.inference_log
+        if self.notifications: body['notifications'] = self.notifications
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.schedule: body['schedule'] = self.schedule
+        if self.slicing_exprs: body['slicing_exprs'] = self.slicing_exprs
+        if self.snapshot: body['snapshot'] = self.snapshot
+        if self.table_name is not None: body['table_name'] = self.table_name
+        if self.time_series: body['time_series'] = self.time_series
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateMonitor:
         """Deserializes the UpdateMonitor from a dictionary."""
@@ -5629,6 +7634,14 @@ def as_dict(self) -> dict:
         if self.securable_type is not None: body['securable_type'] = self.securable_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePermissions:
         """Deserializes the UpdatePermissions from a dictionary."""
@@ -5660,6 +7673,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRegisteredModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegisteredModelRequest:
         """Deserializes the UpdateRegisteredModelRequest from a dictionary."""
@@ -5677,6 +7699,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -5715,6 +7742,18 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.enable_predictive_optimization is not None:
+            body['enable_predictive_optimization'] = self.enable_predictive_optimization
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties: body['properties'] = self.properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSchema:
         """Deserializes the UpdateSchema from a dictionary."""
@@ -5751,7 +7790,6 @@ class UpdateStorageCredential:
     """Force update even if there are dependent external locations or external tables."""
 
     isolation_mode: Optional[IsolationMode] = None
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     name: Optional[str] = None
     """Name of the storage credential."""
@@ -5788,6 +7826,25 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.comment is not None: body['comment'] = self.comment
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.force is not None: body['force'] = self.force
+        if self.isolation_mode is not None: body['isolation_mode'] = self.isolation_mode
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStorageCredential:
         """Deserializes the UpdateStorageCredential from a dictionary."""
@@ -5831,6 +7888,15 @@ def as_dict(self) -> dict:
         if self.owner is not None: body['owner'] = self.owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVolumeRequestContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVolumeRequestContent:
         """Deserializes the UpdateVolumeRequestContent from a dictionary."""
@@ -5859,44 +7925,162 @@ def as_dict(self) -> dict:
         if self.unassign_workspaces: body['unassign_workspaces'] = [v for v in self.unassign_workspaces]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assign_workspaces: body['assign_workspaces'] = self.assign_workspaces
+        if self.name is not None: body['name'] = self.name
+        if self.unassign_workspaces: body['unassign_workspaces'] = self.unassign_workspaces
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings:
+        """Deserializes the UpdateWorkspaceBindings from a dictionary."""
+        return cls(assign_workspaces=d.get('assign_workspaces', None),
+                   name=d.get('name', None),
+                   unassign_workspaces=d.get('unassign_workspaces', None))
+
+
+@dataclass
+class UpdateWorkspaceBindingsParameters:
+    add: Optional[List[WorkspaceBinding]] = None
+    """List of workspace bindings"""
+
+    remove: Optional[List[WorkspaceBinding]] = None
+    """List of workspace bindings"""
+
+    securable_name: Optional[str] = None
+    """The name of the securable."""
+
+    securable_type: Optional[UpdateBindingsSecurableType] = None
+    """The type of the securable to bind to a workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.add: body['add'] = [v.as_dict() for v in self.add]
+        if self.remove: body['remove'] = [v.as_dict() for v in self.remove]
+        if self.securable_name is not None: body['securable_name'] = self.securable_name
+        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindingsParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add: body['add'] = self.add
+        if self.remove: body['remove'] = self.remove
+        if self.securable_name is not None: body['securable_name'] = self.securable_name
+        if self.securable_type is not None: body['securable_type'] = self.securable_type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
+        """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary."""
+        return cls(add=_repeated_dict(d, 'add', WorkspaceBinding),
+                   remove=_repeated_dict(d, 'remove', WorkspaceBinding),
+                   securable_name=d.get('securable_name', None),
+                   securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType))
+
+
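A note for reviewers on the pattern repeated throughout this patch: `as_shallow_dict` mirrors `as_dict` but keeps nested model objects and enum members as-is instead of recursing into `.as_dict()` / `.value`. A minimal sketch of the difference, using the `UpdateWorkspaceBindingsParameters` type added above (the import path is assumed from the file under change):

```python
# Minimal sketch: deep vs. shallow serialization of a generated dataclass.
# The import path is assumed from the surrounding diff context.
from databricks.sdk.service.catalog import (UpdateWorkspaceBindingsParameters,
                                            WorkspaceBinding)

params = UpdateWorkspaceBindingsParameters(add=[WorkspaceBinding(workspace_id=12345)],
                                           securable_name='main')

deep = params.as_dict()             # {'add': [{'workspace_id': 12345}], 'securable_name': 'main'}
shallow = params.as_shallow_dict()  # 'add' still holds WorkspaceBinding instances
assert isinstance(shallow['add'][0], WorkspaceBinding)
```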
+@dataclass
+class ValidateCredentialRequest:
+    aws_iam_role: Optional[AwsIamRole] = None
+    """The AWS IAM role configuration"""
+
+    azure_managed_identity: Optional[AzureManagedIdentity] = None
+    """The Azure managed identity configuration."""
+
+    credential_name: Optional[str] = None
+    """Required. The name of an existing credential or long-lived cloud credential to validate."""
+
+    external_location_name: Optional[str] = None
+    """The name of an existing external location to validate. Only applicable for storage credentials
+    (purpose is **STORAGE**.)"""
+
+    purpose: Optional[CredentialPurpose] = None
+    """The purpose of the credential. This should only be used when the credential is specified."""
+
+    read_only: Optional[bool] = None
+    """Whether the credential is only usable for read operations. Only applicable for storage
+    credentials (purpose is **STORAGE**.)"""
+
+    url: Optional[str] = None
+    """The external location url to validate. Only applicable when purpose is **STORAGE**."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ValidateCredentialRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role.as_dict()
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity.as_dict()
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
+        if self.purpose is not None: body['purpose'] = self.purpose.value
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.url is not None: body['url'] = self.url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.credential_name is not None: body['credential_name'] = self.credential_name
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
+        if self.purpose is not None: body['purpose'] = self.purpose
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindings:
-        """Deserializes the UpdateWorkspaceBindings from a dictionary."""
-        return cls(assign_workspaces=d.get('assign_workspaces', None),
-                   name=d.get('name', None),
-                   unassign_workspaces=d.get('unassign_workspaces', None))
+    def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialRequest:
+        """Deserializes the ValidateCredentialRequest from a dictionary."""
+        return cls(aws_iam_role=_from_dict(d, 'aws_iam_role', AwsIamRole),
+                   azure_managed_identity=_from_dict(d, 'azure_managed_identity', AzureManagedIdentity),
+                   credential_name=d.get('credential_name', None),
+                   external_location_name=d.get('external_location_name', None),
+                   purpose=_enum(d, 'purpose', CredentialPurpose),
+                   read_only=d.get('read_only', None),
+                   url=d.get('url', None))
 
 
 @dataclass
-class UpdateWorkspaceBindingsParameters:
-    add: Optional[List[WorkspaceBinding]] = None
-    """List of workspace bindings"""
-
-    remove: Optional[List[WorkspaceBinding]] = None
-    """List of workspace bindings"""
-
-    securable_name: Optional[str] = None
-    """The name of the securable."""
+class ValidateCredentialResponse:
+    is_dir: Optional[bool] = None
+    """Whether the tested location is a directory in cloud storage. Only applicable for when purpose is
+    **STORAGE**."""
 
-    securable_type: Optional[UpdateBindingsSecurableType] = None
-    """The type of the securable to bind to a workspace."""
+    results: Optional[List[CredentialValidationResult]] = None
+    """The results of the validation check."""
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body."""
+        """Serializes the ValidateCredentialResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.add: body['add'] = [v.as_dict() for v in self.add]
-        if self.remove: body['remove'] = [v.as_dict() for v in self.remove]
-        if self.securable_name is not None: body['securable_name'] = self.securable_name
-        if self.securable_type is not None: body['securable_type'] = self.securable_type.value
+        if self.is_dir is not None: body['isDir'] = self.is_dir
+        if self.results: body['results'] = [v.as_dict() for v in self.results]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_dir is not None: body['isDir'] = self.is_dir
+        if self.results: body['results'] = self.results
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceBindingsParameters:
-        """Deserializes the UpdateWorkspaceBindingsParameters from a dictionary."""
-        return cls(add=_repeated_dict(d, 'add', WorkspaceBinding),
-                   remove=_repeated_dict(d, 'remove', WorkspaceBinding),
-                   securable_name=d.get('securable_name', None),
-                   securable_type=_enum(d, 'securable_type', UpdateBindingsSecurableType))
+    def from_dict(cls, d: Dict[str, any]) -> ValidateCredentialResponse:
+        """Deserializes the ValidateCredentialResponse from a dictionary."""
+        return cls(is_dir=d.get('isDir', None),
+                   results=_repeated_dict(d, 'results', CredentialValidationResult))
+
+
+class ValidateCredentialResult(Enum):
+    """A enum represents the result of the file operation"""
+
+    FAIL = 'FAIL'
+    PASS = 'PASS'
+    SKIP = 'SKIP'
 
 
 @dataclass
@@ -5946,6 +8130,23 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateStorageCredential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_iam_role: body['aws_iam_role'] = self.aws_iam_role
+        if self.azure_managed_identity: body['azure_managed_identity'] = self.azure_managed_identity
+        if self.azure_service_principal: body['azure_service_principal'] = self.azure_service_principal
+        if self.cloudflare_api_token: body['cloudflare_api_token'] = self.cloudflare_api_token
+        if self.databricks_gcp_service_account:
+            body['databricks_gcp_service_account'] = self.databricks_gcp_service_account
+        if self.external_location_name is not None:
+            body['external_location_name'] = self.external_location_name
+        if self.read_only is not None: body['read_only'] = self.read_only
+        if self.storage_credential_name is not None:
+            body['storage_credential_name'] = self.storage_credential_name
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredential:
         """Deserializes the ValidateStorageCredential from a dictionary."""
@@ -5977,6 +8178,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidateStorageCredentialResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_dir is not None: body['isDir'] = self.is_dir
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidateStorageCredentialResponse:
         """Deserializes the ValidateStorageCredentialResponse from a dictionary."""
@@ -6002,6 +8210,14 @@ def as_dict(self) -> dict:
         if self.result is not None: body['result'] = self.result.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ValidationResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.operation is not None: body['operation'] = self.operation
+        if self.result is not None: body['result'] = self.result
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ValidationResult:
         """Deserializes the ValidationResult from a dictionary."""
@@ -6101,6 +8317,28 @@ def as_dict(self) -> dict:
         if self.volume_type is not None: body['volume_type'] = self.volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VolumeInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_point is not None: body['access_point'] = self.access_point
+        if self.browse_only is not None: body['browse_only'] = self.browse_only
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.encryption_details: body['encryption_details'] = self.encryption_details
+        if self.full_name is not None: body['full_name'] = self.full_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.volume_id is not None: body['volume_id'] = self.volume_id
+        if self.volume_type is not None: body['volume_type'] = self.volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumeInfo:
         """Deserializes the VolumeInfo from a dictionary."""
@@ -6142,6 +8380,13 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceBinding into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.binding_type is not None: body['binding_type'] = self.binding_type
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBinding:
         """Deserializes the WorkspaceBinding from a dictionary."""
@@ -6173,6 +8418,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bindings: body['bindings'] = self.bindings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceBindingsResponse:
         """Deserializes the WorkspaceBindingsResponse from a dictionary."""
@@ -6731,6 +8983,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/catalogs', query=query, headers=headers)
             if 'catalogs' in json:
@@ -6747,6 +9000,7 @@ def update(self,
                enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None,
                isolation_mode: Optional[CatalogIsolationMode] = None,
                new_name: Optional[str] = None,
+               options: Optional[Dict[str, str]] = None,
                owner: Optional[str] = None,
                properties: Optional[Dict[str, str]] = None) -> CatalogInfo:
         """Update a catalog.
@@ -6764,6 +9018,8 @@ def update(self,
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the catalog.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
         :param owner: str (optional)
           Username of current owner of catalog.
         :param properties: Dict[str,str] (optional)
@@ -6777,6 +9033,7 @@ def update(self,
             body['enable_predictive_optimization'] = enable_predictive_optimization.value
         if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
         if new_name is not None: body['new_name'] = new_name
+        if options is not None: body['options'] = options
         if owner is not None: body['owner'] = owner
         if properties is not None: body['properties'] = properties
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
@@ -6895,6 +9152,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/connections', query=query, headers=headers)
             if 'connections' in json:
@@ -6935,6 +9193,322 @@ def update(self,
         return ConnectionInfo.from_dict(res)
 
 
+class CredentialsAPI:
+    """A credential represents an authentication and authorization mechanism for accessing services on your cloud
+    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+    groups can access the credential.
+    
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+    privilege. The user who creates the credential can delegate ownership to another user or group to manage
+    permissions on it."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create_credential(self,
+                          name: str,
+                          *,
+                          aws_iam_role: Optional[AwsIamRole] = None,
+                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          azure_service_principal: Optional[AzureServicePrincipal] = None,
+                          comment: Optional[str] = None,
+                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
+                          purpose: Optional[CredentialPurpose] = None,
+                          read_only: Optional[bool] = None,
+                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+        """Create a credential.
+        
+        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+        which should be either **SERVICE** or **STORAGE**.
+        
+        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+        
+        :param name: str
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Optional. Supplying true to this argument skips validation of the created set of credentials.
+        
+        :returns: :class:`CredentialInfo`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
+        if comment is not None: body['comment'] = comment
+        if databricks_gcp_service_account is not None:
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
+        if name is not None: body['name'] = name
+        if purpose is not None: body['purpose'] = purpose.value
+        if read_only is not None: body['read_only'] = read_only
+        if skip_validation is not None: body['skip_validation'] = skip_validation
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.1/unity-catalog/credentials', body=body, headers=headers)
+        return CredentialInfo.from_dict(res)
+
+    def delete_credential(self, name_arg: str, *, force: Optional[bool] = None):
+        """Delete a credential.
+        
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        
+        
+        """
+
+        query = {}
+        if force is not None: query['force'] = force
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.1/unity-catalog/credentials/{name_arg}', query=query, headers=headers)
+
+    def generate_temporary_service_credential(
+            self,
+            credential_name: str,
+            *,
+            azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions] = None,
+            gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions] = None
+    ) -> TemporaryCredentials:
+        """Generate a temporary service credential.
+        
+        Returns a set of temporary credentials generated using the specified service credential. The caller
+        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
+        
+        :param credential_name: str
+          The name of the service credential used to generate a temporary credential
+        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+          The Azure cloud options to customize the requested temporary credential
+        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+          The GCP cloud options to customize the requested temporary credential
+        
+        :returns: :class:`TemporaryCredentials`
+        """
+        body = {}
+        if azure_options is not None: body['azure_options'] = azure_options.as_dict()
+        if credential_name is not None: body['credential_name'] = credential_name
+        if gcp_options is not None: body['gcp_options'] = gcp_options.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           '/api/2.1/unity-catalog/temporary-service-credentials',
+                           body=body,
+                           headers=headers)
+        return TemporaryCredentials.from_dict(res)
+
+    def get_credential(self, name_arg: str) -> CredentialInfo:
+        """Get a credential.
+        
+        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+        owner of the credential, or have any permission on the credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        
+        :returns: :class:`CredentialInfo`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.1/unity-catalog/credentials/{name_arg}', headers=headers)
+        return CredentialInfo.from_dict(res)
+
+    def list_credentials(self,
+                         *,
+                         max_results: Optional[int] = None,
+                         page_token: Optional[str] = None,
+                         purpose: Optional[CredentialPurpose] = None) -> Iterator[CredentialInfo]:
+        """List credentials.
+        
+        Gets an array of credentials (as __CredentialInfo__ objects).
+        
+        The array is limited to only the credentials that the caller has permission to access. If the caller
+        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+          to a value greater than 0, the page length is the minimum of this value and a server-configured
+          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+          set to a value less than 0, an invalid parameter error is returned.
+        :param page_token: str (optional)
+          Opaque token to retrieve the next page of results.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Return only credentials for the specified purpose.
+        
+        :returns: Iterator over :class:`CredentialInfo`
+        """
+
+        query = {}
+        if max_results is not None: query['max_results'] = max_results
+        if page_token is not None: query['page_token'] = page_token
+        if purpose is not None: query['purpose'] = purpose.value
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.1/unity-catalog/credentials', query=query, headers=headers)
+            if 'credentials' in json:
+                for v in json['credentials']:
+                    yield CredentialInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update_credential(self,
+                          name_arg: str,
+                          *,
+                          aws_iam_role: Optional[AwsIamRole] = None,
+                          azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                          azure_service_principal: Optional[AzureServicePrincipal] = None,
+                          comment: Optional[str] = None,
+                          databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount] = None,
+                          force: Optional[bool] = None,
+                          isolation_mode: Optional[IsolationMode] = None,
+                          new_name: Optional[str] = None,
+                          owner: Optional[str] = None,
+                          read_only: Optional[bool] = None,
+                          skip_validation: Optional[bool] = None) -> CredentialInfo:
+        """Update a credential.
+        
+        Updates a service or storage credential on the metastore.
+        
+        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+        If the caller is a metastore admin, only the __owner__ field can be changed.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name of credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Supply true to this argument to skip validation of the updated credential.
+        
+        :returns: :class:`CredentialInfo`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if azure_service_principal is not None:
+            body['azure_service_principal'] = azure_service_principal.as_dict()
+        if comment is not None: body['comment'] = comment
+        if databricks_gcp_service_account is not None:
+            body['databricks_gcp_service_account'] = databricks_gcp_service_account.as_dict()
+        if force is not None: body['force'] = force
+        if isolation_mode is not None: body['isolation_mode'] = isolation_mode.value
+        if new_name is not None: body['new_name'] = new_name
+        if owner is not None: body['owner'] = owner
+        if read_only is not None: body['read_only'] = read_only
+        if skip_validation is not None: body['skip_validation'] = skip_validation
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.1/unity-catalog/credentials/{name_arg}',
+                           body=body,
+                           headers=headers)
+        return CredentialInfo.from_dict(res)
+
+    def validate_credential(self,
+                            *,
+                            aws_iam_role: Optional[AwsIamRole] = None,
+                            azure_managed_identity: Optional[AzureManagedIdentity] = None,
+                            credential_name: Optional[str] = None,
+                            external_location_name: Optional[str] = None,
+                            purpose: Optional[CredentialPurpose] = None,
+                            read_only: Optional[bool] = None,
+                            url: Optional[str] = None) -> ValidateCredentialResponse:
+        """Validate a credential.
+        
+        Validates a credential.
+        
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ need to be provided. If only one of them is provided, it will be used for validation. And if
+        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+        ignored when checking overlapping urls. Either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param credential_name: str (optional)
+          Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param purpose: :class:`CredentialPurpose` (optional)
+          The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**.)
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.
+        
+        :returns: :class:`ValidateCredentialResponse`
+        """
+        body = {}
+        if aws_iam_role is not None: body['aws_iam_role'] = aws_iam_role.as_dict()
+        if azure_managed_identity is not None:
+            body['azure_managed_identity'] = azure_managed_identity.as_dict()
+        if credential_name is not None: body['credential_name'] = credential_name
+        if external_location_name is not None: body['external_location_name'] = external_location_name
+        if purpose is not None: body['purpose'] = purpose.value
+        if read_only is not None: body['read_only'] = read_only
+        if url is not None: body['url'] = url
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.1/unity-catalog/validate-credentials', body=body, headers=headers)
+        return ValidateCredentialResponse.from_dict(res)
+
+
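For reviewers who want to see the new surface end to end, a hedged usage sketch follows. The `credentials` attribute on `WorkspaceClient`, the `AwsIamRole.role_arn` field, and the ARN itself are illustrative assumptions rather than something this patch pins down:

```python
# Hedged usage sketch for CredentialsAPI; attribute and field names marked
# below are assumptions, not taken from this patch.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()

# Create a service credential backed by an AWS IAM role (role_arn assumed).
cred = w.credentials.create_credential(
    name='my-service-cred',
    aws_iam_role=AwsIamRole(role_arn='arn:aws:iam::123456789012:role/example'),
    purpose=CredentialPurpose.SERVICE)

# Exchange it for short-lived cloud credentials.
tmp = w.credentials.generate_temporary_service_credential(credential_name=cred.name)

# list_credentials drives the next_page_token loop internally, so it can be
# consumed as a plain iterator.
for c in w.credentials.list_credentials(purpose=CredentialPurpose.SERVICE):
    print(c.name)

w.credentials.delete_credential(cred.name)
```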
 class ExternalLocationsAPI:
     """An external location is an object that combines a cloud storage path with a storage credential that
     authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
@@ -7084,6 +9658,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/external-locations',
@@ -7134,7 +9709,6 @@ def update(self,
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the external location.
         :param owner: str (optional)
@@ -7890,25 +10464,61 @@ class OnlineTablesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self, *, name: Optional[str] = None, spec: Optional[OnlineTableSpec] = None) -> OnlineTable:
+    def wait_get_online_table_active(self,
+                                     name: str,
+                                     timeout=timedelta(minutes=20),
+                                     callback: Optional[Callable[[OnlineTable], None]] = None) -> OnlineTable:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (ProvisioningInfoState.ACTIVE, )
+        failure_states = (ProvisioningInfoState.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get(name=name)
+            status = poll.unity_catalog_provisioning_state
+            status_message = f'current status: {status}'
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach ACTIVE, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"name={name}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def create(self, *, table: Optional[OnlineTable] = None) -> Wait[OnlineTable]:
         """Create an Online Table.
         
         Create a new Online Table.
         
-        :param name: str (optional)
-          Full three-part (catalog, schema, table) name of the table.
-        :param spec: :class:`OnlineTableSpec` (optional)
-          Specification of the online table.
+        :param table: :class:`OnlineTable` (optional)
+          Online Table information.
         
-        :returns: :class:`OnlineTable`
+        :returns:
+          Long-running operation waiter for :class:`OnlineTable`.
+          See :method:wait_get_online_table_active for more details.
         """
-        body = {}
-        if name is not None: body['name'] = name
-        if spec is not None: body['spec'] = spec.as_dict()
+        body = table.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        res = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
-        return OnlineTable.from_dict(res)
+        op_response = self._api.do('POST', '/api/2.0/online-tables', body=body, headers=headers)
+        return Wait(self.wait_get_online_table_active,
+                    response=OnlineTable.from_dict(op_response),
+                    name=op_response['name'])
+
+    def create_and_wait(self,
+                        *,
+                        table: Optional[OnlineTable] = None,
+                        timeout=timedelta(minutes=20)) -> OnlineTable:
+        return self.create(table=table).result(timeout=timeout)
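A hedged sketch of the new waiter-based flow (the `online_tables` attribute and the `OnlineTableSpec` field names are assumptions for illustration):

```python
# Hedged sketch: create an online table and block until it is ACTIVE.
# OnlineTableSpec field names are illustrative assumptions.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import OnlineTable, OnlineTableSpec

w = WorkspaceClient()
table = OnlineTable(name='main.default.my_online_table',
                    spec=OnlineTableSpec(source_table_full_name='main.default.my_table',
                                         primary_key_columns=['id']))

# create() now returns Wait[OnlineTable]; result() polls
# wait_get_online_table_active until ACTIVE (or raises on FAILED/timeout).
online = w.online_tables.create(table=table).result()
```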
 
     def delete(self, name: str):
         """Delete an Online Table.
@@ -8782,6 +11392,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/schemas', query=query, headers=headers)
             if 'schemas' in json:
@@ -8971,6 +11582,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 '/api/2.1/unity-catalog/storage-credentials',
@@ -9019,7 +11631,6 @@ def update(self,
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the storage credential.
         :param owner: str (optional)
@@ -9196,6 +11807,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET',
                                 f'/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas',
@@ -9385,6 +11997,7 @@ def list(self,
              max_results: Optional[int] = None,
              omit_columns: Optional[bool] = None,
              omit_properties: Optional[bool] = None,
+             omit_username: Optional[bool] = None,
              page_token: Optional[str] = None) -> Iterator[TableInfo]:
         """List tables.
         
@@ -9414,6 +12027,9 @@ def list(self,
           Whether to omit the columns of the table from the response or not.
         :param omit_properties: bool (optional)
           Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+          not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).
         
@@ -9429,10 +12045,12 @@ def list(self,
         if max_results is not None: query['max_results'] = max_results
         if omit_columns is not None: query['omit_columns'] = omit_columns
         if omit_properties is not None: query['omit_properties'] = omit_properties
+        if omit_username is not None: query['omit_username'] = omit_username
         if page_token is not None: query['page_token'] = page_token
         if schema_name is not None: query['schema_name'] = schema_name
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/tables', query=query, headers=headers)
             if 'tables' in json:
@@ -9493,6 +12111,7 @@ def list_summaries(self,
         if table_name_pattern is not None: query['table_name_pattern'] = table_name_pattern
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/table-summaries', query=query, headers=headers)
             if 'tables' in json:
diff --git a/databricks/sdk/service/cleanrooms.py b/databricks/sdk/service/cleanrooms.py
new file mode 100755
index 000000000..f7a213669
--- /dev/null
+++ b/databricks/sdk/service/cleanrooms.py
@@ -0,0 +1,1283 @@
+# Code generated from OpenAPI specs by Databricks SDK Generator. DO NOT EDIT.
+
+from __future__ import annotations
+
+import logging
+from dataclasses import dataclass
+from enum import Enum
+from typing import Dict, Iterator, List, Optional
+
+from ._internal import _enum, _from_dict, _repeated_dict
+
+_LOG = logging.getLogger('databricks.sdk')
+
+from databricks.sdk.service import catalog, jobs, settings, sharing
+
+# all definitions in this file are in alphabetical order
+
+
+@dataclass
+class CleanRoom:
+    access_restricted: Optional[CleanRoomAccessRestricted] = None
+    """Whether clean room access is restricted due to [CSP]
+    
+    [CSP]: https://docs.databricks.com/en/security/privacy/security-profile.html"""
+
+    comment: Optional[str] = None
+
+    created_at: Optional[int] = None
+    """When the clean room was created, in epoch milliseconds."""
+
+    local_collaborator_alias: Optional[str] = None
+    """The alias of the collaborator tied to the local clean room."""
+
+    name: Optional[str] = None
+    """The name of the clean room. It should follow [UC securable naming requirements].
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    output_catalog: Optional[CleanRoomOutputCatalog] = None
+    """Output catalog of the clean room. It is an output only field. Output catalog is manipulated
+    using the separate CreateCleanRoomOutputCatalog API."""
+
+    owner: Optional[str] = None
+    """This is Databricks username of the owner of the local clean room securable for permission
+    management."""
+
+    remote_detailed_info: Optional[CleanRoomRemoteDetail] = None
+    """Central clean room details. During creation, users need to specify cloud_vendor, region, and
+    collaborators.global_metastore_id. This field will not be filled in the ListCleanRooms call."""
+
+    status: Optional[CleanRoomStatusEnum] = None
+    """Clean room status."""
+
+    updated_at: Optional[int] = None
+    """When the clean room was last updated, in epoch milliseconds."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoom into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted.value
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.local_collaborator_alias is not None:
+            body['local_collaborator_alias'] = self.local_collaborator_alias
+        if self.name is not None: body['name'] = self.name
+        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
+        if self.owner is not None: body['owner'] = self.owner
+        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
+        if self.status is not None: body['status'] = self.status.value
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoom into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_restricted is not None: body['access_restricted'] = self.access_restricted
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.local_collaborator_alias is not None:
+            body['local_collaborator_alias'] = self.local_collaborator_alias
+        if self.name is not None: body['name'] = self.name
+        if self.output_catalog: body['output_catalog'] = self.output_catalog
+        if self.owner is not None: body['owner'] = self.owner
+        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info
+        if self.status is not None: body['status'] = self.status
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoom:
+        """Deserializes the CleanRoom from a dictionary."""
+        return cls(access_restricted=_enum(d, 'access_restricted', CleanRoomAccessRestricted),
+                   comment=d.get('comment', None),
+                   created_at=d.get('created_at', None),
+                   local_collaborator_alias=d.get('local_collaborator_alias', None),
+                   name=d.get('name', None),
+                   output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog),
+                   owner=d.get('owner', None),
+                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CleanRoomRemoteDetail),
+                   status=_enum(d, 'status', CleanRoomStatusEnum),
+                   updated_at=d.get('updated_at', None))
+
+
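Since `cleanrooms.py` is a brand-new generated module, a quick round trip through the `from_dict`/`as_dict` pair may help sanity-check the enum handling (module path taken from the new file's header above):

```python
# Round-trip sketch for the generated (de)serializers in the new module.
from databricks.sdk.service.cleanrooms import (CleanRoom,
                                               CleanRoomAccessRestricted)

raw = {'name': 'demo_room', 'access_restricted': 'NO_RESTRICTION'}
room = CleanRoom.from_dict(raw)
assert room.access_restricted is CleanRoomAccessRestricted.NO_RESTRICTION
assert room.as_dict() == raw  # unset fields are omitted on the way back out
```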
+class CleanRoomAccessRestricted(Enum):
+
+    CSP_MISMATCH = 'CSP_MISMATCH'
+    NO_RESTRICTION = 'NO_RESTRICTION'
+
+
+@dataclass
+class CleanRoomAsset:
+    """Metadata of the clean room asset"""
+
+    added_at: Optional[int] = None
+    """When the asset is added to the clean room, in epoch milliseconds."""
+
+    asset_type: Optional[CleanRoomAssetAssetType] = None
+    """The type of the asset."""
+
+    foreign_table: Optional[CleanRoomAssetForeignTable] = None
+    """Foreign table details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **FOREIGN_TABLE**"""
+
+    foreign_table_local_details: Optional[CleanRoomAssetForeignTableLocalDetails] = None
+    """Local details for a foreign that are only available to its owner. Present if and only if
+    **asset_type** is **FOREIGN_TABLE**"""
+
+    name: Optional[str] = None
+    """A fully qualified name that uniquely identifies the asset within the clean room. This is also
+    the name displayed in the clean room UI.
+    
+    For UC securable assets (tables, volumes, etc.), the format is
+    *shared_catalog*.*shared_schema*.*asset_name*
+    
+    For notebooks, the name is the notebook file name."""
+
+    notebook: Optional[CleanRoomAssetNotebook] = None
+    """Notebook details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **NOTEBOOK_FILE**"""
+
+    owner_collaborator_alias: Optional[str] = None
+    """The alias of the collaborator who owns this asset"""
+
+    status: Optional[CleanRoomAssetStatusEnum] = None
+    """Status of the asset"""
+
+    table: Optional[CleanRoomAssetTable] = None
+    """Table details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **TABLE**"""
+
+    table_local_details: Optional[CleanRoomAssetTableLocalDetails] = None
+    """Local details for a table that are only available to its owner. Present if and only if
+    **asset_type** is **TABLE**"""
+
+    view: Optional[CleanRoomAssetView] = None
+    """View details available to all collaborators of the clean room. Present if and only if
+    **asset_type** is **VIEW**"""
+
+    view_local_details: Optional[CleanRoomAssetViewLocalDetails] = None
+    """Local details for a view that are only available to its owner. Present if and only if
+    **asset_type** is **VIEW**"""
+
+    volume_local_details: Optional[CleanRoomAssetVolumeLocalDetails] = None
+    """Local details for a volume that are only available to its owner. Present if and only if
+    **asset_type** is **VOLUME**"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAsset into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.asset_type is not None: body['asset_type'] = self.asset_type.value
+        if self.foreign_table: body['foreign_table'] = self.foreign_table.as_dict()
+        if self.foreign_table_local_details:
+            body['foreign_table_local_details'] = self.foreign_table_local_details.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.notebook: body['notebook'] = self.notebook.as_dict()
+        if self.owner_collaborator_alias is not None:
+            body['owner_collaborator_alias'] = self.owner_collaborator_alias
+        if self.status is not None: body['status'] = self.status.value
+        if self.table: body['table'] = self.table.as_dict()
+        if self.table_local_details: body['table_local_details'] = self.table_local_details.as_dict()
+        if self.view: body['view'] = self.view.as_dict()
+        if self.view_local_details: body['view_local_details'] = self.view_local_details.as_dict()
+        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAsset into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.asset_type is not None: body['asset_type'] = self.asset_type
+        if self.foreign_table: body['foreign_table'] = self.foreign_table
+        if self.foreign_table_local_details:
+            body['foreign_table_local_details'] = self.foreign_table_local_details
+        if self.name is not None: body['name'] = self.name
+        if self.notebook: body['notebook'] = self.notebook
+        if self.owner_collaborator_alias is not None:
+            body['owner_collaborator_alias'] = self.owner_collaborator_alias
+        if self.status is not None: body['status'] = self.status
+        if self.table: body['table'] = self.table
+        if self.table_local_details: body['table_local_details'] = self.table_local_details
+        if self.view: body['view'] = self.view
+        if self.view_local_details: body['view_local_details'] = self.view_local_details
+        if self.volume_local_details: body['volume_local_details'] = self.volume_local_details
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAsset:
+        """Deserializes the CleanRoomAsset from a dictionary."""
+        return cls(added_at=d.get('added_at', None),
+                   asset_type=_enum(d, 'asset_type', CleanRoomAssetAssetType),
+                   foreign_table=_from_dict(d, 'foreign_table', CleanRoomAssetForeignTable),
+                   foreign_table_local_details=_from_dict(d, 'foreign_table_local_details',
+                                                          CleanRoomAssetForeignTableLocalDetails),
+                   name=d.get('name', None),
+                   notebook=_from_dict(d, 'notebook', CleanRoomAssetNotebook),
+                   owner_collaborator_alias=d.get('owner_collaborator_alias', None),
+                   status=_enum(d, 'status', CleanRoomAssetStatusEnum),
+                   table=_from_dict(d, 'table', CleanRoomAssetTable),
+                   table_local_details=_from_dict(d, 'table_local_details', CleanRoomAssetTableLocalDetails),
+                   view=_from_dict(d, 'view', CleanRoomAssetView),
+                   view_local_details=_from_dict(d, 'view_local_details', CleanRoomAssetViewLocalDetails),
+                   volume_local_details=_from_dict(d, 'volume_local_details',
+                                                   CleanRoomAssetVolumeLocalDetails))
+
+
+class CleanRoomAssetAssetType(Enum):
+
+    FOREIGN_TABLE = 'FOREIGN_TABLE'
+    NOTEBOOK_FILE = 'NOTEBOOK_FILE'
+    TABLE = 'TABLE'
+    VIEW = 'VIEW'
+    VOLUME = 'VOLUME'
+
+
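Every dataclass in this file follows the same as_dict/as_shallow_dict/from_dict contract, so one construction sketch covers them all. The import path below assumes this module lands as `databricks.sdk.service.cleanrooms`; all sample names are illustrative only:

```python
# Sketch: build a table asset and serialize it for a request body. The module
# path and the sample names are assumptions, not taken from this diff.
from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                               CleanRoomAssetAssetType,
                                               CleanRoomAssetTableLocalDetails)

asset = CleanRoomAsset(name='shared_catalog.shared_schema.customers',
                       asset_type=CleanRoomAssetAssetType.TABLE,
                       table_local_details=CleanRoomAssetTableLocalDetails(
                           local_name='main.sales.customers'))

# as_dict() recursively serializes nested objects and enum values;
# as_shallow_dict() keeps nested objects and enums as-is.
print(asset.as_dict())
```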
+@dataclass
+class CleanRoomAssetForeignTable:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the foreign table"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTable:
+        """Deserializes the CleanRoomAssetForeignTable from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
+
+@dataclass
+class CleanRoomAssetForeignTableLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the foreign table in its owner's local metastore, in the format of
+    *catalog*.*schema*.*foreign_table_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTableLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetForeignTableLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetForeignTableLocalDetails:
+        """Deserializes the CleanRoomAssetForeignTableLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomAssetNotebook:
+    etag: Optional[str] = None
+    """Server generated checksum that represents the notebook version."""
+
+    notebook_content: Optional[str] = None
+    """Base 64 representation of the notebook contents. This is the same format as returned by
+    :method:workspace/export with the format of **HTML**."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetNotebook into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetNotebook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetNotebook:
+        """Deserializes the CleanRoomAssetNotebook from a dictionary."""
+        return cls(etag=d.get('etag', None), notebook_content=d.get('notebook_content', None))
+
+
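Since `notebook_content` carries the notebook as base64-encoded HTML, populating it is a one-liner. A small sketch, where `analysis.html` is a hypothetical local export:

```python
# Sketch: encode an exported notebook (HTML format) for notebook_content.
# 'analysis.html' is a hypothetical local file, not part of this diff.
import base64

from databricks.sdk.service.cleanrooms import CleanRoomAssetNotebook

with open('analysis.html', 'rb') as f:
    notebook = CleanRoomAssetNotebook(
        notebook_content=base64.b64encode(f.read()).decode('ascii'))
```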
+class CleanRoomAssetStatusEnum(Enum):
+
+    ACTIVE = 'ACTIVE'
+    PENDING = 'PENDING'
+    PERMISSION_DENIED = 'PERMISSION_DENIED'
+
+
+@dataclass
+class CleanRoomAssetTable:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the table"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTable:
+        """Deserializes the CleanRoomAssetTable from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
+
+@dataclass
+class CleanRoomAssetTableLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the table in its owner's local metastore, in the format of
+    *catalog*.*schema*.*table_name*"""
+
+    partitions: Optional[List[sharing.PartitionSpecificationPartition]] = None
+    """Partition filtering specification for a shared table."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTableLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.partitions: body['partitions'] = [v.as_dict() for v in self.partitions]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetTableLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        if self.partitions: body['partitions'] = self.partitions
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetTableLocalDetails:
+        """Deserializes the CleanRoomAssetTableLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None),
+                   partitions=_repeated_dict(d, 'partitions', sharing.PartitionSpecificationPartition))
+
+
+@dataclass
+class CleanRoomAssetView:
+    columns: Optional[List[catalog.ColumnInfo]] = None
+    """The metadata information of the columns in the view"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetView into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetView into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetView:
+        """Deserializes the CleanRoomAssetView from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', catalog.ColumnInfo))
+
+
+@dataclass
+class CleanRoomAssetViewLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the view in its owner's local metastore, in the format of
+    *catalog*.*schema*.*view_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetViewLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetViewLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetViewLocalDetails:
+        """Deserializes the CleanRoomAssetViewLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomAssetVolumeLocalDetails:
+    local_name: Optional[str] = None
+    """The fully qualified name of the volume in its owner's local metastore, in the format of
+    *catalog*.*schema*.*volume_name*"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomAssetVolumeLocalDetails into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomAssetVolumeLocalDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.local_name is not None: body['local_name'] = self.local_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetVolumeLocalDetails:
+        """Deserializes the CleanRoomAssetVolumeLocalDetails from a dictionary."""
+        return cls(local_name=d.get('local_name', None))
+
+
+@dataclass
+class CleanRoomCollaborator:
+    """Publicly visible clean room collaborator."""
+
+    collaborator_alias: str
+    """Collaborator alias specified by the clean room creator. It is unique across all collaborators of
+    this clean room, and used to derive multiple values internally such as catalog alias and clean
+    room name for single metastore clean rooms. It should follow [UC securable naming requirements].
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    display_name: Optional[str] = None
+    """Generated display name for the collaborator. In the case of a single metastore clean room, it is
+    the clean room name. For x-metastore clean rooms, it is the organization name of the metastore.
+    It is not restricted to these values and could change in the future."""
+
+    global_metastore_id: Optional[str] = None
+    """The global Unity Catalog metastore id of the collaborator. The identifier is of format
+    cloud:region:metastore-uuid."""
+
+    invite_recipient_email: Optional[str] = None
+    """Email of the user who is receiving the clean room "invitation". It should be empty for the
+    creator of the clean room, and non-empty for the invitees of the clean room. It is only returned
+    in the output when the clean room creator calls GET."""
+
+    invite_recipient_workspace_id: Optional[int] = None
+    """Workspace ID of the user who is receiving the clean room "invitation". Must be specified if
+    invite_recipient_email is specified. It should be empty when the collaborator is the creator of
+    the clean room."""
+
+    organization_name: Optional[str] = None
+    """[Organization name](:method:metastores/list#metastores-delta_sharing_organization_name)
+    configured in the metastore"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomCollaborator into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.invite_recipient_email is not None:
+            body['invite_recipient_email'] = self.invite_recipient_email
+        if self.invite_recipient_workspace_id is not None:
+            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
+        if self.organization_name is not None: body['organization_name'] = self.organization_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomCollaborator into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
+        if self.invite_recipient_email is not None:
+            body['invite_recipient_email'] = self.invite_recipient_email
+        if self.invite_recipient_workspace_id is not None:
+            body['invite_recipient_workspace_id'] = self.invite_recipient_workspace_id
+        if self.organization_name is not None: body['organization_name'] = self.organization_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaborator:
+        """Deserializes the CleanRoomCollaborator from a dictionary."""
+        return cls(collaborator_alias=d.get('collaborator_alias', None),
+                   display_name=d.get('display_name', None),
+                   global_metastore_id=d.get('global_metastore_id', None),
+                   invite_recipient_email=d.get('invite_recipient_email', None),
+                   invite_recipient_workspace_id=d.get('invite_recipient_workspace_id', None),
+                   organization_name=d.get('organization_name', None))
+
+
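The from_dict/as_dict pair round-trips cleanly because both sides skip unset fields. A minimal check, with a made-up payload:

```python
# Sketch: as_dict() emits only the fields that are set, so a sparse payload
# deserialized with from_dict() serializes back to itself.
from databricks.sdk.service.cleanrooms import CleanRoomCollaborator

payload = {'collaborator_alias': 'acme',
           'global_metastore_id': 'aws:us-west-2:a1b2c3'}
collab = CleanRoomCollaborator.from_dict(payload)
assert collab.as_dict() == payload
```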
+@dataclass
+class CleanRoomNotebookTaskRun:
+    """Stores information about a single task run."""
+
+    collaborator_job_run_info: Optional[CollaboratorJobRunInfo] = None
+    """Job run info of the task in the runner's local workspace. This field is only included in the
+    LIST API. if the task was run within the same workspace the API is being called. If the task run
+    was in a different workspace under the same metastore, only the workspace_id is included."""
+
+    notebook_job_run_state: Optional[jobs.CleanRoomTaskRunState] = None
+    """State of the task run."""
+
+    notebook_name: Optional[str] = None
+    """Asset name of the notebook executed in this task run."""
+
+    output_schema_expiration_time: Optional[int] = None
+    """Expiration time of the output schema of the task run (if any), in epoch milliseconds."""
+
+    output_schema_name: Optional[str] = None
+    """Name of the output schema associated with the clean rooms notebook task run."""
+
+    run_duration: Optional[int] = None
+    """Duration of the task run, in milliseconds."""
+
+    start_time: Optional[int] = None
+    """When the task run started, in epoch milliseconds."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookTaskRun into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_job_run_info:
+            body['collaborator_job_run_info'] = self.collaborator_job_run_info.as_dict()
+        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state.as_dict()
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.output_schema_expiration_time is not None:
+            body['output_schema_expiration_time'] = self.output_schema_expiration_time
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomNotebookTaskRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_job_run_info: body['collaborator_job_run_info'] = self.collaborator_job_run_info
+        if self.notebook_job_run_state: body['notebook_job_run_state'] = self.notebook_job_run_state
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        if self.output_schema_expiration_time is not None:
+            body['output_schema_expiration_time'] = self.output_schema_expiration_time
+        if self.output_schema_name is not None: body['output_schema_name'] = self.output_schema_name
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookTaskRun:
+        """Deserializes the CleanRoomNotebookTaskRun from a dictionary."""
+        return cls(collaborator_job_run_info=_from_dict(d, 'collaborator_job_run_info',
+                                                        CollaboratorJobRunInfo),
+                   notebook_job_run_state=_from_dict(d, 'notebook_job_run_state', jobs.CleanRoomTaskRunState),
+                   notebook_name=d.get('notebook_name', None),
+                   output_schema_expiration_time=d.get('output_schema_expiration_time', None),
+                   output_schema_name=d.get('output_schema_name', None),
+                   run_duration=d.get('run_duration', None),
+                   start_time=d.get('start_time', None))
+
+
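Both `start_time` and `run_duration` are expressed in milliseconds, so the wall-clock end of a run is simple arithmetic. A helper sketch, assuming both fields are populated:

```python
# Sketch: derive a run's end time from its epoch-millisecond fields.
from datetime import datetime, timezone

def run_end_time(run) -> datetime:
    """End of a CleanRoomNotebookTaskRun as an aware UTC datetime.
    Assumes start_time and run_duration are both set."""
    end_ms = run.start_time + run.run_duration
    return datetime.fromtimestamp(end_ms / 1000, tz=timezone.utc)
```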
+@dataclass
+class CleanRoomOutputCatalog:
+    catalog_name: Optional[str] = None
+    """The name of the output catalog in UC. It should follow [UC securable naming requirements]. The
+    field will always exist if status is CREATED.
+    
+    [UC securable naming requirements]: https://docs.databricks.com/en/data-governance/unity-catalog/index.html#securable-object-naming-requirements"""
+
+    status: Optional[CleanRoomOutputCatalogOutputCatalogStatus] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomOutputCatalog into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.status is not None: body['status'] = self.status.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomOutputCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomOutputCatalog:
+        """Deserializes the CleanRoomOutputCatalog from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   status=_enum(d, 'status', CleanRoomOutputCatalogOutputCatalogStatus))
+
+
+class CleanRoomOutputCatalogOutputCatalogStatus(Enum):
+
+    CREATED = 'CREATED'
+    NOT_CREATED = 'NOT_CREATED'
+    NOT_ELIGIBLE = 'NOT_ELIGIBLE'
+
+
+@dataclass
+class CleanRoomRemoteDetail:
+    """Publicly visible central clean room details."""
+
+    central_clean_room_id: Optional[str] = None
+    """Central clean room ID."""
+
+    cloud_vendor: Optional[str] = None
+    """Cloud vendor (aws,azure,gcp) of the central clean room."""
+
+    collaborators: Optional[List[CleanRoomCollaborator]] = None
+    """Collaborators in the central clean room. There should one and only one collaborator in the list
+    that satisfies the owner condition:
+    
+    1. It has the creator's global_metastore_id (determined by caller of CreateCleanRoom).
+    
+    2. Its invite_recipient_email is empty."""
+
+    compliance_security_profile: Optional[ComplianceSecurityProfile] = None
+    """The compliance security profile used to process regulated data following compliance standards."""
+
+    creator: Optional[CleanRoomCollaborator] = None
+    """Collaborator who creates the clean room."""
+
+    egress_network_policy: Optional[settings.EgressNetworkPolicy] = None
+    """Egress network policy to apply to the central clean room workspace."""
+
+    region: Optional[str] = None
+    """Region of the central clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomRemoteDetail into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
+        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
+        if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators]
+        if self.compliance_security_profile:
+            body['compliance_security_profile'] = self.compliance_security_profile.as_dict()
+        if self.creator: body['creator'] = self.creator.as_dict()
+        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy.as_dict()
+        if self.region is not None: body['region'] = self.region
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomRemoteDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.central_clean_room_id is not None: body['central_clean_room_id'] = self.central_clean_room_id
+        if self.cloud_vendor is not None: body['cloud_vendor'] = self.cloud_vendor
+        if self.collaborators: body['collaborators'] = self.collaborators
+        if self.compliance_security_profile:
+            body['compliance_security_profile'] = self.compliance_security_profile
+        if self.creator: body['creator'] = self.creator
+        if self.egress_network_policy: body['egress_network_policy'] = self.egress_network_policy
+        if self.region is not None: body['region'] = self.region
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomRemoteDetail:
+        """Deserializes the CleanRoomRemoteDetail from a dictionary."""
+        return cls(central_clean_room_id=d.get('central_clean_room_id', None),
+                   cloud_vendor=d.get('cloud_vendor', None),
+                   collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaborator),
+                   compliance_security_profile=_from_dict(d, 'compliance_security_profile',
+                                                          ComplianceSecurityProfile),
+                   creator=_from_dict(d, 'creator', CleanRoomCollaborator),
+                   egress_network_policy=_from_dict(d, 'egress_network_policy', settings.EgressNetworkPolicy),
+                   region=d.get('region', None))
+
+
+class CleanRoomStatusEnum(Enum):
+
+    ACTIVE = 'ACTIVE'
+    DELETED = 'DELETED'
+    FAILED = 'FAILED'
+    PROVISIONING = 'PROVISIONING'
+
+
+@dataclass
+class CollaboratorJobRunInfo:
+    collaborator_alias: Optional[str] = None
+    """Alias of the collaborator that triggered the task run."""
+
+    collaborator_job_id: Optional[int] = None
+    """Job ID of the task run in the collaborator's workspace."""
+
+    collaborator_job_run_id: Optional[int] = None
+    """Job run ID of the task run in the collaborator's workspace."""
+
+    collaborator_task_run_id: Optional[int] = None
+    """Task run ID of the task run in the collaborator's workspace."""
+
+    collaborator_workspace_id: Optional[int] = None
+    """ID of the collaborator's workspace that triggered the task run."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CollaboratorJobRunInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_job_run_id is not None:
+            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+        if self.collaborator_task_run_id is not None:
+            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+        if self.collaborator_workspace_id is not None:
+            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CollaboratorJobRunInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.collaborator_alias is not None: body['collaborator_alias'] = self.collaborator_alias
+        if self.collaborator_job_id is not None: body['collaborator_job_id'] = self.collaborator_job_id
+        if self.collaborator_job_run_id is not None:
+            body['collaborator_job_run_id'] = self.collaborator_job_run_id
+        if self.collaborator_task_run_id is not None:
+            body['collaborator_task_run_id'] = self.collaborator_task_run_id
+        if self.collaborator_workspace_id is not None:
+            body['collaborator_workspace_id'] = self.collaborator_workspace_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CollaboratorJobRunInfo:
+        """Deserializes the CollaboratorJobRunInfo from a dictionary."""
+        return cls(collaborator_alias=d.get('collaborator_alias', None),
+                   collaborator_job_id=d.get('collaborator_job_id', None),
+                   collaborator_job_run_id=d.get('collaborator_job_run_id', None),
+                   collaborator_task_run_id=d.get('collaborator_task_run_id', None),
+                   collaborator_workspace_id=d.get('collaborator_workspace_id', None))
+
+
+@dataclass
+class ComplianceSecurityProfile:
+    """The compliance security profile used to process regulated data following compliance standards."""
+
+    compliance_standards: Optional[List[settings.ComplianceStandard]] = None
+    """The list of compliance standards that the compliance security profile is configured to enforce."""
+
+    is_enabled: Optional[bool] = None
+    """Whether the compliance security profile is enabled."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.compliance_standards:
+            body['compliance_standards'] = [v.as_dict() for v in self.compliance_standards]
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
+        """Deserializes the ComplianceSecurityProfile from a dictionary."""
+        return cls(compliance_standards=_repeated_dict(d, 'compliance_standards',
+                                                       settings.ComplianceStandard),
+                   is_enabled=d.get('is_enabled', None))
+
+
+@dataclass
+class CreateCleanRoomOutputCatalogResponse:
+    output_catalog: Optional[CleanRoomOutputCatalog] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CreateCleanRoomOutputCatalogResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.output_catalog: body['output_catalog'] = self.output_catalog.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCleanRoomOutputCatalogResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.output_catalog: body['output_catalog'] = self.output_catalog
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoomOutputCatalogResponse:
+        """Deserializes the CreateCleanRoomOutputCatalogResponse from a dictionary."""
+        return cls(output_catalog=_from_dict(d, 'output_catalog', CleanRoomOutputCatalog))
+
+
+@dataclass
+class DeleteCleanRoomAssetResponse:
+    """Response for delete clean room request. Using an empty message since the generic Empty proto
+    does not externd UnshadedMessageMarker."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteCleanRoomAssetResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCleanRoomAssetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteCleanRoomAssetResponse:
+        """Deserializes the DeleteCleanRoomAssetResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class ListCleanRoomAssetsResponse:
+    assets: Optional[List[CleanRoomAsset]] = None
+    """Assets in the clean room."""
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomAssetsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.assets: body['assets'] = [v.as_dict() for v in self.assets]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomAssetsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets: body['assets'] = self.assets
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomAssetsResponse:
+        """Deserializes the ListCleanRoomAssetsResponse from a dictionary."""
+        return cls(assets=_repeated_dict(d, 'assets', CleanRoomAsset),
+                   next_page_token=d.get('next_page_token', None))
+
+
+@dataclass
+class ListCleanRoomNotebookTaskRunsResponse:
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    runs: Optional[List[CleanRoomNotebookTaskRun]] = None
+    """Name of the clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomNotebookTaskRunsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomNotebookTaskRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomNotebookTaskRunsResponse:
+        """Deserializes the ListCleanRoomNotebookTaskRunsResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   runs=_repeated_dict(d, 'runs', CleanRoomNotebookTaskRun))
+
+
+@dataclass
+class ListCleanRoomsResponse:
+    clean_rooms: Optional[List[CleanRoom]] = None
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages. page_token
+    should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms]
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCleanRoomsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_rooms: body['clean_rooms'] = self.clean_rooms
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
+        """Deserializes the ListCleanRoomsResponse from a dictionary."""
+        return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoom),
+                   next_page_token=d.get('next_page_token', None))
+
+
+@dataclass
+class UpdateCleanRoomRequest:
+    clean_room: Optional[CleanRoom] = None
+
+    name: Optional[str] = None
+    """Name of the clean room."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCleanRoomRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room: body['clean_room'] = self.clean_room.as_dict()
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCleanRoomRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room: body['clean_room'] = self.clean_room
+        if self.name is not None: body['name'] = self.name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoomRequest:
+        """Deserializes the UpdateCleanRoomRequest from a dictionary."""
+        return cls(clean_room=_from_dict(d, 'clean_room', CleanRoom), name=d.get('name', None))
+
+
+class CleanRoomAssetsAPI:
+    """Clean room assets are data and code objects — Tables, volumes, and notebooks that are shared with the
+    clean room."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, clean_room_name: str, *, asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
+        """Create an asset.
+        
+        Create a clean room asset: share an asset, such as a notebook or table, into the clean room. For each UC
+        asset that is added through this method, the clean room owner must also have enough privilege on the
+        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+        access the asset. Typically, you should use a group as the clean room owner.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+        body = asset.as_dict() if asset is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/assets',
+                           body=body,
+                           headers=headers)
+        return CleanRoomAsset.from_dict(res)
+
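A usage sketch for this method; the `clean_room_assets` accessor name on `WorkspaceClient`, like every sample name, is an assumption rather than something shown in this diff:

```python
# Sketch: share a table into an existing clean room. The accessor name
# `clean_room_assets` and all sample names are assumptions.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                               CleanRoomAssetAssetType,
                                               CleanRoomAssetTableLocalDetails)

w = WorkspaceClient()
created = w.clean_room_assets.create(
    clean_room_name='demo-clean-room',
    asset=CleanRoomAsset(
        name='shared_catalog.shared_schema.customers',
        asset_type=CleanRoomAssetAssetType.TABLE,
        table_local_details=CleanRoomAssetTableLocalDetails(
            local_name='main.sales.customers')))
print(created.status)
```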
+    def delete(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str):
+        """Delete an asset.
+        
+        Delete a clean room asset: unshare/remove the asset from the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
+                     headers=headers)
+
+    def get(self, clean_room_name: str, asset_type: CleanRoomAssetAssetType,
+            asset_full_name: str) -> CleanRoomAsset:
+        """Get an asset.
+        
+        Get the details of a clean room asset by its type and full name.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{asset_full_name}',
+            headers=headers)
+        return CleanRoomAsset.from_dict(res)
+
+    def list(self, clean_room_name: str, *, page_token: Optional[str] = None) -> Iterator[CleanRoomAsset]:
+        """List assets.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param page_token: str (optional)
+          Opaque pagination token used to fetch the next page of results from the previous query.
+        
+        :returns: Iterator over :class:`CleanRoomAsset`
+        """
+
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/clean-rooms/{clean_room_name}/assets',
+                                query=query,
+                                headers=headers)
+            if 'assets' in json:
+                for v in json['assets']:
+                    yield CleanRoomAsset.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
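The returned iterator hides the token handling above: it keeps requesting pages until the server stops returning `next_page_token`. A short sketch, with the same assumed accessor name:

```python
# Sketch: lazy iteration over all pages of assets; the iterator follows
# next_page_token internally. Accessor name assumed as above.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for a in w.clean_room_assets.list('demo-clean-room'):
    print(a.name, a.asset_type)
```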
+    def update(self,
+               clean_room_name: str,
+               asset_type: CleanRoomAssetAssetType,
+               name: str,
+               *,
+               asset: Optional[CleanRoomAsset] = None) -> CleanRoomAsset:
+        """Update an asset.
+        
+        Update a clean room asset. For example, updating the content of a notebook; changing the shared
+        partitions of a table; etc.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param name: str
+          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+          name displayed in the clean room UI.
+          
+          For UC securable assets (tables, volumes, etc.), the format is
+          *shared_catalog*.*shared_schema*.*asset_name*
+          
+          For notebooks, the name is the notebook file name.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        """
+        body = asset.as_dict() if asset is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/assets/{asset_type.value}/{name}',
+                           body=body,
+                           headers=headers)
+        return CleanRoomAsset.from_dict(res)
+
+
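To round out the asset API, an update sketch that refreshes a shared notebook's content; the asset name, local file, and accessor name remain assumptions:

```python
# Sketch: replace the content of a shared notebook. 'analysis' and the
# local file are hypothetical; accessor name assumed as above.
import base64

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import (CleanRoomAsset,
                                               CleanRoomAssetAssetType,
                                               CleanRoomAssetNotebook)

w = WorkspaceClient()
with open('analysis.html', 'rb') as f:
    content = base64.b64encode(f.read()).decode('ascii')
w.clean_room_assets.update(
    clean_room_name='demo-clean-room',
    asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
    name='analysis',
    asset=CleanRoomAsset(
        name='analysis',
        asset_type=CleanRoomAssetAssetType.NOTEBOOK_FILE,
        notebook=CleanRoomAssetNotebook(notebook_content=content)))
```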
+class CleanRoomTaskRunsAPI:
+    """Clean room task runs are the executions of notebooks in a clean room."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def list(self,
+             clean_room_name: str,
+             *,
+             notebook_name: Optional[str] = None,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[CleanRoomNotebookTaskRun]:
+        """List notebook task runs.
+        
+        List all the historical notebook task runs in a clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param notebook_name: str (optional)
+          Notebook name
+        :param page_size: int (optional)
+          The maximum number of task runs to return
+        :param page_token: str (optional)
+          Opaque pagination token used to fetch the next page of results from the previous query.
+        
+        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        """
+
+        query = {}
+        if notebook_name is not None: query['notebook_name'] = notebook_name
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/clean-rooms/{clean_room_name}/runs',
+                                query=query,
+                                headers=headers)
+            if 'runs' in json:
+                for v in json['runs']:
+                    yield CleanRoomNotebookTaskRun.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+
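A quick sketch of listing runs for one notebook; the `clean_room_task_runs` accessor name is an assumption:

```python
# Sketch: page_size caps each request; the iterator still walks every page.
# Accessor and sample names are assumptions.
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for run in w.clean_room_task_runs.list('demo-clean-room',
                                       notebook_name='audience_overlap',
                                       page_size=25):
    print(run.notebook_name, run.notebook_job_run_state)
```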
+class CleanRoomsAPI:
+    """A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
+    environment where multiple parties can work together on sensitive enterprise data without direct access to
+    each other’s data."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
+        """Create a clean room.
+        
+        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+        name field inside the clean_room field can be used to poll the clean room status, using the
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+        once it enters an ACTIVE state.
+        
+        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+        
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        """
+        body = clean_room.as_dict() if clean_room is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/clean-rooms', body=body, headers=headers)
+        return CleanRoom.from_dict(res)
+
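Because creation is asynchronous, callers typically poll :method:cleanrooms/get until the room leaves the PROVISIONING state. A polling sketch; the `clean_rooms` accessor and the `CleanRoom(name=...)` constructor argument are assumptions, since CleanRoom itself is defined earlier in this file:

```python
# Sketch: create a clean room, then poll until it leaves PROVISIONING.
# Accessor name and CleanRoom fields are assumptions.
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom, CleanRoomStatusEnum

w = WorkspaceClient()
room = w.clean_rooms.create(clean_room=CleanRoom(name='demo-clean-room'))
while w.clean_rooms.get(room.name).status == CleanRoomStatusEnum.PROVISIONING:
    time.sleep(10)
```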
+    def create_output_catalog(
+            self,
+            clean_room_name: str,
+            *,
+            output_catalog: Optional[CleanRoomOutputCatalog] = None) -> CreateCleanRoomOutputCatalogResponse:
+        """Create an output catalog.
+        
+        Create the output catalog of the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+        
+        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
+        """
+        body = output_catalog.as_dict() if output_catalog is not None else {}
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/clean-rooms/{clean_room_name}/output-catalogs',
+                           body=body,
+                           headers=headers)
+        return CreateCleanRoomOutputCatalogResponse.from_dict(res)
+
+    def delete(self, name: str):
+        """Delete a clean room.
+        
+        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+        but it will be in a DELETED state and no operations other than deletion can be performed on it.
+        
+        :param name: str
+          Name of the clean room.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE', f'/api/2.0/clean-rooms/{name}', headers=headers)
+
+    def get(self, name: str) -> CleanRoom:
+        """Get a clean room.
+        
+        Get the details of a clean room given its name.
+        
+        :param name: str
+        
+        :returns: :class:`CleanRoom`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', f'/api/2.0/clean-rooms/{name}', headers=headers)
+        return CleanRoom.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[CleanRoom]:
+        """List clean rooms.
+        
+        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+        returned.
+        
+        :param page_size: int (optional)
+          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+        :param page_token: str (optional)
+          Opaque pagination token used to fetch the next page of results from the previous query.
+        
+        :returns: Iterator over :class:`CleanRoom`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET', '/api/2.0/clean-rooms', query=query, headers=headers)
+            if 'clean_rooms' in json:
+                for v in json['clean_rooms']:
+                    yield CleanRoom.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self, name: str, *, clean_room: Optional[CleanRoom] = None) -> CleanRoom:
+        """Update a clean room.
+        
+        Update a clean room. The caller must be the owner of the clean room, have the
+        **MODIFY_CLEAN_ROOM** privilege, or be a metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        :param name: str
+          Name of the clean room.
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        """
+        body = {}
+        if clean_room is not None: body['clean_room'] = clean_room.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH', f'/api/2.0/clean-rooms/{name}', body=body, headers=headers)
+        return CleanRoom.from_dict(res)
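Finally, an update sketch tied to the note above that a metastore admin may only change the owner; the `owner` field on CleanRoom is assumed from the create() docstring:

```python
# Sketch: transfer clean room ownership. Accessor name and the `owner`
# field are assumptions.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom

w = WorkspaceClient()
updated = w.clean_rooms.update(
    name='demo-clean-room',
    clean_room=CleanRoom(owner='data-clean-room-admins'))
```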
diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index fabd258d0..63a971b73 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -55,6 +55,16 @@ def as_dict(self) -> dict:
         if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddInstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.is_meta_instance_profile is not None:
+            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+        if self.skip_validation is not None: body['skip_validation'] = self.skip_validation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddInstanceProfile:
         """Deserializes the AddInstanceProfile from a dictionary."""
@@ -72,6 +82,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddResponse:
         """Deserializes the AddResponse from a dictionary."""
@@ -90,6 +105,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Adlsgen2Info into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Adlsgen2Info:
         """Deserializes the Adlsgen2Info from a dictionary."""
@@ -113,6 +134,13 @@ def as_dict(self) -> dict:
         if self.min_workers is not None: body['min_workers'] = self.min_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoScale into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.max_workers is not None: body['max_workers'] = self.max_workers
+        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoScale:
         """Deserializes the AutoScale from a dictionary."""
@@ -216,6 +244,22 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.ebs_volume_count is not None: body['ebs_volume_count'] = self.ebs_volume_count
+        if self.ebs_volume_iops is not None: body['ebs_volume_iops'] = self.ebs_volume_iops
+        if self.ebs_volume_size is not None: body['ebs_volume_size'] = self.ebs_volume_size
+        if self.ebs_volume_throughput is not None: body['ebs_volume_throughput'] = self.ebs_volume_throughput
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
+        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.spot_bid_price_percent is not None:
+            body['spot_bid_price_percent'] = self.spot_bid_price_percent
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsAttributes:
         """Deserializes the AwsAttributes from a dictionary."""
@@ -275,6 +319,15 @@ def as_dict(self) -> dict:
         if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.first_on_demand is not None: body['first_on_demand'] = self.first_on_demand
+        if self.log_analytics_info: body['log_analytics_info'] = self.log_analytics_info
+        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureAttributes:
         """Deserializes the AzureAttributes from a dictionary."""
@@ -310,6 +363,14 @@ def as_dict(self) -> dict:
         if self.context_id is not None: body['contextId'] = self.context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelCommand into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.command_id is not None: body['commandId'] = self.command_id
+        if self.context_id is not None: body['contextId'] = self.context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelCommand:
         """Deserializes the CancelCommand from a dictionary."""
@@ -326,6 +387,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelResponse:
         """Deserializes the CancelResponse from a dictionary."""
@@ -347,6 +413,13 @@ def as_dict(self) -> dict:
         if self.owner_username is not None: body['owner_username'] = self.owner_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChangeClusterOwner into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.owner_username is not None: body['owner_username'] = self.owner_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwner:
         """Deserializes the ChangeClusterOwner from a dictionary."""
@@ -361,6 +434,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChangeClusterOwnerResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChangeClusterOwnerResponse:
         """Deserializes the ChangeClusterOwnerResponse from a dictionary."""
@@ -382,6 +460,13 @@ def as_dict(self) -> dict:
         if self.notebooks is not None: body['notebooks'] = self.notebooks
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClientsTypes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jobs is not None: body['jobs'] = self.jobs
+        if self.notebooks is not None: body['notebooks'] = self.notebooks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClientsTypes:
         """Deserializes the ClientsTypes from a dictionary."""
@@ -399,6 +484,12 @@ def as_dict(self) -> dict:
         if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloneCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.source_cluster_id is not None: body['source_cluster_id'] = self.source_cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloneCluster:
         """Deserializes the CloneCluster from a dictionary."""
@@ -415,6 +506,12 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = [v.value for v in self.status]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudProviderNodeInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudProviderNodeInfo:
         """Deserializes the CloudProviderNodeInfo from a dictionary."""
@@ -451,6 +548,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlRequest:
         """Deserializes the ClusterAccessControlRequest from a dictionary."""
@@ -488,6 +595,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAccessControlResponse:
         """Deserializes the ClusterAccessControlResponse from a dictionary."""
@@ -519,11 +637,11 @@ class ClusterAttributes:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -541,13 +659,19 @@ class ClusterAttributes:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
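
A hedged sketch of the expanded `data_security_mode` values described above (enum member names are assumed to mirror the documented wire values):

```python
from databricks.sdk.service.compute import ClusterSpec, DataSecurityMode

# Legacy-style explicit mode: a dedicated single-user cluster.
dedicated = ClusterSpec(spark_version='15.4.x-scala2.12',
                        data_security_mode=DataSecurityMode.SINGLE_USER,
                        single_user_name='someone@example.com')

# With the new `kind`-based form, AUTO lets Databricks choose the
# access mode from the rest of the compute configuration.
auto = ClusterSpec(spark_version='15.4.x-scala2.12',
                   data_security_mode=DataSecurityMode.DATA_SECURITY_MODE_AUTO)
```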
@@ -588,6 +712,20 @@ class ClusterAttributes:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -632,6 +770,12 @@ class ClusterAttributes:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
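
The `use_ml_runtime` field added above works together with `kind` and `is_single_node` in the simple cluster form. A short sketch, assuming `Kind.CLASSIC_PREVIEW` is the enum member matching the documented value:

```python
from databricks.sdk.service.compute import ClusterSpec, Kind

# With kind set, Databricks derives the single-node custom_tags,
# spark_conf and num_workers from is_single_node, and resolves
# effective_spark_version from spark_version, use_ml_runtime and
# whether node_type_id is a GPU node.
spec = ClusterSpec(spark_version='15.4.x-scala2.12',
                   kind=Kind.CLASSIC_PREVIEW,
                   is_single_node=True,
                   use_ml_runtime=True,
                   node_type_id='i3.xlarge')
```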
@@ -655,6 +799,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine.value
@@ -663,9 +809,45 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
         """Deserializes the ClusterAttributes from a dictionary."""
@@ -684,6 +866,8 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    policy_id=d.get('policy_id', None),
                    runtime_engine=_enum(d, 'runtime_engine', RuntimeEngine),
@@ -692,6 +876,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterAttributes:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -716,6 +901,14 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterCompliance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterCompliance:
         """Deserializes the ClusterCompliance from a dictionary."""
@@ -754,11 +947,11 @@ class ClusterDetails:
     while each new cluster has a globally unique id."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_log_status: Optional[LogSyncStatus] = None
     """Cluster log delivery status."""
@@ -790,13 +983,19 @@ class ClusterDetails:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -881,10 +1080,24 @@ class ClusterDetails:
     """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
     sdk so it needed to be added here"""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
     jdbc_port: Optional[int] = None
     """Port on which Spark JDBC server is listening, in the driver nod. No service will be listeningon
     on this port in executor nodes."""
 
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     last_activity_time: Optional[int] = None
     """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
     sdk so it needed to be added here"""
@@ -981,6 +1194,12 @@ class ClusterDetails:
     """Information about why the cluster was terminated. This field only appears when the cluster is in
     a `TERMINATING` or `TERMINATED` state."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -1014,7 +1233,9 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
         if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
         if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
@@ -1033,9 +1254,66 @@ def as_dict(self) -> dict:
         if self.state_message is not None: body['state_message'] = self.state_message
         if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
         if self.termination_reason: body['termination_reason'] = self.termination_reason.as_dict()
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_cores is not None: body['cluster_cores'] = self.cluster_cores
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_log_status: body['cluster_log_status'] = self.cluster_log_status
+        if self.cluster_memory_mb is not None: body['cluster_memory_mb'] = self.cluster_memory_mb
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.cluster_source is not None: body['cluster_source'] = self.cluster_source
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver: body['driver'] = self.driver
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.executors: body['executors'] = self.executors
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.jdbc_port is not None: body['jdbc_port'] = self.jdbc_port
+        if self.kind is not None: body['kind'] = self.kind
+        if self.last_restarted_time is not None: body['last_restarted_time'] = self.last_restarted_time
+        if self.last_state_loss_time is not None: body['last_state_loss_time'] = self.last_state_loss_time
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.spec: body['spec'] = self.spec
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state is not None: body['state'] = self.state
+        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.terminated_time is not None: body['terminated_time'] = self.terminated_time
+        if self.termination_reason: body['termination_reason'] = self.termination_reason
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
         """Deserializes the ClusterDetails from a dictionary."""
@@ -1064,7 +1342,9 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
                    jdbc_port=d.get('jdbc_port', None),
+                   kind=_enum(d, 'kind', Kind),
                    last_restarted_time=d.get('last_restarted_time', None),
                    last_state_loss_time=d.get('last_state_loss_time', None),
                    node_type_id=d.get('node_type_id', None),
@@ -1083,6 +1363,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterDetails:
                    state_message=d.get('state_message', None),
                    terminated_time=d.get('terminated_time', None),
                    termination_reason=_from_dict(d, 'termination_reason', TerminationReason),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -1114,6 +1395,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterEvent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.data_plane_event_details: body['data_plane_event_details'] = self.data_plane_event_details
+        if self.details: body['details'] = self.details
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterEvent:
         """Deserializes the ClusterEvent from a dictionary."""
@@ -1139,6 +1430,13 @@ def as_dict(self) -> dict:
         if self.library_statuses: body['library_statuses'] = [v.as_dict() for v in self.library_statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterLibraryStatuses into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.library_statuses: body['library_statuses'] = self.library_statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLibraryStatuses:
         """Deserializes the ClusterLibraryStatuses from a dictionary."""
@@ -1158,17 +1456,32 @@ class ClusterLogConf:
     access s3, please make sure the cluster iam role in `instance_profile_arn` has permission to
     write data to the s3 destination."""
 
+    volumes: Optional[VolumesStorageInfo] = None
+    """destination needs to be provided. e.g. `{ "volumes" : { "destination" :
+    "/Volumes/catalog/schema/volume/cluster_log" } }`"""
+
     def as_dict(self) -> dict:
         """Serializes the ClusterLogConf into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.dbfs: body['dbfs'] = self.dbfs.as_dict()
         if self.s3: body['s3'] = self.s3.as_dict()
+        if self.volumes: body['volumes'] = self.volumes.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterLogConf into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbfs: body['dbfs'] = self.dbfs
+        if self.s3: body['s3'] = self.s3
+        if self.volumes: body['volumes'] = self.volumes
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterLogConf:
         """Deserializes the ClusterLogConf from a dictionary."""
-        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo), s3=_from_dict(d, 's3', S3StorageInfo))
+        return cls(dbfs=_from_dict(d, 'dbfs', DbfsStorageInfo),
+                   s3=_from_dict(d, 's3', S3StorageInfo),
+                   volumes=_from_dict(d, 'volumes', VolumesStorageInfo))
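
A short sketch of the new Unity Catalog volumes log destination (the path is illustrative), also showing how the shallow serializer differs from `as_dict` on a nested field:

```python
from databricks.sdk.service.compute import ClusterLogConf, VolumesStorageInfo

# Only one destination may be set per cluster, so dbfs/s3 stay unset.
log_conf = ClusterLogConf(
    volumes=VolumesStorageInfo(destination='/Volumes/catalog/schema/volume/cluster_log'))

# as_dict() recurses into the nested VolumesStorageInfo, while
# as_shallow_dict() keeps the object itself.
assert log_conf.as_dict() == {
    'volumes': {'destination': '/Volumes/catalog/schema/volume/cluster_log'}}
assert log_conf.as_shallow_dict()['volumes'] is log_conf.volumes
```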
 
 
 @dataclass
@@ -1188,6 +1501,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermission:
         """Deserializes the ClusterPermission from a dictionary."""
@@ -1221,6 +1542,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissions:
         """Deserializes the ClusterPermissions from a dictionary."""
@@ -1243,6 +1572,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsDescription:
         """Deserializes the ClusterPermissionsDescription from a dictionary."""
@@ -1265,6 +1601,13 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPermissionsRequest:
         """Deserializes the ClusterPermissionsRequest from a dictionary."""
@@ -1296,6 +1639,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlRequest:
         """Deserializes the ClusterPolicyAccessControlRequest from a dictionary."""
@@ -1333,6 +1686,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyAccessControlResponse:
         """Deserializes the ClusterPolicyAccessControlResponse from a dictionary."""
@@ -1360,6 +1724,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermission:
         """Deserializes the ClusterPolicyPermission from a dictionary."""
@@ -1391,6 +1763,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissions:
         """Deserializes the ClusterPolicyPermissions from a dictionary."""
@@ -1414,6 +1794,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsDescription:
         """Deserializes the ClusterPolicyPermissionsDescription from a dictionary."""
@@ -1436,6 +1823,13 @@ def as_dict(self) -> dict:
         if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterPolicyPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.cluster_policy_id is not None: body['cluster_policy_id'] = self.cluster_policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterPolicyPermissionsRequest:
         """Deserializes the ClusterPolicyPermissionsRequest from a dictionary."""
@@ -1470,6 +1864,14 @@ def as_dict(self) -> dict:
         if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSettingsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSettingsChange:
         """Deserializes the ClusterSettingsChange from a dictionary."""
@@ -1501,6 +1903,13 @@ def as_dict(self) -> dict:
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSize into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSize:
         """Deserializes the ClusterSize from a dictionary."""
@@ -1545,11 +1954,11 @@ class ClusterSpec:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -1567,13 +1976,19 @@ class ClusterSpec:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -1614,6 +2029,20 @@ class ClusterSpec:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -1672,6 +2101,12 @@ class ClusterSpec:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -1698,6 +2133,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -1707,29 +2144,71 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
-        """Deserializes the ClusterSpec from a dictionary."""
-        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
-                   autoscale=_from_dict(d, 'autoscale', AutoScale),
-                   autotermination_minutes=d.get('autotermination_minutes', None),
-                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
-                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
-                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
-                   cluster_name=d.get('cluster_name', None),
-                   custom_tags=d.get('custom_tags', None),
-                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
-                   docker_image=_from_dict(d, 'docker_image', DockerImage),
-                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
-                   driver_node_type_id=d.get('driver_node_type_id', None),
-                   enable_elastic_disk=d.get('enable_elastic_disk', None),
-                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
+        """Deserializes the ClusterSpec from a dictionary."""
+        return cls(apply_policy_default_values=d.get('apply_policy_default_values', None),
+                   autoscale=_from_dict(d, 'autoscale', AutoScale),
+                   autotermination_minutes=d.get('autotermination_minutes', None),
+                   aws_attributes=_from_dict(d, 'aws_attributes', AwsAttributes),
+                   azure_attributes=_from_dict(d, 'azure_attributes', AzureAttributes),
+                   cluster_log_conf=_from_dict(d, 'cluster_log_conf', ClusterLogConf),
+                   cluster_name=d.get('cluster_name', None),
+                   custom_tags=d.get('custom_tags', None),
+                   data_security_mode=_enum(d, 'data_security_mode', DataSecurityMode),
+                   docker_image=_from_dict(d, 'docker_image', DockerImage),
+                   driver_instance_pool_id=d.get('driver_instance_pool_id', None),
+                   driver_node_type_id=d.get('driver_node_type_id', None),
+                   enable_elastic_disk=d.get('enable_elastic_disk', None),
+                   enable_local_disk_encryption=d.get('enable_local_disk_encryption', None),
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -1739,6 +2218,7 @@ def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
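
The relocated `from_dict` stays the inverse of `as_dict` for the new fields; a quick round-trip sketch:

```python
from databricks.sdk.service.compute import ClusterSpec

d = {'spark_version': '15.4.x-scala2.12',
     'kind': 'CLASSIC_PREVIEW',
     'is_single_node': True,
     'use_ml_runtime': False}

spec = ClusterSpec.from_dict(d)
# `kind` is parsed into the Kind enum and serialized back via `.value`,
# so the round trip reproduces the wire-format dictionary.
assert spec.as_dict() == d
```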
 
 
@@ -1764,6 +2244,15 @@ def as_dict(self) -> dict:
         if self.language is not None: body['language'] = self.language.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Command into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.command is not None: body['command'] = self.command
+        if self.context_id is not None: body['contextId'] = self.context_id
+        if self.language is not None: body['language'] = self.language
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Command:
         """Deserializes the Command from a dictionary."""
@@ -1799,6 +2288,14 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CommandStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.results: body['results'] = self.results
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommandStatusResponse:
         """Deserializes the CommandStatusResponse from a dictionary."""
@@ -1827,6 +2324,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContextStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContextStatusResponse:
         """Deserializes the ContextStatusResponse from a dictionary."""
@@ -1866,11 +2370,11 @@ class CreateCluster:
     cluster."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -1888,13 +2392,19 @@ class CreateCluster:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -1935,6 +2445,20 @@ class CreateCluster:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -1989,6 +2513,12 @@ class CreateCluster:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node or not."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -2016,6 +2546,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2025,9 +2557,50 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.clone_from: body['clone_from'] = self.clone_from
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
         """Deserializes the CreateCluster from a dictionary."""
@@ -2049,6 +2622,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -2058,6 +2633,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCluster:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
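
Taken together, the new `kind`, `is_single_node`, and `use_ml_runtime` fields let a single-node ML cluster be requested without hand-writing the single-node `spark_conf` and `custom_tags`. A hedged sketch of such a request spec; the cluster name, DBR release, and node type are illustrative values, not defaults:

    from databricks.sdk.service import compute

    req = compute.CreateCluster(
        cluster_name='single-node-ml',        # illustrative
        spark_version='15.4.x-scala2.12',     # illustrative DBR release
        node_type_id='i3.xlarge',             # illustrative node type
        kind=compute.Kind.CLASSIC_PREVIEW,    # opts in to kind-based defaults and validation
        is_single_node=True,                  # server derives single-node tags/conf/num_workers
        use_ml_runtime=True)                  # effective_spark_version resolves to the ML runtime

    body = req.as_dict()
    assert body['is_single_node'] is True and body['use_ml_runtime'] is True
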
 
 
@@ -2071,6 +2647,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateClusterResponse:
         """Deserializes the CreateClusterResponse from a dictionary."""
@@ -2091,6 +2673,13 @@ def as_dict(self) -> dict:
         if self.language is not None: body['language'] = self.language.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.language is not None: body['language'] = self.language
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateContext:
         """Deserializes the CreateContext from a dictionary."""
@@ -2180,6 +2769,25 @@ def as_dict(self) -> dict:
             body['preloaded_spark_versions'] = [v for v in self.preloaded_spark_versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePool:
         """Deserializes the CreateInstancePool from a dictionary."""
@@ -2209,6 +2817,12 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstancePoolResponse:
         """Deserializes the CreateInstancePoolResponse from a dictionary."""
@@ -2266,6 +2880,19 @@ def as_dict(self) -> dict:
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicy:
         """Deserializes the CreatePolicy from a dictionary."""
@@ -2289,6 +2916,12 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePolicyResponse:
         """Deserializes the CreatePolicyResponse from a dictionary."""
@@ -2306,6 +2939,12 @@ def as_dict(self) -> dict:
         if self.script_id is not None: body['script_id'] = self.script_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.script_id is not None: body['script_id'] = self.script_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -2322,12 +2961,54 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Created into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Created:
         """Deserializes the Created from a dictionary."""
         return cls(id=d.get('id', None))
 
 
+@dataclass
+class CustomPolicyTag:
+    key: str
+    """The key of the tag. - Must be unique among all custom tags of the same policy - Cannot be
+    "budget-policy-name", "budget-policy-id" or "budget-policy-resolution-result" - these
+    tags are preserved.
+    
+    - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+    (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L17)"""
+
+    value: Optional[str] = None
+    """The value of the tag.
+    
+    - Follows the regex pattern defined in cluster-common/conf/src/ClusterTagConstraints.scala
+    (https://src.dev.databricks.com/databricks/universe@1647196627c8dc7b4152ad098a94b86484b93a6c/-/blob/cluster-common/conf/src/ClusterTagConstraints.scala?L24)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CustomPolicyTag into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomPolicyTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CustomPolicyTag:
+        """Deserializes the CustomPolicyTag from a dictionary."""
+        return cls(key=d.get('key', None), value=d.get('value', None))
+
+
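
A short round-trip sketch for the new dataclass; the key/value pair is illustrative, and the reserved budget-policy keys noted above must be avoided:

    from databricks.sdk.service.compute import CustomPolicyTag

    tag = CustomPolicyTag(key='team', value='data-platform')  # illustrative tag
    assert CustomPolicyTag.from_dict(tag.as_dict()) == tag    # as_dict/from_dict round-trip
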
 @dataclass
 class DataPlaneEventDetails:
     event_type: Optional[DataPlaneEventDetailsEventType] = None
@@ -2351,6 +3032,15 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneEventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.event_type is not None: body['event_type'] = self.event_type
+        if self.executor_failures is not None: body['executor_failures'] = self.executor_failures
+        if self.host_id is not None: body['host_id'] = self.host_id
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneEventDetails:
         """Deserializes the DataPlaneEventDetails from a dictionary."""
@@ -2370,13 +3060,19 @@ class DataPlaneEventDetailsEventType(Enum):
 class DataSecurityMode(Enum):
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -2387,6 +3083,9 @@ class DataSecurityMode(Enum):
     Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that
     doesn’t have UC nor passthrough enabled."""
 
+    DATA_SECURITY_MODE_AUTO = 'DATA_SECURITY_MODE_AUTO'
+    DATA_SECURITY_MODE_DEDICATED = 'DATA_SECURITY_MODE_DEDICATED'
+    DATA_SECURITY_MODE_STANDARD = 'DATA_SECURITY_MODE_STANDARD'
     LEGACY_PASSTHROUGH = 'LEGACY_PASSTHROUGH'
     LEGACY_SINGLE_USER = 'LEGACY_SINGLE_USER'
     LEGACY_SINGLE_USER_STANDARD = 'LEGACY_SINGLE_USER_STANDARD'
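
The alias members only take effect with `kind` set; per the docstring above, the server resolves `DATA_SECURITY_MODE_STANDARD` to `USER_ISOLATION` and `DATA_SECURITY_MODE_DEDICATED` to `SINGLE_USER`. A minimal sketch of requesting the automatic mode (the version string is illustrative):

    from databricks.sdk.service import compute

    spec = compute.CreateCluster(
        spark_version='15.4.x-scala2.12',      # illustrative DBR release
        kind=compute.Kind.CLASSIC_PREVIEW,     # the aliases require kind-based compute
        data_security_mode=compute.DataSecurityMode.DATA_SECURITY_MODE_AUTO)
    assert spec.as_dict()['data_security_mode'] == 'DATA_SECURITY_MODE_AUTO'
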
@@ -2407,6 +3106,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbfsStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbfsStorageInfo:
         """Deserializes the DbfsStorageInfo from a dictionary."""
@@ -2424,6 +3129,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCluster:
         """Deserializes the DeleteCluster from a dictionary."""
@@ -2438,6 +3149,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteClusterResponse:
         """Deserializes the DeleteClusterResponse from a dictionary."""
@@ -2455,6 +3171,12 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePool:
         """Deserializes the DeleteInstancePool from a dictionary."""
@@ -2469,6 +3191,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstancePoolResponse:
         """Deserializes the DeleteInstancePoolResponse from a dictionary."""
@@ -2486,6 +3213,12 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePolicy:
         """Deserializes the DeletePolicy from a dictionary."""
@@ -2500,6 +3233,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePolicyResponse:
         """Deserializes the DeletePolicyResponse from a dictionary."""
@@ -2514,6 +3252,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -2533,6 +3276,13 @@ def as_dict(self) -> dict:
         if self.context_id is not None: body['contextId'] = self.context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DestroyContext into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['clusterId'] = self.cluster_id
+        if self.context_id is not None: body['contextId'] = self.context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DestroyContext:
         """Deserializes the DestroyContext from a dictionary."""
@@ -2547,6 +3297,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DestroyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DestroyResponse:
         """Deserializes the DestroyResponse from a dictionary."""
@@ -2595,6 +3350,16 @@ def as_dict(self) -> dict:
         if self.disk_type: body['disk_type'] = self.disk_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DiskSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disk_count is not None: body['disk_count'] = self.disk_count
+        if self.disk_iops is not None: body['disk_iops'] = self.disk_iops
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.disk_throughput is not None: body['disk_throughput'] = self.disk_throughput
+        if self.disk_type: body['disk_type'] = self.disk_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskSpec:
         """Deserializes the DiskSpec from a dictionary."""
@@ -2619,6 +3384,14 @@ def as_dict(self) -> dict:
         if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DiskType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_disk_volume_type is not None:
+            body['azure_disk_volume_type'] = self.azure_disk_volume_type
+        if self.ebs_volume_type is not None: body['ebs_volume_type'] = self.ebs_volume_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DiskType:
         """Deserializes the DiskType from a dictionary."""
@@ -2653,6 +3426,13 @@ def as_dict(self) -> dict:
         if self.username is not None: body['username'] = self.username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DockerBasicAuth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.password is not None: body['password'] = self.password
+        if self.username is not None: body['username'] = self.username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerBasicAuth:
         """Deserializes the DockerBasicAuth from a dictionary."""
@@ -2673,6 +3453,13 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DockerImage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.basic_auth: body['basic_auth'] = self.basic_auth
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DockerImage:
         """Deserializes the DockerImage from a dictionary."""
@@ -2689,7 +3476,7 @@ class EbsVolumeType(Enum):
 @dataclass
 class EditCluster:
     cluster_id: str
-    """ID of the cluser"""
+    """ID of the cluster"""
 
     spark_version: str
     """The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can
@@ -2718,11 +3505,11 @@ class EditCluster:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
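
For reference, a minimal log-delivery configuration using the DBFS destination; the path is illustrative, and S3 uses the analogous `s3` storage-info field (a Unity Catalog volumes field, if generated in this version, follows the same shape):

    from databricks.sdk.service import compute

    log_conf = compute.ClusterLogConf(
        dbfs=compute.DbfsStorageInfo(destination='dbfs:/cluster-logs'))  # illustrative path
    # Driver logs are delivered to dbfs:/cluster-logs/<cluster-id>/driver and
    # executor logs to dbfs:/cluster-logs/<cluster-id>/executor, every 5 minutes.
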
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -2740,13 +3527,19 @@ class EditCluster:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -2787,6 +3580,20 @@ class EditCluster:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -2841,6 +3648,12 @@ class EditCluster:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (the DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node."""
+
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -2868,6 +3681,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -2877,9 +3692,50 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditCluster:
         """Deserializes the EditCluster from a dictionary."""
@@ -2901,6 +3757,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -2910,6 +3768,7 @@ def from_dict(cls, d: Dict[str, any]) -> EditCluster:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -2921,6 +3780,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditClusterResponse:
         """Deserializes the EditClusterResponse from a dictionary."""
@@ -2976,6 +3840,19 @@ def as_dict(self) -> dict:
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditInstancePool:
         """Deserializes the EditInstancePool from a dictionary."""
@@ -2996,6 +3873,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditInstancePoolResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditInstancePoolResponse:
         """Deserializes the EditInstancePoolResponse from a dictionary."""
@@ -3057,6 +3939,20 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPolicy:
         """Deserializes the EditPolicy from a dictionary."""
@@ -3078,6 +3974,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPolicyResponse:
         """Deserializes the EditPolicyResponse from a dictionary."""
@@ -3092,6 +3993,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditResponse:
         """Deserializes the EditResponse from a dictionary."""
@@ -3114,6 +4020,13 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceRequest:
         """Deserializes the EnforceClusterComplianceRequest from a dictionary."""
@@ -3137,6 +4050,13 @@ def as_dict(self) -> dict:
         if self.has_changes is not None: body['has_changes'] = self.has_changes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforceClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforceClusterComplianceResponse:
         """Deserializes the EnforceClusterComplianceResponse from a dictionary."""
@@ -3169,6 +4089,13 @@ def as_dict(self) -> dict:
         if self.dependencies: body['dependencies'] = [v for v in self.dependencies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Environment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client is not None: body['client'] = self.client
+        if self.dependencies: body['dependencies'] = self.dependencies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Environment:
         """Deserializes the Environment from a dictionary."""
@@ -3266,6 +4193,32 @@ def as_dict(self) -> dict:
         if self.user is not None: body['user'] = self.user
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attributes: body['attributes'] = self.attributes
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_size: body['cluster_size'] = self.cluster_size
+        if self.current_num_vcpus is not None: body['current_num_vcpus'] = self.current_num_vcpus
+        if self.current_num_workers is not None: body['current_num_workers'] = self.current_num_workers
+        if self.did_not_expand_reason is not None: body['did_not_expand_reason'] = self.did_not_expand_reason
+        if self.disk_size is not None: body['disk_size'] = self.disk_size
+        if self.driver_state_message is not None: body['driver_state_message'] = self.driver_state_message
+        if self.enable_termination_for_node_blocklisted is not None:
+            body['enable_termination_for_node_blocklisted'] = self.enable_termination_for_node_blocklisted
+        if self.free_space is not None: body['free_space'] = self.free_space
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.job_run_name is not None: body['job_run_name'] = self.job_run_name
+        if self.previous_attributes: body['previous_attributes'] = self.previous_attributes
+        if self.previous_cluster_size: body['previous_cluster_size'] = self.previous_cluster_size
+        if self.previous_disk_size is not None: body['previous_disk_size'] = self.previous_disk_size
+        if self.reason: body['reason'] = self.reason
+        if self.target_num_vcpus is not None: body['target_num_vcpus'] = self.target_num_vcpus
+        if self.target_num_workers is not None: body['target_num_workers'] = self.target_num_workers
+        if self.user is not None: body['user'] = self.user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EventDetails:
         """Deserializes the EventDetails from a dictionary."""
@@ -3303,8 +4256,10 @@ class EventDetailsCause(Enum):
 
 class EventType(Enum):
 
-    # [PROD-2198] Test data in the backend has an event type that was missing here
     ADD_NODES_FAILED = 'ADD_NODES_FAILED'
+    AUTOMATIC_CLUSTER_UPDATE = 'AUTOMATIC_CLUSTER_UPDATE'
+    AUTOSCALING_BACKOFF = 'AUTOSCALING_BACKOFF'
+    AUTOSCALING_FAILED = 'AUTOSCALING_FAILED'
     AUTOSCALING_STATS_REPORT = 'AUTOSCALING_STATS_REPORT'
     CREATING = 'CREATING'
     DBFS_DOWN = 'DBFS_DOWN'
@@ -3378,6 +4333,19 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.boot_disk_size is not None: body['boot_disk_size'] = self.boot_disk_size
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+        if self.use_preemptible_executors is not None:
+            body['use_preemptible_executors'] = self.use_preemptible_executors
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpAttributes:
         """Deserializes the GcpAttributes from a dictionary."""
@@ -3409,6 +4377,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcsStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcsStorageInfo:
         """Deserializes the GcsStorageInfo from a dictionary."""
@@ -3433,6 +4407,13 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterComplianceResponse:
         """Deserializes the GetClusterComplianceResponse from a dictionary."""
@@ -3450,6 +4431,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPermissionLevelsResponse:
         """Deserializes the GetClusterPermissionLevelsResponse from a dictionary."""
@@ -3467,6 +4454,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetClusterPolicyPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetClusterPolicyPermissionLevelsResponse:
         """Deserializes the GetClusterPolicyPermissionLevelsResponse from a dictionary."""
@@ -3500,15 +4493,27 @@ class GetEvents:
     """The start time in epoch milliseconds. If empty, returns events starting from the beginning of
     time."""
 
-    def as_dict(self) -> dict:
-        """Serializes the GetEvents into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the GetEvents into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.event_types: body['event_types'] = [v.value for v in self.event_types]
+        if self.limit is not None: body['limit'] = self.limit
+        if self.offset is not None: body['offset'] = self.offset
+        if self.order is not None: body['order'] = self.order.value
+        if self.start_time is not None: body['start_time'] = self.start_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetEvents into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         if self.end_time is not None: body['end_time'] = self.end_time
-        if self.event_types: body['event_types'] = [v.value for v in self.event_types]
+        if self.event_types: body['event_types'] = self.event_types
         if self.limit is not None: body['limit'] = self.limit
         if self.offset is not None: body['offset'] = self.offset
-        if self.order is not None: body['order'] = self.order.value
+        if self.order is not None: body['order'] = self.order
         if self.start_time is not None: body['start_time'] = self.start_time
         return body
 
@@ -3551,6 +4556,14 @@ def as_dict(self) -> dict:
         if self.total_count is not None: body['total_count'] = self.total_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetEventsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.events: body['events'] = self.events
+        if self.next_page: body['next_page'] = self.next_page
+        if self.total_count is not None: body['total_count'] = self.total_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetEventsResponse:
         """Deserializes the GetEventsResponse from a dictionary."""
@@ -3670,6 +4683,30 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetInstancePool into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        if self.state is not None: body['state'] = self.state
+        if self.stats: body['stats'] = self.stats
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePool:
         """Deserializes the GetInstancePool from a dictionary."""
@@ -3704,6 +4741,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetInstancePoolPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetInstancePoolPermissionLevelsResponse:
         """Deserializes the GetInstancePoolPermissionLevelsResponse from a dictionary."""
@@ -3722,6 +4765,12 @@ def as_dict(self) -> dict:
         if self.versions: body['versions'] = [v.as_dict() for v in self.versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetSparkVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.versions: body['versions'] = self.versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSparkVersionsResponse:
         """Deserializes the GetSparkVersionsResponse from a dictionary."""
@@ -3759,6 +4808,15 @@ def as_dict(self) -> dict:
         if self.script is not None: body['script'] = self.script
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptCreateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptCreateRequest:
         """Deserializes the GlobalInitScriptCreateRequest from a dictionary."""
@@ -3808,6 +4866,19 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetails:
         """Deserializes the GlobalInitScriptDetails from a dictionary."""
@@ -3865,6 +4936,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptDetailsWithContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        if self.script_id is not None: body['script_id'] = self.script_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptDetailsWithContent:
         """Deserializes the GlobalInitScriptDetailsWithContent from a dictionary."""
@@ -3914,6 +4999,16 @@ def as_dict(self) -> dict:
         if self.script_id is not None: body['script_id'] = self.script_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GlobalInitScriptUpdateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.script is not None: body['script'] = self.script
+        if self.script_id is not None: body['script_id'] = self.script_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GlobalInitScriptUpdateRequest:
         """Deserializes the GlobalInitScriptUpdateRequest from a dictionary."""
@@ -3943,6 +5038,14 @@ def as_dict(self) -> dict:
         if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptEventDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster: body['cluster'] = self.cluster
+        if self.global_: body['global'] = self.global_
+        if self.reported_for_node is not None: body['reported_for_node'] = self.reported_for_node
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptEventDetails:
         """Deserializes the InitScriptEventDetails from a dictionary."""
@@ -3971,6 +5074,15 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptExecutionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.execution_duration_seconds is not None:
+            body['execution_duration_seconds'] = self.execution_duration_seconds
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptExecutionDetails:
         """Deserializes the InitScriptExecutionDetails from a dictionary."""
@@ -4033,6 +5145,18 @@ def as_dict(self) -> dict:
         if self.workspace: body['workspace'] = self.workspace.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.abfss: body['abfss'] = self.abfss
+        if self.dbfs: body['dbfs'] = self.dbfs
+        if self.file: body['file'] = self.file
+        if self.gcs: body['gcs'] = self.gcs
+        if self.s3: body['s3'] = self.s3
+        if self.volumes: body['volumes'] = self.volumes
+        if self.workspace: body['workspace'] = self.workspace
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfo:
         """Deserializes the InitScriptInfo from a dictionary."""
@@ -4060,6 +5184,13 @@ def as_dict(self) -> dict:
         if self.script: body['script'] = self.script.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InitScriptInfoAndExecutionDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.execution_details: body['execution_details'] = self.execution_details
+        if self.script: body['script'] = self.script
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InitScriptInfoAndExecutionDetails:
         """Deserializes the InitScriptInfoAndExecutionDetails from a dictionary."""
@@ -4082,6 +5213,13 @@ def as_dict(self) -> dict:
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallLibraries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.libraries: body['libraries'] = self.libraries
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallLibraries:
         """Deserializes the InstallLibraries from a dictionary."""
@@ -4096,6 +5234,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallLibrariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallLibrariesResponse:
         """Deserializes the InstallLibrariesResponse from a dictionary."""
@@ -4126,6 +5269,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlRequest:
         """Deserializes the InstancePoolAccessControlRequest from a dictionary."""
@@ -4163,6 +5316,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAccessControlResponse:
         """Deserializes the InstancePoolAccessControlResponse from a dictionary."""
@@ -4284,6 +5448,30 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAndStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.default_tags: body['default_tags'] = self.default_tags
+        if self.disk_spec: body['disk_spec'] = self.disk_spec
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.idle_instance_autotermination_minutes is not None:
+            body['idle_instance_autotermination_minutes'] = self.idle_instance_autotermination_minutes
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.instance_pool_name is not None: body['instance_pool_name'] = self.instance_pool_name
+        if self.max_capacity is not None: body['max_capacity'] = self.max_capacity
+        if self.min_idle_instances is not None: body['min_idle_instances'] = self.min_idle_instances
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.preloaded_docker_images: body['preloaded_docker_images'] = self.preloaded_docker_images
+        if self.preloaded_spark_versions: body['preloaded_spark_versions'] = self.preloaded_spark_versions
+        if self.state is not None: body['state'] = self.state
+        if self.stats: body['stats'] = self.stats
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAndStats:
         """Deserializes the InstancePoolAndStats from a dictionary."""
@@ -4343,6 +5531,15 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.spot_bid_price_percent is not None:
+            body['spot_bid_price_percent'] = self.spot_bid_price_percent
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAwsAttributes:
         """Deserializes the InstancePoolAwsAttributes from a dictionary."""
@@ -4378,6 +5575,13 @@ def as_dict(self) -> dict:
         if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolAzureAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.availability is not None: body['availability'] = self.availability
+        if self.spot_bid_max_price is not None: body['spot_bid_max_price'] = self.spot_bid_max_price
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolAzureAttributes:
         """Deserializes the InstancePoolAzureAttributes from a dictionary."""
@@ -4429,6 +5633,14 @@ def as_dict(self) -> dict:
         if self.zone_id is not None: body['zone_id'] = self.zone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolGcpAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp_availability is not None: body['gcp_availability'] = self.gcp_availability
+        if self.local_ssd_count is not None: body['local_ssd_count'] = self.local_ssd_count
+        if self.zone_id is not None: body['zone_id'] = self.zone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolGcpAttributes:
         """Deserializes the InstancePoolGcpAttributes from a dictionary."""
@@ -4454,6 +5666,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermission:
         """Deserializes the InstancePoolPermission from a dictionary."""
@@ -4486,6 +5706,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissions:
         """Deserializes the InstancePoolPermissions from a dictionary."""
@@ -4509,6 +5737,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsDescription:
         """Deserializes the InstancePoolPermissionsDescription from a dictionary."""
@@ -4531,6 +5766,13 @@ def as_dict(self) -> dict:
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolPermissionsRequest:
         """Deserializes the InstancePoolPermissionsRequest from a dictionary."""
@@ -4570,6 +5812,15 @@ def as_dict(self) -> dict:
         if self.used_count is not None: body['used_count'] = self.used_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.idle_count is not None: body['idle_count'] = self.idle_count
+        if self.pending_idle_count is not None: body['pending_idle_count'] = self.pending_idle_count
+        if self.pending_used_count is not None: body['pending_used_count'] = self.pending_used_count
+        if self.used_count is not None: body['used_count'] = self.used_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStats:
         """Deserializes the InstancePoolStats from a dictionary."""
@@ -4593,6 +5844,12 @@ def as_dict(self) -> dict:
             body['pending_instance_errors'] = [v.as_dict() for v in self.pending_instance_errors]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstancePoolStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pending_instance_errors: body['pending_instance_errors'] = self.pending_instance_errors
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstancePoolStatus:
         """Deserializes the InstancePoolStatus from a dictionary."""
@@ -4628,6 +5885,15 @@ def as_dict(self) -> dict:
             body['is_meta_instance_profile'] = self.is_meta_instance_profile
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.iam_role_arn is not None: body['iam_role_arn'] = self.iam_role_arn
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.is_meta_instance_profile is not None:
+            body['is_meta_instance_profile'] = self.is_meta_instance_profile
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstanceProfile:
         """Deserializes the InstanceProfile from a dictionary."""
@@ -4636,6 +5902,17 @@ def from_dict(cls, d: Dict[str, any]) -> InstanceProfile:
                    is_meta_instance_profile=d.get('is_meta_instance_profile', None))
 
 
+class Kind(Enum):
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
+    CLASSIC_PREVIEW = 'CLASSIC_PREVIEW'
+
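# Editor's sketch of how the new enum travels through the generated
# serializers (UpdateClusterResource is defined further down in this diff):
# `as_dict` flattens the member with `.value`, `from_dict` re-parses it via
# `_enum`, so a round trip preserves the member identity.
spec = UpdateClusterResource(kind=Kind.CLASSIC_PREVIEW)
assert spec.as_dict() == {'kind': 'CLASSIC_PREVIEW'}
assert UpdateClusterResource.from_dict(spec.as_dict()).kind is Kind.CLASSIC_PREVIEW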
+
 class Language(Enum):
 
     PYTHON = 'python'
@@ -4690,6 +5967,18 @@ def as_dict(self) -> dict:
         if self.whl is not None: body['whl'] = self.whl
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Library into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cran: body['cran'] = self.cran
+        if self.egg is not None: body['egg'] = self.egg
+        if self.jar is not None: body['jar'] = self.jar
+        if self.maven: body['maven'] = self.maven
+        if self.pypi: body['pypi'] = self.pypi
+        if self.requirements is not None: body['requirements'] = self.requirements
+        if self.whl is not None: body['whl'] = self.whl
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Library:
         """Deserializes the Library from a dictionary."""
@@ -4728,6 +6017,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LibraryFullStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_library_for_all_clusters is not None:
+            body['is_library_for_all_clusters'] = self.is_library_for_all_clusters
+        if self.library: body['library'] = self.library
+        if self.messages: body['messages'] = self.messages
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LibraryFullStatus:
         """Deserializes the LibraryFullStatus from a dictionary."""
@@ -4761,6 +6060,12 @@ def as_dict(self) -> dict:
         if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllClusterLibraryStatusesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.statuses: body['statuses'] = self.statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllClusterLibraryStatusesResponse:
         """Deserializes the ListAllClusterLibraryStatusesResponse from a dictionary."""
@@ -4782,6 +6087,13 @@ def as_dict(self) -> dict:
         if self.zones: body['zones'] = [v for v in self.zones]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAvailableZonesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_zone is not None: body['default_zone'] = self.default_zone
+        if self.zones: body['zones'] = self.zones
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAvailableZonesResponse:
         """Deserializes the ListAvailableZonesResponse from a dictionary."""
@@ -4809,6 +6121,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClusterCompliancesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clusters: body['clusters'] = self.clusters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClusterCompliancesResponse:
         """Deserializes the ListClusterCompliancesResponse from a dictionary."""
@@ -4840,6 +6160,15 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersFilterBy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_sources: body['cluster_sources'] = self.cluster_sources
+        if self.cluster_states: body['cluster_states'] = self.cluster_states
+        if self.is_pinned is not None: body['is_pinned'] = self.is_pinned
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersFilterBy:
         """Deserializes the ListClustersFilterBy from a dictionary."""
@@ -4870,6 +6199,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clusters: body['clusters'] = self.clusters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersResponse:
         """Deserializes the ListClustersResponse from a dictionary."""
@@ -4894,6 +6231,13 @@ def as_dict(self) -> dict:
         if self.field is not None: body['field'] = self.field.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListClustersSortBy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.direction is not None: body['direction'] = self.direction
+        if self.field is not None: body['field'] = self.field
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListClustersSortBy:
         """Deserializes the ListClustersSortBy from a dictionary."""
@@ -4926,6 +6270,12 @@ def as_dict(self) -> dict:
         if self.scripts: body['scripts'] = [v.as_dict() for v in self.scripts]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListGlobalInitScriptsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scripts: body['scripts'] = self.scripts
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGlobalInitScriptsResponse:
         """Deserializes the ListGlobalInitScriptsResponse from a dictionary."""
@@ -4942,6 +6292,12 @@ def as_dict(self) -> dict:
         if self.instance_pools: body['instance_pools'] = [v.as_dict() for v in self.instance_pools]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstancePools into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_pools: body['instance_pools'] = self.instance_pools
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstancePools:
         """Deserializes the ListInstancePools from a dictionary."""
@@ -4959,6 +6315,12 @@ def as_dict(self) -> dict:
         if self.instance_profiles: body['instance_profiles'] = [v.as_dict() for v in self.instance_profiles]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstanceProfilesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_profiles: body['instance_profiles'] = self.instance_profiles
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstanceProfilesResponse:
         """Deserializes the ListInstanceProfilesResponse from a dictionary."""
@@ -4976,6 +6338,12 @@ def as_dict(self) -> dict:
         if self.node_types: body['node_types'] = [v.as_dict() for v in self.node_types]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNodeTypesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.node_types: body['node_types'] = self.node_types
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNodeTypesResponse:
         """Deserializes the ListNodeTypesResponse from a dictionary."""
@@ -4993,6 +6361,12 @@ def as_dict(self) -> dict:
         if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.policies: body['policies'] = self.policies
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPoliciesResponse:
         """Deserializes the ListPoliciesResponse from a dictionary."""
@@ -5015,6 +6389,13 @@ def as_dict(self) -> dict:
         if self.policy_families: body['policy_families'] = [v.as_dict() for v in self.policy_families]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPolicyFamiliesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policy_families: body['policy_families'] = self.policy_families
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPolicyFamiliesResponse:
         """Deserializes the ListPolicyFamiliesResponse from a dictionary."""
@@ -5046,6 +6427,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LocalFileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LocalFileInfo:
         """Deserializes the LocalFileInfo from a dictionary."""
@@ -5069,6 +6456,15 @@ def as_dict(self) -> dict:
             body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogAnalyticsInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_analytics_primary_key is not None:
+            body['log_analytics_primary_key'] = self.log_analytics_primary_key
+        if self.log_analytics_workspace_id is not None:
+            body['log_analytics_workspace_id'] = self.log_analytics_workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogAnalyticsInfo:
         """Deserializes the LogAnalyticsInfo from a dictionary."""
@@ -5093,6 +6489,13 @@ def as_dict(self) -> dict:
         if self.last_exception is not None: body['last_exception'] = self.last_exception
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogSyncStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.last_attempted is not None: body['last_attempted'] = self.last_attempted
+        if self.last_exception is not None: body['last_exception'] = self.last_exception
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogSyncStatus:
         """Deserializes the LogSyncStatus from a dictionary."""
@@ -5122,6 +6525,14 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MavenLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.coordinates is not None: body['coordinates'] = self.coordinates
+        if self.exclusions: body['exclusions'] = self.exclusions
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MavenLibrary:
         """Deserializes the MavenLibrary from a dictionary."""
@@ -5142,8 +6553,19 @@ class NodeInstanceType:
 
     local_nvme_disks: Optional[int] = None
 
-    def as_dict(self) -> dict:
-        """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the NodeInstanceType into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
+        if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb
+        if self.local_disks is not None: body['local_disks'] = self.local_disks
+        if self.local_nvme_disk_size_gb is not None:
+            body['local_nvme_disk_size_gb'] = self.local_nvme_disk_size_gb
+        if self.local_nvme_disks is not None: body['local_nvme_disks'] = self.local_nvme_disks
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NodeInstanceType into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
         if self.local_disk_size_gb is not None: body['local_disk_size_gb'] = self.local_disk_size_gb
@@ -5247,6 +6669,34 @@ def as_dict(self) -> dict:
         if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NodeType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.category is not None: body['category'] = self.category
+        if self.description is not None: body['description'] = self.description
+        if self.display_order is not None: body['display_order'] = self.display_order
+        if self.instance_type_id is not None: body['instance_type_id'] = self.instance_type_id
+        if self.is_deprecated is not None: body['is_deprecated'] = self.is_deprecated
+        if self.is_encrypted_in_transit is not None:
+            body['is_encrypted_in_transit'] = self.is_encrypted_in_transit
+        if self.is_graviton is not None: body['is_graviton'] = self.is_graviton
+        if self.is_hidden is not None: body['is_hidden'] = self.is_hidden
+        if self.is_io_cache_enabled is not None: body['is_io_cache_enabled'] = self.is_io_cache_enabled
+        if self.memory_mb is not None: body['memory_mb'] = self.memory_mb
+        if self.node_info: body['node_info'] = self.node_info
+        if self.node_instance_type: body['node_instance_type'] = self.node_instance_type
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_cores is not None: body['num_cores'] = self.num_cores
+        if self.num_gpus is not None: body['num_gpus'] = self.num_gpus
+        if self.photon_driver_capable is not None: body['photon_driver_capable'] = self.photon_driver_capable
+        if self.photon_worker_capable is not None: body['photon_worker_capable'] = self.photon_worker_capable
+        if self.support_cluster_tags is not None: body['support_cluster_tags'] = self.support_cluster_tags
+        if self.support_ebs_volumes is not None: body['support_ebs_volumes'] = self.support_ebs_volumes
+        if self.support_port_forwarding is not None:
+            body['support_port_forwarding'] = self.support_port_forwarding
+        if self.supports_elastic_disk is not None: body['supports_elastic_disk'] = self.supports_elastic_disk
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NodeType:
         """Deserializes the NodeType from a dictionary."""
@@ -5286,6 +6736,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PendingInstanceError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PendingInstanceError:
         """Deserializes the PendingInstanceError from a dictionary."""
@@ -5303,6 +6760,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermanentDeleteCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteCluster:
         """Deserializes the PermanentDeleteCluster from a dictionary."""
@@ -5317,6 +6780,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermanentDeleteClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermanentDeleteClusterResponse:
         """Deserializes the PermanentDeleteClusterResponse from a dictionary."""
@@ -5334,6 +6802,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PinCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PinCluster:
         """Deserializes the PinCluster from a dictionary."""
@@ -5348,6 +6822,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PinClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PinClusterResponse:
         """Deserializes the PinClusterResponse from a dictionary."""
@@ -5425,6 +6904,23 @@ def as_dict(self) -> dict:
         if self.policy_id is not None: body['policy_id'] = self.policy_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Policy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at_timestamp is not None: body['created_at_timestamp'] = self.created_at_timestamp
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.is_default is not None: body['is_default'] = self.is_default
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_clusters_per_user is not None: body['max_clusters_per_user'] = self.max_clusters_per_user
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_definition_overrides is not None:
+            body['policy_family_definition_overrides'] = self.policy_family_definition_overrides
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Policy:
         """Deserializes the Policy from a dictionary."""
@@ -5466,6 +6962,15 @@ def as_dict(self) -> dict:
         if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PolicyFamily into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.definition is not None: body['definition'] = self.definition
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.policy_family_id is not None: body['policy_family_id'] = self.policy_family_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PolicyFamily:
         """Deserializes the PolicyFamily from a dictionary."""
@@ -5491,6 +6996,13 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PythonPyPiLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonPyPiLibrary:
         """Deserializes the PythonPyPiLibrary from a dictionary."""
@@ -5512,6 +7024,13 @@ def as_dict(self) -> dict:
         if self.repo is not None: body['repo'] = self.repo
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RCranLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.package is not None: body['package'] = self.package
+        if self.repo is not None: body['repo'] = self.repo
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RCranLibrary:
         """Deserializes the RCranLibrary from a dictionary."""
@@ -5529,6 +7048,12 @@ def as_dict(self) -> dict:
         if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveInstanceProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveInstanceProfile:
         """Deserializes the RemoveInstanceProfile from a dictionary."""
@@ -5543,6 +7068,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveResponse:
         """Deserializes the RemoveResponse from a dictionary."""
@@ -5576,6 +7106,14 @@ def as_dict(self) -> dict:
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResizeCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResizeCluster:
         """Deserializes the ResizeCluster from a dictionary."""
@@ -5592,6 +7130,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResizeClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResizeClusterResponse:
         """Deserializes the ResizeClusterResponse from a dictionary."""
@@ -5613,6 +7156,13 @@ def as_dict(self) -> dict:
         if self.restart_user is not None: body['restart_user'] = self.restart_user
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.restart_user is not None: body['restart_user'] = self.restart_user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartCluster:
         """Deserializes the RestartCluster from a dictionary."""
@@ -5627,6 +7177,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestartClusterResponse:
         """Deserializes the RestartClusterResponse from a dictionary."""
@@ -5686,6 +7241,21 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Results into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.data: body['data'] = self.data
+        if self.file_name is not None: body['fileName'] = self.file_name
+        if self.file_names: body['fileNames'] = self.file_names
+        if self.is_json_schema is not None: body['isJsonSchema'] = self.is_json_schema
+        if self.pos is not None: body['pos'] = self.pos
+        if self.result_type is not None: body['resultType'] = self.result_type
+        if self.schema: body['schema'] = self.schema
+        if self.summary is not None: body['summary'] = self.summary
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
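# Editor's note: Results keeps its wire names, so even the shallow form uses
# the camelCase JSON keys ('fileName', 'isJsonSchema', ...) rather than the
# snake_case attribute names. A quick hedged sketch:
r = Results(file_name='stdout.txt', truncated=False)
r.as_shallow_dict()  # {'fileName': 'stdout.txt', 'truncated': False}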
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Results:
         """Deserializes the Results from a dictionary."""
@@ -5762,6 +7332,18 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the S3StorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.canned_acl is not None: body['canned_acl'] = self.canned_acl
+        if self.destination is not None: body['destination'] = self.destination
+        if self.enable_encryption is not None: body['enable_encryption'] = self.enable_encryption
+        if self.encryption_type is not None: body['encryption_type'] = self.encryption_type
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.kms_key is not None: body['kms_key'] = self.kms_key
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> S3StorageInfo:
         """Deserializes the S3StorageInfo from a dictionary."""
@@ -5818,6 +7400,18 @@ def as_dict(self) -> dict:
         if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkNode into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.host_private_ip is not None: body['host_private_ip'] = self.host_private_ip
+        if self.instance_id is not None: body['instance_id'] = self.instance_id
+        if self.node_aws_attributes: body['node_aws_attributes'] = self.node_aws_attributes
+        if self.node_id is not None: body['node_id'] = self.node_id
+        if self.private_ip is not None: body['private_ip'] = self.private_ip
+        if self.public_dns is not None: body['public_dns'] = self.public_dns
+        if self.start_timestamp is not None: body['start_timestamp'] = self.start_timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNode:
         """Deserializes the SparkNode from a dictionary."""
@@ -5841,6 +7435,12 @@ def as_dict(self) -> dict:
         if self.is_spot is not None: body['is_spot'] = self.is_spot
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkNodeAwsAttributes into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_spot is not None: body['is_spot'] = self.is_spot
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkNodeAwsAttributes:
         """Deserializes the SparkNodeAwsAttributes from a dictionary."""
@@ -5865,6 +7465,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkVersion into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkVersion:
         """Deserializes the SparkVersion from a dictionary."""
@@ -5882,6 +7489,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartCluster:
         """Deserializes the StartCluster from a dictionary."""
@@ -5896,6 +7509,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartClusterResponse:
         """Deserializes the StartClusterResponse from a dictionary."""
@@ -5934,6 +7552,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.parameters: body['parameters'] = self.parameters
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
@@ -6051,6 +7677,13 @@ def as_dict(self) -> dict:
         if self.libraries: body['libraries'] = [v.as_dict() for v in self.libraries]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UninstallLibraries into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.libraries: body['libraries'] = self.libraries
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UninstallLibraries:
         """Deserializes the UninstallLibraries from a dictionary."""
@@ -6065,6 +7698,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UninstallLibrariesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UninstallLibrariesResponse:
         """Deserializes the UninstallLibrariesResponse from a dictionary."""
@@ -6082,6 +7720,12 @@ def as_dict(self) -> dict:
         if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpinCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpinCluster:
         """Deserializes the UnpinCluster from a dictionary."""
@@ -6096,6 +7740,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpinClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpinClusterResponse:
         """Deserializes the UnpinClusterResponse from a dictionary."""
@@ -6124,6 +7773,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster: body['cluster'] = self.cluster
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCluster:
         """Deserializes the UpdateCluster from a dictionary."""
@@ -6153,11 +7810,11 @@ class UpdateClusterResource:
     a set of default values will be used."""
 
     cluster_log_conf: Optional[ClusterLogConf] = None
-    """The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-    destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster.
-    If the conf is given, the logs will be delivered to the destination every `5 mins`. The
-    destination of driver logs is `$destination/$clusterId/driver`, while the destination of
-    executor logs is `$destination/$clusterId/executor`."""
+    """The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+    destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+    specified for one cluster. If the conf is given, the logs will be delivered to the destination
+    every `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the
+    destination of executor logs is `$destination/$clusterId/executor`."""
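# Editor's sketch of the third destination kind mentioned in the updated
# docstring. It assumes ClusterLogConf grew a `volumes` field taking a
# VolumesStorageInfo; only the dbfs and s3 destinations are visible in this
# hunk, so treat the field name as an assumption.
log_conf = ClusterLogConf(volumes=VolumesStorageInfo(destination='/Volumes/main/default/logs'))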
 
     cluster_name: Optional[str] = None
     """Cluster name requested by the user. This doesn't have to be unique. If not specified at
@@ -6175,13 +7832,19 @@ class UpdateClusterResource:
     data_security_mode: Optional[DataSecurityMode] = None
     """Data security mode decides what data governance model to use when accessing data from a cluster.
     
-    * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features
-    are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively
-    used by a single user specified in `single_user_name`. Most programming languages, cluster
-    features and data governance features are available in this mode. * `USER_ISOLATION`: A secure
-    cluster that can be shared by multiple users. Cluster users are fully isolated so that they
-    cannot see each other's data and credentials. Most data governance features are supported in
-    this mode. But programming languages and cluster features might be limited.
+    The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+    choose the most appropriate access mode depending on your compute configuration. *
+    `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`:
+    Alias for `SINGLE_USER`.
+    
+    The following modes can be used regardless of `kind`. * `NONE`: No security isolation for
+    multiple users sharing the cluster. Data governance features are not available in this mode. *
+    `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in
+    `single_user_name`. Most programming languages, cluster features and data governance features
+    are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple
+    users. Cluster users are fully isolated so that they cannot see each other's data and
+    credentials. Most data governance features are supported in this mode. But programming languages
+    and cluster features might be limited.
     
     The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
     future Databricks Runtime versions:
@@ -6222,6 +7885,20 @@ class UpdateClusterResource:
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    is_single_node: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    When set to true, Databricks will automatically set single-node-related `custom_tags`,
+    `spark_conf`, and `num_workers`."""
+
+    kind: Optional[Kind] = None
+    """The kind of compute described by this compute specification.
+    
+    Depending on `kind`, different validations and default values will be applied.
+    
+    The first usage of this value is for the simple cluster form where it sets `kind =
+    CLASSIC_PREVIEW`."""
+
     node_type_id: Optional[str] = None
     """This field encodes, through a single value, the resources available to each of the Spark nodes
     in this cluster. For example, the Spark nodes can be provisioned and optimized for memory or
@@ -6280,6 +7957,12 @@ class UpdateClusterResource:
     private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can
     be specified."""
 
+    use_ml_runtime: Optional[bool] = None
+    """This field can only be used with `kind`.
+    
+    `effective_spark_version` is determined by `spark_version` (DBR release), this field
+    `use_ml_runtime`, and whether `node_type_id` is a GPU node."""
+
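# Putting the `kind`-gated fields together (editor's sketch; the
# DATA_SECURITY_MODE_AUTO member and the spark_version string are assumed
# for illustration, the field semantics are as documented above):
single_node = UpdateClusterResource(kind=Kind.CLASSIC_PREVIEW,
                                    is_single_node=True,
                                    use_ml_runtime=True,
                                    data_security_mode=DataSecurityMode.DATA_SECURITY_MODE_AUTO,
                                    spark_version='15.4.x-scala2.12')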
     workload_type: Optional[WorkloadType] = None
 
     def as_dict(self) -> dict:
@@ -6304,6 +7987,8 @@ def as_dict(self) -> dict:
         if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes.as_dict()
         if self.init_scripts: body['init_scripts'] = [v.as_dict() for v in self.init_scripts]
         if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind.value
         if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
         if self.num_workers is not None: body['num_workers'] = self.num_workers
         if self.policy_id is not None: body['policy_id'] = self.policy_id
@@ -6313,9 +7998,47 @@ def as_dict(self) -> dict:
         if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
         if self.spark_version is not None: body['spark_version'] = self.spark_version
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
         if self.workload_type: body['workload_type'] = self.workload_type.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateClusterResource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.autotermination_minutes is not None:
+            body['autotermination_minutes'] = self.autotermination_minutes
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.cluster_name is not None: body['cluster_name'] = self.cluster_name
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.data_security_mode is not None: body['data_security_mode'] = self.data_security_mode
+        if self.docker_image: body['docker_image'] = self.docker_image
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_elastic_disk is not None: body['enable_elastic_disk'] = self.enable_elastic_disk
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.is_single_node is not None: body['is_single_node'] = self.is_single_node
+        if self.kind is not None: body['kind'] = self.kind
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.runtime_engine is not None: body['runtime_engine'] = self.runtime_engine
+        if self.single_user_name is not None: body['single_user_name'] = self.single_user_name
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.spark_version is not None: body['spark_version'] = self.spark_version
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        if self.use_ml_runtime is not None: body['use_ml_runtime'] = self.use_ml_runtime
+        if self.workload_type: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
         """Deserializes the UpdateClusterResource from a dictionary."""
@@ -6335,6 +8058,8 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
                    gcp_attributes=_from_dict(d, 'gcp_attributes', GcpAttributes),
                    init_scripts=_repeated_dict(d, 'init_scripts', InitScriptInfo),
                    instance_pool_id=d.get('instance_pool_id', None),
+                   is_single_node=d.get('is_single_node', None),
+                   kind=_enum(d, 'kind', Kind),
                    node_type_id=d.get('node_type_id', None),
                    num_workers=d.get('num_workers', None),
                    policy_id=d.get('policy_id', None),
@@ -6344,6 +8069,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResource:
                    spark_env_vars=d.get('spark_env_vars', None),
                    spark_version=d.get('spark_version', None),
                    ssh_public_keys=d.get('ssh_public_keys', None),
+                   use_ml_runtime=d.get('use_ml_runtime', None),
                    workload_type=_from_dict(d, 'workload_type', WorkloadType))
 
 
@@ -6355,6 +8081,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateClusterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateClusterResponse:
         """Deserializes the UpdateClusterResponse from a dictionary."""
@@ -6369,6 +8100,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -6378,7 +8114,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
 @dataclass
 class VolumesStorageInfo:
     destination: str
-    """Unity Catalog Volumes file destination, e.g. `/Volumes/my-init.sh`"""
+    """Unity Catalog volumes file destination, e.g. `/Volumes/catalog/schema/volume/dir/file`"""
 
     def as_dict(self) -> dict:
         """Serializes the VolumesStorageInfo into a dictionary suitable for use as a JSON request body."""
@@ -6386,6 +8122,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VolumesStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VolumesStorageInfo:
         """Deserializes the VolumesStorageInfo from a dictionary."""
@@ -6403,6 +8145,12 @@ def as_dict(self) -> dict:
         if self.clients: body['clients'] = self.clients.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkloadType into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clients: body['clients'] = self.clients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkloadType:
         """Deserializes the WorkloadType from a dictionary."""
@@ -6420,6 +8168,12 @@ def as_dict(self) -> dict:
         if self.destination is not None: body['destination'] = self.destination
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceStorageInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceStorageInfo:
         """Deserializes the WorkspaceStorageInfo from a dictionary."""
@@ -6676,7 +8430,8 @@ def set_permissions(
     ) -> ClusterPolicyPermissions:
         """Set cluster policy permissions.
         
-        Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
@@ -6853,6 +8608,8 @@ def create(self,
                gcp_attributes: Optional[GcpAttributes] = None,
                init_scripts: Optional[List[InitScriptInfo]] = None,
                instance_pool_id: Optional[str] = None,
+               is_single_node: Optional[bool] = None,
+               kind: Optional[Kind] = None,
                node_type_id: Optional[str] = None,
                num_workers: Optional[int] = None,
                policy_id: Optional[str] = None,
@@ -6861,6 +8618,7 @@ def create(self,
                spark_conf: Optional[Dict[str, str]] = None,
                spark_env_vars: Optional[Dict[str, str]] = None,
                ssh_public_keys: Optional[List[str]] = None,
+               use_ml_runtime: Optional[bool] = None,
                workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
         """Create new cluster.
         
@@ -6898,11 +8656,11 @@ def create(self,
         :param clone_from: :class:`CloneCluster` (optional)
           When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -6916,13 +8674,19 @@ def create(self,
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
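
A hedged call sketch for the new `kind`-gated access modes; the cluster spec values are placeholders, and `DATA_SECURITY_MODE_AUTO` is assumed to be a member of the `DataSecurityMode` enum as the docstring describes:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import DataSecurityMode, Kind

w = WorkspaceClient()

cluster = w.clusters.create_and_wait(
    cluster_name='auto-access-mode-demo',
    spark_version='15.4.x-scala2.12',  # placeholder
    node_type_id='i3.xlarge',          # placeholder
    num_workers=1,
    kind=Kind.CLASSIC_PREVIEW,
    # Only valid together with `kind`: Databricks picks the access mode.
    data_security_mode=DataSecurityMode.DATA_SECURITY_MODE_AUTO,
)
```
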
@@ -6954,6 +8718,17 @@ def create(self,
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -7000,6 +8775,11 @@ def create(self,
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
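
And a hedged sketch of the single-node shortcut these parameters enable: with `kind` set, `is_single_node=True` leaves the single-node `custom_tags`, `spark_conf`, and `num_workers` to the backend, while `use_ml_runtime=True` steers `effective_spark_version` toward an ML runtime:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import Kind

w = WorkspaceClient()

cluster = w.clusters.create_and_wait(
    cluster_name='single-node-ml-demo',
    spark_version='15.4.x-scala2.12',  # placeholder DBR release
    node_type_id='g4dn.xlarge',        # placeholder GPU node type
    kind=Kind.CLASSIC_PREVIEW,
    is_single_node=True,   # backend derives tags, conf, and num_workers
    use_ml_runtime=True,   # resolved against spark_version and the node type
)
```
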
@@ -7027,6 +8807,8 @@ def create(self,
         if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
         if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
         if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
+        if is_single_node is not None: body['is_single_node'] = is_single_node
+        if kind is not None: body['kind'] = kind.value
         if node_type_id is not None: body['node_type_id'] = node_type_id
         if num_workers is not None: body['num_workers'] = num_workers
         if policy_id is not None: body['policy_id'] = policy_id
@@ -7036,6 +8818,7 @@ def create(self,
         if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
         if spark_version is not None: body['spark_version'] = spark_version
         if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -7066,6 +8849,8 @@ def create_and_wait(
         gcp_attributes: Optional[GcpAttributes] = None,
         init_scripts: Optional[List[InitScriptInfo]] = None,
         instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
         node_type_id: Optional[str] = None,
         num_workers: Optional[int] = None,
         policy_id: Optional[str] = None,
@@ -7074,6 +8859,7 @@ def create_and_wait(
         spark_conf: Optional[Dict[str, str]] = None,
         spark_env_vars: Optional[Dict[str, str]] = None,
         ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
         timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.create(apply_policy_default_values=apply_policy_default_values,
@@ -7094,6 +8880,8 @@ def create_and_wait(
                            gcp_attributes=gcp_attributes,
                            init_scripts=init_scripts,
                            instance_pool_id=instance_pool_id,
+                           is_single_node=is_single_node,
+                           kind=kind,
                            node_type_id=node_type_id,
                            num_workers=num_workers,
                            policy_id=policy_id,
@@ -7103,6 +8891,7 @@ def create_and_wait(
                            spark_env_vars=spark_env_vars,
                            spark_version=spark_version,
                            ssh_public_keys=ssh_public_keys,
+                           use_ml_runtime=use_ml_runtime,
                            workload_type=workload_type).result(timeout=timeout)
 
     def delete(self, cluster_id: str) -> Wait[ClusterDetails]:
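
`create_and_wait` is a thin wrapper: it forwards every argument to `create`, which returns a `Wait[ClusterDetails]`, and then blocks on `.result(timeout=...)`. Holding the waiter directly is one way to pick a custom timeout, sketched here with placeholder values:

```python
from datetime import timedelta

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

waiter = w.clusters.create(cluster_name='waiter-demo',
                           spark_version='15.4.x-scala2.12',  # placeholder
                           node_type_id='i3.xlarge',          # placeholder
                           num_workers=1)
details = waiter.result(timeout=timedelta(minutes=30))
print(details.state)
```
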
@@ -7152,6 +8941,8 @@ def edit(self,
              gcp_attributes: Optional[GcpAttributes] = None,
              init_scripts: Optional[List[InitScriptInfo]] = None,
              instance_pool_id: Optional[str] = None,
+             is_single_node: Optional[bool] = None,
+             kind: Optional[Kind] = None,
              node_type_id: Optional[str] = None,
              num_workers: Optional[int] = None,
              policy_id: Optional[str] = None,
@@ -7160,6 +8951,7 @@ def edit(self,
              spark_conf: Optional[Dict[str, str]] = None,
              spark_env_vars: Optional[Dict[str, str]] = None,
              ssh_public_keys: Optional[List[str]] = None,
+             use_ml_runtime: Optional[bool] = None,
              workload_type: Optional[WorkloadType] = None) -> Wait[ClusterDetails]:
         """Update cluster configuration.
         
@@ -7176,7 +8968,7 @@ def edit(self,
         Clusters created by the Databricks Jobs service cannot be edited.
         
         :param cluster_id: str
-          ID of the cluser
+          ID of the cluster
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -7197,11 +8989,11 @@ def edit(self,
           Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
           set of default values will be used.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -7215,13 +9007,19 @@ def edit(self,
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -7253,6 +9051,17 @@ def edit(self,
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single node related `custom_tags`, `spark_conf`,
+          and `num_workers`
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -7299,6 +9108,11 @@ def edit(self,
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          `use_ml_runtime`, and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -7326,6 +9140,8 @@ def edit(self,
         if gcp_attributes is not None: body['gcp_attributes'] = gcp_attributes.as_dict()
         if init_scripts is not None: body['init_scripts'] = [v.as_dict() for v in init_scripts]
         if instance_pool_id is not None: body['instance_pool_id'] = instance_pool_id
+        if is_single_node is not None: body['is_single_node'] = is_single_node
+        if kind is not None: body['kind'] = kind.value
         if node_type_id is not None: body['node_type_id'] = node_type_id
         if num_workers is not None: body['num_workers'] = num_workers
         if policy_id is not None: body['policy_id'] = policy_id
@@ -7335,6 +9151,7 @@ def edit(self,
         if spark_env_vars is not None: body['spark_env_vars'] = spark_env_vars
         if spark_version is not None: body['spark_version'] = spark_version
         if ssh_public_keys is not None: body['ssh_public_keys'] = [v for v in ssh_public_keys]
+        if use_ml_runtime is not None: body['use_ml_runtime'] = use_ml_runtime
         if workload_type is not None: body['workload_type'] = workload_type.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
@@ -7365,6 +9182,8 @@ def edit_and_wait(
         gcp_attributes: Optional[GcpAttributes] = None,
         init_scripts: Optional[List[InitScriptInfo]] = None,
         instance_pool_id: Optional[str] = None,
+        is_single_node: Optional[bool] = None,
+        kind: Optional[Kind] = None,
         node_type_id: Optional[str] = None,
         num_workers: Optional[int] = None,
         policy_id: Optional[str] = None,
@@ -7373,6 +9192,7 @@ def edit_and_wait(
         spark_conf: Optional[Dict[str, str]] = None,
         spark_env_vars: Optional[Dict[str, str]] = None,
         ssh_public_keys: Optional[List[str]] = None,
+        use_ml_runtime: Optional[bool] = None,
         workload_type: Optional[WorkloadType] = None,
         timeout=timedelta(minutes=20)) -> ClusterDetails:
         return self.edit(apply_policy_default_values=apply_policy_default_values,
@@ -7393,6 +9213,8 @@ def edit_and_wait(
                          gcp_attributes=gcp_attributes,
                          init_scripts=init_scripts,
                          instance_pool_id=instance_pool_id,
+                         is_single_node=is_single_node,
+                         kind=kind,
                          node_type_id=node_type_id,
                          num_workers=num_workers,
                          policy_id=policy_id,
@@ -7402,6 +9224,7 @@ def edit_and_wait(
                          spark_env_vars=spark_env_vars,
                          spark_version=spark_version,
                          ssh_public_keys=ssh_public_keys,
+                         use_ml_runtime=use_ml_runtime,
                          workload_type=workload_type).result(timeout=timeout)
 
     def events(self,
@@ -7703,7 +9526,8 @@ def set_permissions(
             access_control_list: Optional[List[ClusterAccessControlRequest]] = None) -> ClusterPermissions:
         """Set cluster permissions.
         
-        Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_id: str
           The cluster for which to get or manage permissions.
@@ -7896,20 +9720,19 @@ def wait_command_status_command_execution_cancelled(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_command_status_command_execution_finished_or_error(
+    def wait_context_status_command_execution_running(
             self,
             cluster_id: str,
-            command_id: str,
             context_id: str,
             timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
+            callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, )
-        failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, )
+        target_states = (ContextStatus.RUNNING, )
+        failure_states = (ContextStatus.ERROR, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
+            poll = self.context_status(cluster_id=cluster_id, context_id=context_id)
             status = poll.status
             status_message = f'current status: {status}'
             if status in target_states:
@@ -7917,9 +9740,9 @@ def wait_command_status_command_execution_finished_or_error(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Finished or Error, got {status}: {status_message}'
+                msg = f'failed to reach Running, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
+            prefix = f"cluster_id={cluster_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
@@ -7929,19 +9752,20 @@ def wait_command_status_command_execution_finished_or_error(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_context_status_command_execution_running(
+    def wait_command_status_command_execution_finished_or_error(
             self,
             cluster_id: str,
+            command_id: str,
             context_id: str,
             timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[ContextStatusResponse], None]] = None) -> ContextStatusResponse:
+            callback: Optional[Callable[[CommandStatusResponse], None]] = None) -> CommandStatusResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (ContextStatus.RUNNING, )
-        failure_states = (ContextStatus.ERROR, )
+        target_states = (CommandStatus.FINISHED, CommandStatus.ERROR, )
+        failure_states = (CommandStatus.CANCELLED, CommandStatus.CANCELLING, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
-            poll = self.context_status(cluster_id=cluster_id, context_id=context_id)
+            poll = self.command_status(cluster_id=cluster_id, command_id=command_id, context_id=context_id)
             status = poll.status
             status_message = f'current status: {status}'
             if status in target_states:
@@ -7949,9 +9773,9 @@ def wait_context_status_command_execution_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach Running, got {status}: {status_message}'
+                msg = f'failed to reach Finished or Error, got {status}: {status_message}'
                 raise OperationFailed(msg)
-            prefix = f"cluster_id={cluster_id}, context_id={context_id}"
+            prefix = f"cluster_id={cluster_id}, command_id={command_id}, context_id={context_id}"
             sleep = attempt
             if sleep > 10:
                 # sleep 10s max per attempt
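
Both reordered waiters share one polling skeleton; restated as a hedged, generic sketch (the names here are illustrative, not part of the SDK):

```python
import time
from datetime import timedelta


def wait_until(poll, target_states, failure_states, timeout=timedelta(minutes=20)):
    """Poll `poll()` until `.status` reaches a target state, sleeping 1s..10s."""
    deadline = time.time() + timeout.total_seconds()
    attempt = 1
    while time.time() < deadline:
        result = poll()
        if result.status in target_states:
            return result
        if result.status in failure_states:
            raise RuntimeError(f'failed, got {result.status}')
        time.sleep(min(attempt, 10))  # linear backoff, capped at 10s per attempt
        attempt += 1
    raise TimeoutError(f'timed out after {timeout}')
```
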
@@ -8546,7 +10370,8 @@ def set_permissions(
     ) -> InstancePoolPermissions:
         """Set instance pool permissions.
         
-        Sets permissions on an instance pool. Instance pools can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
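
The reworded docstrings make the replace semantics explicit, so a hedged sketch of what that implies for instance pools (the pool id and user are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import (InstancePoolAccessControlRequest,
                                            InstancePoolPermissionLevel)

w = WorkspaceClient()

# Full replace: after this call, the listed grant is the only direct permission.
w.instance_pools.set_permissions(
    instance_pool_id='0123-456789-pool',  # placeholder
    access_control_list=[
        InstancePoolAccessControlRequest(
            user_name='someone@example.com',
            permission_level=InstancePoolPermissionLevel.CAN_ATTACH_TO)
    ])

# An empty list deletes all direct permissions on the pool.
w.instance_pools.set_permissions(instance_pool_id='0123-456789-pool',
                                 access_control_list=[])
```
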
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index 4a4c640e6..c81159cca 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -21,100 +21,63 @@
 
 
 @dataclass
-class CreateDashboardRequest:
-    display_name: str
-    """The display name of the dashboard."""
-
-    parent_path: Optional[str] = None
-    """The workspace path of the folder containing the dashboard. Includes leading slash and no
-    trailing slash. This field is excluded in List Dashboards responses."""
-
-    serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
-    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
-    that represents the dashboard's layout and components.
-    
-    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
-
-    warehouse_id: Optional[str] = None
-    """The warehouse ID used to run the dashboard."""
+class CancelQueryExecutionResponse:
+    status: Optional[List[CancelQueryExecutionResponseStatus]] = None
 
     def as_dict(self) -> dict:
-        """Serializes the CreateDashboardRequest into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CancelQueryExecutionResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.status: body['status'] = [v.as_dict() for v in self.status]
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateDashboardRequest:
-        """Deserializes the CreateDashboardRequest from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   parent_path=d.get('parent_path', None),
-                   serialized_dashboard=d.get('serialized_dashboard', None),
-                   warehouse_id=d.get('warehouse_id', None))
-
-
-@dataclass
-class CreateScheduleRequest:
-    cron_schedule: CronSchedule
-    """The cron expression describing the frequency of the periodic refresh for this schedule."""
-
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the schedule belongs."""
-
-    display_name: Optional[str] = None
-    """The display name for schedule."""
-
-    pause_status: Optional[SchedulePauseStatus] = None
-    """The status indicates whether this schedule is paused or not."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateScheduleRequest into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponse into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
+        if self.status: body['status'] = self.status
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateScheduleRequest:
-        """Deserializes the CreateScheduleRequest from a dictionary."""
-        return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   pause_status=_enum(d, 'pause_status', SchedulePauseStatus))
+    def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponse:
+        """Deserializes the CancelQueryExecutionResponse from a dictionary."""
+        return cls(status=_repeated_dict(d, 'status', CancelQueryExecutionResponseStatus))
 
 
 @dataclass
-class CreateSubscriptionRequest:
-    subscriber: Subscriber
-    """Subscriber details for users and destinations to be added as subscribers to the schedule."""
+class CancelQueryExecutionResponseStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
 
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the subscription belongs."""
+    pending: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
 
-    schedule_id: Optional[str] = None
-    """UUID identifying the schedule to which the subscription belongs."""
+    success: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
 
     def as_dict(self) -> dict:
-        """Serializes the CreateSubscriptionRequest into a dictionary suitable for use as a JSON request body."""
+        """Serializes the CancelQueryExecutionResponseStatus into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        if self.subscriber: body['subscriber'] = self.subscriber.as_dict()
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.pending: body['pending'] = self.pending.as_dict()
+        if self.success: body['success'] = self.success.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelQueryExecutionResponseStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.pending: body['pending'] = self.pending
+        if self.success: body['success'] = self.success
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateSubscriptionRequest:
-        """Deserializes the CreateSubscriptionRequest from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   schedule_id=d.get('schedule_id', None),
-                   subscriber=_from_dict(d, 'subscriber', Subscriber))
+    def from_dict(cls, d: Dict[str, any]) -> CancelQueryExecutionResponseStatus:
+        """Deserializes the CancelQueryExecutionResponseStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None),
+                   pending=_from_dict(d, 'pending', Empty),
+                   success=_from_dict(d, 'success', Empty))
 
 
 @dataclass
@@ -139,6 +102,14 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
@@ -202,6 +173,21 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.path is not None: body['path'] = self.path
+        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
@@ -222,6 +208,27 @@ class DashboardView(Enum):
     DASHBOARD_VIEW_BASIC = 'DASHBOARD_VIEW_BASIC'
 
 
+class DataType(Enum):
+
+    DATA_TYPE_ARRAY = 'DATA_TYPE_ARRAY'
+    DATA_TYPE_BIG_INT = 'DATA_TYPE_BIG_INT'
+    DATA_TYPE_BINARY = 'DATA_TYPE_BINARY'
+    DATA_TYPE_BOOLEAN = 'DATA_TYPE_BOOLEAN'
+    DATA_TYPE_DATE = 'DATA_TYPE_DATE'
+    DATA_TYPE_DECIMAL = 'DATA_TYPE_DECIMAL'
+    DATA_TYPE_DOUBLE = 'DATA_TYPE_DOUBLE'
+    DATA_TYPE_FLOAT = 'DATA_TYPE_FLOAT'
+    DATA_TYPE_INT = 'DATA_TYPE_INT'
+    DATA_TYPE_INTERVAL = 'DATA_TYPE_INTERVAL'
+    DATA_TYPE_MAP = 'DATA_TYPE_MAP'
+    DATA_TYPE_SMALL_INT = 'DATA_TYPE_SMALL_INT'
+    DATA_TYPE_STRING = 'DATA_TYPE_STRING'
+    DATA_TYPE_STRUCT = 'DATA_TYPE_STRUCT'
+    DATA_TYPE_TIMESTAMP = 'DATA_TYPE_TIMESTAMP'
+    DATA_TYPE_TINY_INT = 'DATA_TYPE_TINY_INT'
+    DATA_TYPE_VOID = 'DATA_TYPE_VOID'
+
+
 @dataclass
 class DeleteScheduleResponse:
 
@@ -230,6 +237,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScheduleResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScheduleResponse:
         """Deserializes the DeleteScheduleResponse from a dictionary."""
@@ -244,12 +256,98 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSubscriptionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSubscriptionResponse:
         """Deserializes the DeleteSubscriptionResponse from a dictionary."""
         return cls()
 
 
+@dataclass
+class Empty:
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    def as_dict(self) -> dict:
+        """Serializes the Empty into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Empty:
+        """Deserializes the Empty from a dictionary."""
+        return cls()
+
+
+@dataclass
+class ExecutePublishedDashboardQueryRequest:
+    """Execute query request for published Dashboards. Since published dashboards have the option of
+    running as the publisher, the datasets, warehouse_id are excluded from the request and instead
+    read from the source (lakeview-config) via the additional parameters (dashboardName and
+    dashboardRevisionId)"""
+
+    dashboard_name: str
+    """Dashboard name and revision_id is required to retrieve PublishedDatasetDataModel which contains
+    the list of datasets, warehouse_id, and embedded_credentials"""
+
+    dashboard_revision_id: str
+
+    override_warehouse_id: Optional[str] = None
+    """A dashboard schedule can override the warehouse used as compute for processing the published
+    dashboard queries"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExecutePublishedDashboardQueryRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
+        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecutePublishedDashboardQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_name is not None: body['dashboard_name'] = self.dashboard_name
+        if self.dashboard_revision_id is not None: body['dashboard_revision_id'] = self.dashboard_revision_id
+        if self.override_warehouse_id is not None: body['override_warehouse_id'] = self.override_warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExecutePublishedDashboardQueryRequest:
+        """Deserializes the ExecutePublishedDashboardQueryRequest from a dictionary."""
+        return cls(dashboard_name=d.get('dashboard_name', None),
+                   dashboard_revision_id=d.get('dashboard_revision_id', None),
+                   override_warehouse_id=d.get('override_warehouse_id', None))
+
+
+@dataclass
+class ExecuteQueryResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the ExecuteQueryResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecuteQueryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExecuteQueryResponse:
+        """Deserializes the ExecuteQueryResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class GenieAttachment:
     """Genie AI Response"""
@@ -265,6 +363,13 @@ def as_dict(self) -> dict:
         if self.text: body['text'] = self.text.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query: body['query'] = self.query
+        if self.text: body['text'] = self.text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieAttachment:
         """Deserializes the GenieAttachment from a dictionary."""
@@ -303,6 +408,18 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieConversation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.title is not None: body['title'] = self.title
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieConversation:
         """Deserializes the GenieConversation from a dictionary."""
@@ -333,6 +450,14 @@ def as_dict(self) -> dict:
         if self.space_id is not None: body['space_id'] = self.space_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieCreateConversationMessageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieCreateConversationMessageRequest:
         """Deserializes the GenieCreateConversationMessageRequest from a dictionary."""
@@ -353,6 +478,12 @@ def as_dict(self) -> dict:
         if self.statement_response: body['statement_response'] = self.statement_response.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieGetMessageQueryResultResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.statement_response: body['statement_response'] = self.statement_response
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieGetMessageQueryResultResponse:
         """Deserializes the GenieGetMessageQueryResultResponse from a dictionary."""
@@ -391,8 +522,9 @@ class GenieMessage:
     status: Optional[MessageStatus] = None
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
-    Executing AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
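
Since a message can sit in `EXECUTING_QUERY` until the client actually fetches the result, a hedged polling sketch, assuming the generated `w.genie` client surface for these types (the space id and prompt are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import MessageStatus

w = WorkspaceClient()

msg = w.genie.start_conversation_and_wait(space_id='space-id-placeholder',
                                          content='Top 10 customers by revenue?')
if msg.status == MessageStatus.EXECUTING_QUERY:
    # Status only advances past EXECUTING_QUERY once the result is fetched.
    result = w.genie.get_message_query_result(space_id=msg.space_id,
                                              conversation_id=msg.conversation_id,
                                              message_id=msg.id)
```
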
@@ -422,6 +554,23 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attachments: body['attachments'] = self.attachments
+        if self.content is not None: body['content'] = self.content
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.created_timestamp is not None: body['created_timestamp'] = self.created_timestamp
+        if self.error: body['error'] = self.error
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query_result: body['query_result'] = self.query_result
+        if self.space_id is not None: body['space_id'] = self.space_id
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieMessage:
         """Deserializes the GenieMessage from a dictionary."""
@@ -453,6 +602,13 @@ def as_dict(self) -> dict:
         if self.space_id is not None: body['space_id'] = self.space_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieStartConversationMessageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.space_id is not None: body['space_id'] = self.space_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationMessageRequest:
         """Deserializes the GenieStartConversationMessageRequest from a dictionary."""
@@ -480,6 +636,15 @@ def as_dict(self) -> dict:
         if self.message_id is not None: body['message_id'] = self.message_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenieStartConversationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.conversation: body['conversation'] = self.conversation
+        if self.conversation_id is not None: body['conversation_id'] = self.conversation_id
+        if self.message: body['message'] = self.message
+        if self.message_id is not None: body['message_id'] = self.message_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
         """Deserializes the GenieStartConversationResponse from a dictionary."""
@@ -489,6 +654,25 @@ def from_dict(cls, d: Dict[str, any]) -> GenieStartConversationResponse:
                    message_id=d.get('message_id', None))
 
 
+@dataclass
+class GetPublishedDashboardEmbeddedResponse:
+
+    def as_dict(self) -> dict:
+        """Serializes the GetPublishedDashboardEmbeddedResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedDashboardEmbeddedResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> GetPublishedDashboardEmbeddedResponse:
+        """Deserializes the GetPublishedDashboardEmbeddedResponse from a dictionary."""
+        return cls()
+
+
 class LifecycleState(Enum):
 
     ACTIVE = 'ACTIVE'
@@ -510,6 +694,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDashboardsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboards: body['dashboards'] = self.dashboards
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDashboardsResponse:
         """Deserializes the ListDashboardsResponse from a dictionary."""
@@ -532,6 +723,13 @@ def as_dict(self) -> dict:
         if self.schedules: body['schedules'] = [v.as_dict() for v in self.schedules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSchedulesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.schedules: body['schedules'] = self.schedules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSchedulesResponse:
         """Deserializes the ListSchedulesResponse from a dictionary."""
@@ -554,6 +752,13 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSubscriptionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSubscriptionsResponse:
         """Deserializes the ListSubscriptionsResponse from a dictionary."""
@@ -574,6 +779,13 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MessageError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error is not None: body['error'] = self.error
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MessageError:
         """Deserializes the MessageError from a dictionary."""
@@ -607,6 +819,7 @@ class MessageErrorType(Enum):
     LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
     MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
     MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = 'NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE'
     NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
     NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
     RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
@@ -614,6 +827,7 @@ class MessageErrorType(Enum):
     REPLY_PROCESS_TIMEOUT_EXCEPTION = 'REPLY_PROCESS_TIMEOUT_EXCEPTION'
     RETRYABLE_PROCESSING_EXCEPTION = 'RETRYABLE_PROCESSING_EXCEPTION'
     SQL_EXECUTION_EXCEPTION = 'SQL_EXECUTION_EXCEPTION'
+    STOP_PROCESS_DUE_TO_AUTO_REGENERATE = 'STOP_PROCESS_DUE_TO_AUTO_REGENERATE'
     TABLES_MISSING_EXCEPTION = 'TABLES_MISSING_EXCEPTION'
     TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION = 'TOO_MANY_CERTIFIED_ANSWERS_EXCEPTION'
     TOO_MANY_TABLES_EXCEPTION = 'TOO_MANY_TABLES_EXCEPTION'
@@ -626,8 +840,9 @@ class MessageErrorType(Enum):
 class MessageStatus(Enum):
     """MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data
     sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. *
-    `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`:
-    Executing AI provided SQL query. Get the SQL query result by calling
+    `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`:
+    Waiting for warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing
+    AI provided SQL query. Get the SQL query result by calling
     [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message
     status will stay in the `EXECUTING_QUERY` until a client calls
     [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a
@@ -644,6 +859,7 @@ class MessageStatus(Enum):
     FAILED = 'FAILED'
     FETCHING_METADATA = 'FETCHING_METADATA'
     FILTERING_CONTEXT = 'FILTERING_CONTEXT'
+    PENDING_WAREHOUSE = 'PENDING_WAREHOUSE'
     QUERY_RESULT_EXPIRED = 'QUERY_RESULT_EXPIRED'
     SUBMITTED = 'SUBMITTED'
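
A hedged sketch of branching on the new `PENDING_WAREHOUSE` status while polling a Genie message; it assumes `w` is a `WorkspaceClient` on a build that exposes `w.genie`, and all ids are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.dashboards import MessageStatus

w = WorkspaceClient()
msg = w.genie.get_message(space_id='<space-id>',
                          conversation_id='<conversation-id>',
                          message_id='<message-id>')
if msg.status == MessageStatus.PENDING_WAREHOUSE:
    # New in this patch: the query is waiting on a warehouse before it can run.
    print('waiting for a warehouse')
elif msg.status == MessageStatus.EXECUTING_QUERY:
    # Per the docstring above, fetch the result to move the message out of
    # EXECUTING_QUERY.
    result = w.genie.get_message_query_result(space_id='<space-id>',
                                              conversation_id='<conversation-id>',
                                              message_id='<message-id>')
```
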
 
@@ -659,12 +875,28 @@ class MigrateDashboardRequest:
     parent_path: Optional[str] = None
     """The workspace path of the folder to contain the migrated Lakeview dashboard."""
 
+    update_parameter_syntax: Optional[bool] = None
+    """Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named
+    syntax (:param) when converting datasets in the dashboard."""
+
     def as_dict(self) -> dict:
         """Serializes the MigrateDashboardRequest into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None:
+            body['update_parameter_syntax'] = self.update_parameter_syntax
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigrateDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.source_dashboard_id is not None: body['source_dashboard_id'] = self.source_dashboard_id
+        if self.update_parameter_syntax is not None:
+            body['update_parameter_syntax'] = self.update_parameter_syntax
         return body
 
     @classmethod
@@ -672,7 +904,76 @@ def from_dict(cls, d: Dict[str, any]) -> MigrateDashboardRequest:
         """Deserializes the MigrateDashboardRequest from a dictionary."""
         return cls(display_name=d.get('display_name', None),
                    parent_path=d.get('parent_path', None),
-                   source_dashboard_id=d.get('source_dashboard_id', None))
+                   source_dashboard_id=d.get('source_dashboard_id', None),
+                   update_parameter_syntax=d.get('update_parameter_syntax', None))
+
+
+@dataclass
+class PendingStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    def as_dict(self) -> dict:
+        """Serializes the PendingStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PendingStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PendingStatus:
+        """Deserializes the PendingStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None))
+
+
+@dataclass
+class PollQueryStatusResponse:
+    data: Optional[List[PollQueryStatusResponseData]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponse:
+        """Deserializes the PollQueryStatusResponse from a dictionary."""
+        return cls(data=_repeated_dict(d, 'data', PollQueryStatusResponseData))
+
+
+@dataclass
+class PollQueryStatusResponseData:
+    status: QueryResponseStatus
+
+    def as_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponseData into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.status: body['status'] = self.status.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PollQueryStatusResponseData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status: body['status'] = self.status
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PollQueryStatusResponseData:
+        """Deserializes the PollQueryStatusResponseData from a dictionary."""
+        return cls(status=_from_dict(d, 'status', QueryResponseStatus))
 
 
 @dataclass
@@ -695,6 +996,14 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishRequest:
         """Deserializes the PublishRequest from a dictionary."""
@@ -726,17 +1035,28 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
-        """Deserializes the PublishedDashboard from a dictionary."""
-        return cls(display_name=d.get('display_name', None),
-                   embed_credentials=d.get('embed_credentials', None),
-                   revision_create_time=d.get('revision_create_time', None),
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishedDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.embed_credentials is not None: body['embed_credentials'] = self.embed_credentials
+        if self.revision_create_time is not None: body['revision_create_time'] = self.revision_create_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PublishedDashboard:
+        """Deserializes the PublishedDashboard from a dictionary."""
+        return cls(display_name=d.get('display_name', None),
+                   embed_credentials=d.get('embed_credentials', None),
+                   revision_create_time=d.get('revision_create_time', None),
                    warehouse_id=d.get('warehouse_id', None))
 
 
 @dataclass
 class QueryAttachment:
+    cached_query_schema: Optional[QuerySchema] = None
+
     description: Optional[str] = None
     """Description of the query"""
 
@@ -755,12 +1075,15 @@ class QueryAttachment:
     query: Optional[str] = None
     """AI generated SQL query"""
 
+    statement_id: Optional[str] = None
+
     title: Optional[str] = None
     """Name of the query"""
 
     def as_dict(self) -> dict:
         """Serializes the QueryAttachment into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema.as_dict()
         if self.description is not None: body['description'] = self.description
         if self.id is not None: body['id'] = self.id
         if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
@@ -768,21 +1091,151 @@ def as_dict(self) -> dict:
         if self.last_updated_timestamp is not None:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         if self.query is not None: body['query'] = self.query
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.title is not None: body['title'] = self.title
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cached_query_schema: body['cached_query_schema'] = self.cached_query_schema
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.instruction_id is not None: body['instruction_id'] = self.instruction_id
+        if self.instruction_title is not None: body['instruction_title'] = self.instruction_title
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.query is not None: body['query'] = self.query
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
         if self.title is not None: body['title'] = self.title
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryAttachment:
         """Deserializes the QueryAttachment from a dictionary."""
-        return cls(description=d.get('description', None),
+        return cls(cached_query_schema=_from_dict(d, 'cached_query_schema', QuerySchema),
+                   description=d.get('description', None),
                    id=d.get('id', None),
                    instruction_id=d.get('instruction_id', None),
                    instruction_title=d.get('instruction_title', None),
                    last_updated_timestamp=d.get('last_updated_timestamp', None),
                    query=d.get('query', None),
+                   statement_id=d.get('statement_id', None),
                    title=d.get('title', None))
 
 
+@dataclass
+class QueryResponseStatus:
+    canceled: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    closed: Optional[Empty] = None
+    """Represents an empty message, similar to google.protobuf.Empty, which is not available in the
+    firm right now."""
+
+    pending: Optional[PendingStatus] = None
+
+    statement_id: Optional[str] = None
+    """The statement id in format(01eef5da-c56e-1f36-bafa-21906587d6ba) The statement_id should be
+    identical to data_token in SuccessStatus and PendingStatus. This field is created for audit
+    logging purpose to record the statement_id of all QueryResponseStatus."""
+
+    success: Optional[SuccessStatus] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the QueryResponseStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.canceled: body['canceled'] = self.canceled.as_dict()
+        if self.closed: body['closed'] = self.closed.as_dict()
+        if self.pending: body['pending'] = self.pending.as_dict()
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.success: body['success'] = self.success.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryResponseStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.canceled: body['canceled'] = self.canceled
+        if self.closed: body['closed'] = self.closed
+        if self.pending: body['pending'] = self.pending
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.success: body['success'] = self.success
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QueryResponseStatus:
+        """Deserializes the QueryResponseStatus from a dictionary."""
+        return cls(canceled=_from_dict(d, 'canceled', Empty),
+                   closed=_from_dict(d, 'closed', Empty),
+                   pending=_from_dict(d, 'pending', PendingStatus),
+                   statement_id=d.get('statement_id', None),
+                   success=_from_dict(d, 'success', SuccessStatus))
+
+
+@dataclass
+class QuerySchema:
+    columns: Optional[List[QuerySchemaColumn]] = None
+
+    statement_id: Optional[str] = None
+    """Used to determine if the stored query schema is compatible with the latest run. The service
+    should always clear the schema when the query is re-executed."""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuerySchema into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuerySchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QuerySchema:
+        """Deserializes the QuerySchema from a dictionary."""
+        return cls(columns=_repeated_dict(d, 'columns', QuerySchemaColumn),
+                   statement_id=d.get('statement_id', None))
+
+
+@dataclass
+class QuerySchemaColumn:
+    name: str
+
+    type_text: str
+    """Corresponds to type desc"""
+
+    data_type: DataType
+    """Populated from https://docs.databricks.com/sql/language-manual/sql-ref-datatypes.html"""
+
+    def as_dict(self) -> dict:
+        """Serializes the QuerySchemaColumn into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_type is not None: body['data_type'] = self.data_type.value
+        if self.name is not None: body['name'] = self.name
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QuerySchemaColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_type is not None: body['data_type'] = self.data_type
+        if self.name is not None: body['name'] = self.name
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> QuerySchemaColumn:
+        """Deserializes the QuerySchemaColumn from a dictionary."""
+        return cls(data_type=_enum(d, 'data_type', DataType),
+                   name=d.get('name', None),
+                   type_text=d.get('type_text', None))
+
+
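
A hedged sketch of inspecting the new `cached_query_schema` on a Genie query attachment; it assumes the message's attachments expose a `query` field of type `QueryAttachment`, and reuses the placeholder ids and `w` client from the sketch above:

```python
msg = w.genie.get_message(space_id='<space-id>',
                          conversation_id='<conversation-id>',
                          message_id='<message-id>')
for attachment in msg.attachments or []:
    qa = attachment.query  # a QueryAttachment when the message produced SQL
    if qa and qa.cached_query_schema:
        for col in qa.cached_query_schema.columns or []:
            print(col.name, col.type_text, col.data_type)
```
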
 @dataclass
 class Result:
     is_truncated: Optional[bool] = None
@@ -803,6 +1256,14 @@ def as_dict(self) -> dict:
         if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Result into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Result:
         """Deserializes the Result from a dictionary."""
@@ -839,6 +1300,9 @@ class Schedule:
     update_time: Optional[str] = None
     """A timestamp indicating when the schedule was last updated."""
 
+    warehouse_id: Optional[str] = None
+    """The warehouse id to run the dashboard with for the schedule."""
+
     def as_dict(self) -> dict:
         """Serializes the Schedule into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -850,6 +1314,21 @@ def as_dict(self) -> dict:
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
         if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
         if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Schedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     @classmethod
@@ -862,7 +1341,8 @@ def from_dict(cls, d: Dict[str, any]) -> Schedule:
                    etag=d.get('etag', None),
                    pause_status=_enum(d, 'pause_status', SchedulePauseStatus),
                    schedule_id=d.get('schedule_id', None),
-                   update_time=d.get('update_time', None))
+                   update_time=d.get('update_time', None),
+                   warehouse_id=d.get('warehouse_id', None))
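
A sketch of pinning a schedule to a specific warehouse via the new `warehouse_id` field; it assumes the refactored `create_schedule(dashboard_id, schedule=...)` signature introduced later in this patch, a `WorkspaceClient` bound to `w`, and placeholder ids:

```python
from databricks.sdk.service.dashboards import CronSchedule, Schedule

schedule = Schedule(cron_schedule=CronSchedule(quartz_cron_expression='0 0 8 * * ?',
                                               timezone_id='UTC'),
                    warehouse_id='<warehouse-id>')
created = w.lakeview.create_schedule(dashboard_id='<dashboard-id>',
                                     schedule=schedule)
```
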
 
 
 class SchedulePauseStatus(Enum):
@@ -888,6 +1368,13 @@ def as_dict(self) -> dict:
         if self.user_subscriber: body['user_subscriber'] = self.user_subscriber.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Subscriber into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_subscriber: body['destination_subscriber'] = self.destination_subscriber
+        if self.user_subscriber: body['user_subscriber'] = self.user_subscriber
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscriber:
         """Deserializes the Subscriber from a dictionary."""
@@ -937,6 +1424,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Subscription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by_user_id is not None: body['created_by_user_id'] = self.created_by_user_id
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.etag is not None: body['etag'] = self.etag
+        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
+        if self.subscriber: body['subscriber'] = self.subscriber
+        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Subscription:
         """Deserializes the Subscription from a dictionary."""
@@ -961,6 +1461,12 @@ def as_dict(self) -> dict:
         if self.destination_id is not None: body['destination_id'] = self.destination_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubscriptionSubscriberDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberDestination:
         """Deserializes the SubscriptionSubscriberDestination from a dictionary."""
@@ -978,12 +1484,47 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubscriptionSubscriberUser into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubscriptionSubscriberUser:
         """Deserializes the SubscriptionSubscriberUser from a dictionary."""
         return cls(user_id=d.get('user_id', None))
 
 
+@dataclass
+class SuccessStatus:
+    data_token: str
+    """The token to poll for result asynchronously Example:
+    EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ"""
+
+    truncated: Optional[bool] = None
+    """Whether the query result is truncated (either by byte limit or row limit)"""
+
+    def as_dict(self) -> dict:
+        """Serializes the SuccessStatus into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SuccessStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_token is not None: body['data_token'] = self.data_token
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> SuccessStatus:
+        """Deserializes the SuccessStatus from a dictionary."""
+        return cls(data_token=d.get('data_token', None), truncated=d.get('truncated', None))
+
+
 @dataclass
 class TextAttachment:
     content: Optional[str] = None
@@ -998,6 +1539,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TextAttachment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextAttachment:
         """Deserializes the TextAttachment from a dictionary."""
@@ -1012,6 +1560,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TrashDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TrashDashboardResponse:
         """Deserializes the TrashDashboardResponse from a dictionary."""
@@ -1026,97 +1579,229 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UnpublishDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UnpublishDashboardResponse:
         """Deserializes the UnpublishDashboardResponse from a dictionary."""
         return cls()
 
 
-@dataclass
-class UpdateDashboardRequest:
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard."""
+class GenieAPI:
+    """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled."""
 
-    display_name: Optional[str] = None
-    """The display name of the dashboard."""
+    def __init__(self, api_client):
+        self._api = api_client
 
-    etag: Optional[str] = None
-    """The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard
-    has not been modified since the last read. This field is excluded in List Dashboards responses."""
+    def wait_get_message_genie_completed(
+            self,
+            conversation_id: str,
+            message_id: str,
+            space_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (MessageStatus.COMPLETED, )
+        failure_states = (MessageStatus.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
+            status = poll.status
+            status_message = f'current status: {status}'
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    serialized_dashboard: Optional[str] = None
-    """The contents of the dashboard in serialized string form. This field is excluded in List
-    Dashboards responses. Use the [get dashboard API] to retrieve an example response, which
-    includes the `serialized_dashboard` field. This field provides the structure of the JSON string
-    that represents the dashboard's layout and components.
-    
-    [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get"""
+    def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
+        """Create conversation message.
+        
+        Create a new message in a [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
+        body = {}
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-    warehouse_id: Optional[str] = None
-    """The warehouse ID used to run the dashboard."""
+        op_response = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
+            body=body,
+            headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieMessage.from_dict(op_response),
+                    conversation_id=conversation_id,
+                    message_id=op_response['id'],
+                    space_id=space_id)
 
-    def as_dict(self) -> dict:
-        """Serializes the UpdateDashboardRequest into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.serialized_dashboard is not None: body['serialized_dashboard'] = self.serialized_dashboard
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
-        return body
+    def create_message_and_wait(self,
+                                space_id: str,
+                                conversation_id: str,
+                                content: str,
+                                timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.create_message(content=content, conversation_id=conversation_id,
+                                   space_id=space_id).result(timeout=timeout)
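
Putting the waiter to work, a sketch of a two-turn Genie exchange through the `*_and_wait` helpers defined here (`w` and the space id are placeholders):

```python
first = w.genie.start_conversation_and_wait(
    space_id='<genie-space-id>',
    content='Which products sold best last quarter?')
follow_up = w.genie.create_message_and_wait(
    space_id='<genie-space-id>',
    conversation_id=first.conversation_id,
    content='Break that down by region.')
```
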
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateDashboardRequest:
-        """Deserializes the UpdateDashboardRequest from a dictionary."""
-        return cls(dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   serialized_dashboard=d.get('serialized_dashboard', None),
-                   warehouse_id=d.get('warehouse_id', None))
+    def execute_message_query(self, space_id: str, conversation_id: str,
+                              message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Execute SQL query in a conversation message.
+        
+        Execute the SQL query in the message.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
 
+        headers = {'Accept': 'application/json', }
 
-@dataclass
-class UpdateScheduleRequest:
-    cron_schedule: CronSchedule
-    """The cron expression describing the frequency of the periodic refresh for this schedule."""
+        res = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
 
-    dashboard_id: Optional[str] = None
-    """UUID identifying the dashboard to which the schedule belongs."""
+    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
+        """Get conversation message.
+        
+        Get a message from a conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+        
+        :returns: :class:`GenieMessage`
+        """
 
-    display_name: Optional[str] = None
-    """The display name for schedule."""
+        headers = {'Accept': 'application/json', }
 
-    etag: Optional[str] = None
-    """The etag for the schedule. Must be left empty on create, must be provided on updates to ensure
-    that the schedule has not been modified since the last read, and can be optionally provided on
-    delete."""
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
+            headers=headers)
+        return GenieMessage.from_dict(res)
 
-    pause_status: Optional[SchedulePauseStatus] = None
-    """The status indicates whether this schedule is paused or not."""
+    def get_message_query_result(self, space_id: str, conversation_id: str,
+                                 message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result.
+        
+        Get the result of the SQL query if the message has a query attachment. This is only available if
+        the message has a query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
 
-    schedule_id: Optional[str] = None
-    """UUID identifying the schedule."""
+        headers = {'Accept': 'application/json', }
 
-    def as_dict(self) -> dict:
-        """Serializes the UpdateScheduleRequest into a dictionary suitable for use as a JSON request body."""
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
+    def get_message_query_result_by_attachment(self, space_id: str, conversation_id: str, message_id: str,
+                                               attachment_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result by attachment id.
+        
+        Get the result of the SQL query by attachment id. This is only available if the message has a
+        query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result/{attachment_id}',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
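
A short usage sketch for the attachment-scoped endpoint above; all ids are placeholders, and `as_dict()` is used only to dump the raw payload:

```python
result = w.genie.get_message_query_result_by_attachment(
    space_id='<space-id>',
    conversation_id='<conversation-id>',
    message_id='<message-id>',
    attachment_id='<attachment-id>')
print(result.as_dict())
```
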
+    def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
+        """Start conversation.
+        
+        Start a new conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
         body = {}
-        if self.cron_schedule: body['cron_schedule'] = self.cron_schedule.as_dict()
-        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
-        if self.display_name is not None: body['display_name'] = self.display_name
-        if self.etag is not None: body['etag'] = self.etag
-        if self.pause_status is not None: body['pause_status'] = self.pause_status.value
-        if self.schedule_id is not None: body['schedule_id'] = self.schedule_id
-        return body
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateScheduleRequest:
-        """Deserializes the UpdateScheduleRequest from a dictionary."""
-        return cls(cron_schedule=_from_dict(d, 'cron_schedule', CronSchedule),
-                   dashboard_id=d.get('dashboard_id', None),
-                   display_name=d.get('display_name', None),
-                   etag=d.get('etag', None),
-                   pause_status=_enum(d, 'pause_status', SchedulePauseStatus),
-                   schedule_id=d.get('schedule_id', None))
+        op_response = self._api.do('POST',
+                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
+                                   body=body,
+                                   headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieStartConversationResponse.from_dict(op_response),
+                    conversation_id=op_response['conversation_id'],
+                    message_id=op_response['message_id'],
+                    space_id=space_id)
+
+    def start_conversation_and_wait(self, space_id: str, content: str,
+                                    timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
 
 
 class GenieAPI:
@@ -1313,66 +1998,31 @@ class LakeviewAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def create(self,
-               display_name: str,
-               *,
-               parent_path: Optional[str] = None,
-               serialized_dashboard: Optional[str] = None,
-               warehouse_id: Optional[str] = None) -> Dashboard:
+    def create(self, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Create dashboard.
         
         Create a draft dashboard.
         
-        :param display_name: str
-          The display name of the dashboard.
-        :param parent_path: str (optional)
-          The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         """
-        body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if parent_path is not None: body['parent_path'] = parent_path
-        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        body = dashboard.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/lakeview/dashboards', body=body, headers=headers)
         return Dashboard.from_dict(res)
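
The breaking change is easiest to see in use: callers now wrap the fields in a `Dashboard` object. A sketch with placeholder values:

```python
from databricks.sdk.service.dashboards import Dashboard

dash = w.lakeview.create(dashboard=Dashboard(display_name='Sales overview',
                                             warehouse_id='<warehouse-id>'))
```

Note that although `dashboard` is typed as optional, the generated body construction calls `dashboard.as_dict()` unconditionally, so omitting it raises an `AttributeError`.
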
 
-    def create_schedule(self,
-                        dashboard_id: str,
-                        cron_schedule: CronSchedule,
-                        *,
-                        display_name: Optional[str] = None,
-                        pause_status: Optional[SchedulePauseStatus] = None) -> Schedule:
+    def create_schedule(self, dashboard_id: str, *, schedule: Optional[Schedule] = None) -> Schedule:
         """Create dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         """
-        body = {}
-        if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        if pause_status is not None: body['pause_status'] = pause_status.value
+        body = schedule.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -1381,21 +2031,22 @@ def create_schedule(self,
                            headers=headers)
         return Schedule.from_dict(res)
 
-    def create_subscription(self, dashboard_id: str, schedule_id: str,
-                            subscriber: Subscriber) -> Subscription:
+    def create_subscription(self,
+                            dashboard_id: str,
+                            schedule_id: str,
+                            *,
+                            subscription: Optional[Subscription] = None) -> Subscription:
         """Create schedule subscription.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
-        :param subscriber: :class:`Subscriber`
-          Subscriber details for users and destinations to be added as subscribers to the schedule.
+        :param subscription: :class:`Subscription` (optional)
         
         :returns: :class:`Subscription`
         """
-        body = {}
-        if subscriber is not None: body['subscriber'] = subscriber.as_dict()
+        body = subscription.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do(
@@ -1481,7 +2132,7 @@ def get_published(self, dashboard_id: str) -> PublishedDashboard:
         Get the current published dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         :returns: :class:`PublishedDashboard`
         """
@@ -1576,7 +2227,7 @@ def list_schedules(self,
         """List dashboard schedules.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
+          UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -1612,9 +2263,9 @@ def list_subscriptions(self,
         """List schedule subscriptions.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
+          UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
+          UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -1646,7 +2297,8 @@ def migrate(self,
                 source_dashboard_id: str,
                 *,
                 display_name: Optional[str] = None,
-                parent_path: Optional[str] = None) -> Dashboard:
+                parent_path: Optional[str] = None,
+                update_parameter_syntax: Optional[bool] = None) -> Dashboard:
         """Migrate dashboard.
         
         Migrates a classic SQL dashboard to Lakeview.
@@ -1657,6 +2309,9 @@ def migrate(self,
           Display name for the new Lakeview dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder to contain the migrated Lakeview dashboard.
+        :param update_parameter_syntax: bool (optional)
+          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+          (:param) when converting datasets in the dashboard.
         
         :returns: :class:`Dashboard`
         """
@@ -1664,6 +2319,7 @@ def migrate(self,
         if display_name is not None: body['display_name'] = display_name
         if parent_path is not None: body['parent_path'] = parent_path
         if source_dashboard_id is not None: body['source_dashboard_id'] = source_dashboard_id
+        if update_parameter_syntax is not None: body['update_parameter_syntax'] = update_parameter_syntax
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST', '/api/2.0/lakeview/dashboards/migrate', body=body, headers=headers)
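
A sketch of the new flag in a migrate call (ids are placeholders):

```python
dash = w.lakeview.migrate(
    source_dashboard_id='<classic-dashboard-id>',
    display_name='Migrated dashboard',
    update_parameter_syntax=True)  # rewrite {{ param }} to :param in datasets
```
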
@@ -1720,7 +2376,7 @@ def unpublish(self, dashboard_id: str):
         Unpublish the dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         
         """
@@ -1729,41 +2385,18 @@ def unpublish(self, dashboard_id: str):
 
         self._api.do('DELETE', f'/api/2.0/lakeview/dashboards/{dashboard_id}/published', headers=headers)
 
-    def update(self,
-               dashboard_id: str,
-               *,
-               display_name: Optional[str] = None,
-               etag: Optional[str] = None,
-               serialized_dashboard: Optional[str] = None,
-               warehouse_id: Optional[str] = None) -> Dashboard:
+    def update(self, dashboard_id: str, *, dashboard: Optional[Dashboard] = None) -> Dashboard:
         """Update dashboard.
         
         Update a draft dashboard.
         
         :param dashboard_id: str
           UUID identifying the dashboard.
-        :param display_name: str (optional)
-          The display name of the dashboard.
-        :param etag: str (optional)
-          The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         """
-        body = {}
-        if display_name is not None: body['display_name'] = display_name
-        if etag is not None: body['etag'] = etag
-        if serialized_dashboard is not None: body['serialized_dashboard'] = serialized_dashboard
-        if warehouse_id is not None: body['warehouse_id'] = warehouse_id
+        body = dashboard.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH',
@@ -1775,34 +2408,19 @@ def update(self,
     def update_schedule(self,
                         dashboard_id: str,
                         schedule_id: str,
-                        cron_schedule: CronSchedule,
                         *,
-                        display_name: Optional[str] = None,
-                        etag: Optional[str] = None,
-                        pause_status: Optional[SchedulePauseStatus] = None) -> Schedule:
+                        schedule: Optional[Schedule] = None) -> Schedule:
         """Update dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param etag: str (optional)
-          The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that
-          the schedule has not been modified since the last read, and can be optionally provided on delete.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         """
-        body = {}
-        if cron_schedule is not None: body['cron_schedule'] = cron_schedule.as_dict()
-        if display_name is not None: body['display_name'] = display_name
-        if etag is not None: body['etag'] = etag
-        if pause_status is not None: body['pause_status'] = pause_status.value
+        body = schedule.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PUT',
@@ -1810,3 +2428,107 @@ def update_schedule(self,
                            body=body,
                            headers=headers)
         return Schedule.from_dict(res)
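
Because `update_schedule` now takes a whole `Schedule`, the natural pattern is read-modify-write, letting the etag returned by `get_schedule` guard against concurrent edits; a sketch with placeholder ids, assuming `get_schedule` is available on this client:

```python
from databricks.sdk.service.dashboards import SchedulePauseStatus

current = w.lakeview.get_schedule(dashboard_id='<dashboard-id>',
                                  schedule_id='<schedule-id>')
current.pause_status = SchedulePauseStatus.PAUSED
updated = w.lakeview.update_schedule(dashboard_id='<dashboard-id>',
                                     schedule_id='<schedule-id>',
                                     schedule=current)  # etag travels with `current`
```
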
+
+
+class LakeviewEmbeddedAPI:
+    """Token-based Lakeview APIs for embedding dashboards in external applications."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get_published_dashboard_embedded(self, dashboard_id: str):
+        """Read a published dashboard in an embedded ui.
+        
+        Get the current published dashboard within an embedded context.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('GET',
+                     f'/api/2.0/lakeview/dashboards/{dashboard_id}/published/embedded',
+                     headers=headers)
+
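
A one-line usage sketch, assuming the workspace client exposes this service as `w.lakeview_embedded` (the property name is an assumption, not confirmed by this patch):

```python
# Returns nothing on success; raises a DatabricksError on failure.
w.lakeview_embedded.get_published_dashboard_embedded(dashboard_id='<dashboard-id>')
```
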
+
+class QueryExecutionAPI:
+    """Query execution APIs for AI / BI Dashboards"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def cancel_published_query_execution(self,
+                                         dashboard_name: str,
+                                         dashboard_revision_id: str,
+                                         *,
+                                         tokens: Optional[List[str]] = None) -> CancelQueryExecutionResponse:
+        """Cancel the results for the a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`CancelQueryExecutionResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
+        if tokens is not None: query['tokens'] = [v for v in tokens]
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        return CancelQueryExecutionResponse.from_dict(res)
+
+    def execute_published_dashboard_query(self,
+                                          dashboard_name: str,
+                                          dashboard_revision_id: str,
+                                          *,
+                                          override_warehouse_id: Optional[str] = None):
+        """Execute a query for a published dashboard.
+        
+        :param dashboard_name: str
+          Dashboard name and revision_id are required to retrieve the PublishedDatasetDataModel, which
+          contains the list of datasets, warehouse_id, and embedded_credentials
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries.
+        
+        
+        """
+        body = {}
+        if dashboard_name is not None: body['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: body['dashboard_revision_id'] = dashboard_revision_id
+        if override_warehouse_id is not None: body['override_warehouse_id'] = override_warehouse_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        self._api.do('POST', '/api/2.0/lakeview-query/query/published', body=body, headers=headers)
+
+    def poll_published_query_status(self,
+                                    dashboard_name: str,
+                                    dashboard_revision_id: str,
+                                    *,
+                                    tokens: Optional[List[str]] = None) -> PollQueryStatusResponse:
+        """Poll the results for the a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`PollQueryStatusResponse`
+        """
+
+        query = {}
+        if dashboard_name is not None: query['dashboard_name'] = dashboard_name
+        if dashboard_revision_id is not None: query['dashboard_revision_id'] = dashboard_revision_id
+        if tokens is not None: query['tokens'] = [v for v in tokens]
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/lakeview-query/query/published', query=query, headers=headers)
+        return PollQueryStatusResponse.from_dict(res)
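
Tying the three endpoints together, a hedged sketch of the execute-then-poll flow; `w.query_execution` is an assumed property name, and the dashboard name and revision id are placeholders:

```python
w.query_execution.execute_published_dashboard_query(
    dashboard_name='<dashboard-name>',
    dashboard_revision_id='<revision-id>')

status = w.query_execution.poll_published_query_status(
    dashboard_name='<dashboard-name>',
    dashboard_revision_id='<revision-id>')
for item in status.data or []:
    s = item.status
    if s.success:
        print('done; fetch rows with data_token', s.success.data_token)
    elif s.pending:
        print('still running; poll again with', s.pending.data_token)
    elif s.canceled or s.closed:
        print('query is no longer running')
```
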
diff --git a/databricks/sdk/service/files.py b/databricks/sdk/service/files.py
index 255e1c1a0..99c252298 100755
--- a/databricks/sdk/service/files.py
+++ b/databricks/sdk/service/files.py
@@ -28,6 +28,13 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddBlock into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data is not None: body['data'] = self.data
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddBlock:
         """Deserializes the AddBlock from a dictionary."""
@@ -42,6 +49,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddBlockResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddBlockResponse:
         """Deserializes the AddBlockResponse from a dictionary."""
@@ -59,6 +71,12 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Close into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Close:
         """Deserializes the Close from a dictionary."""
@@ -73,6 +91,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloseResponse:
         """Deserializes the CloseResponse from a dictionary."""
@@ -94,6 +117,13 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Create into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Create:
         """Deserializes the Create from a dictionary."""
@@ -108,6 +138,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateDirectoryResponse:
         """Deserializes the CreateDirectoryResponse from a dictionary."""
@@ -126,6 +161,12 @@ def as_dict(self) -> dict:
         if self.handle is not None: body['handle'] = self.handle
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.handle is not None: body['handle'] = self.handle
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -148,6 +189,13 @@ def as_dict(self) -> dict:
         if self.recursive is not None: body['recursive'] = self.recursive
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Delete into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.recursive is not None: body['recursive'] = self.recursive
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
@@ -162,6 +210,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDirectoryResponse:
         """Deserializes the DeleteDirectoryResponse from a dictionary."""
@@ -176,6 +229,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -209,6 +267,16 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DirectoryEntry into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_directory is not None: body['is_directory'] = self.is_directory
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.name is not None: body['name'] = self.name
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectoryEntry:
         """Deserializes the DirectoryEntry from a dictionary."""
@@ -238,6 +306,15 @@ def as_dict(self) -> dict:
         if self.last_modified is not None: body['last-modified'] = self.last_modified
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DownloadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content_length is not None: body['content-length'] = self.content_length
+        if self.content_type is not None: body['content-type'] = self.content_type
+        if self.contents: body['contents'] = self.contents
+        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DownloadResponse:
         """Deserializes the DownloadResponse from a dictionary."""
@@ -270,6 +347,15 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_dir is not None: body['is_dir'] = self.is_dir
+        if self.modification_time is not None: body['modification_time'] = self.modification_time
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -287,6 +373,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetDirectoryMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetDirectoryMetadataResponse:
         """Deserializes the GetDirectoryMetadataResponse from a dictionary."""
@@ -309,6 +400,14 @@ def as_dict(self) -> dict:
         if self.last_modified is not None: body['last-modified'] = self.last_modified
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content_length is not None: body['content-length'] = self.content_length
+        if self.content_type is not None: body['content-type'] = self.content_type
+        if self.last_modified is not None: body['last-modified'] = self.last_modified
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetadataResponse:
         """Deserializes the GetMetadataResponse from a dictionary."""
@@ -332,6 +431,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDirectoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
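# A minimal sketch of the shallow/deep contrast introduced here, using classes from
# this module: as_dict() recurses into nested objects via their own as_dict(), while
# as_shallow_dict() keeps them as-is (results below follow the serializers above).
#
#     entry = DirectoryEntry(path='/tmp/a.txt', is_directory=False)
#     resp = ListDirectoryResponse(contents=[entry])
#     resp.as_dict()          # {'contents': [{'is_directory': False, 'path': '/tmp/a.txt'}]}
#     resp.as_shallow_dict()  # {'contents': [entry]} - DirectoryEntry left unserialized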
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListDirectoryResponse:
         """Deserializes the ListDirectoryResponse from a dictionary."""
@@ -350,6 +456,12 @@ def as_dict(self) -> dict:
         if self.files: body['files'] = [v.as_dict() for v in self.files]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.files: body['files'] = self.files
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListStatusResponse:
         """Deserializes the ListStatusResponse from a dictionary."""
@@ -367,6 +479,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkDirs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkDirs:
         """Deserializes the MkDirs from a dictionary."""
@@ -381,6 +499,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkDirsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkDirsResponse:
         """Deserializes the MkDirsResponse from a dictionary."""
@@ -402,6 +525,13 @@ def as_dict(self) -> dict:
         if self.source_path is not None: body['source_path'] = self.source_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Move into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_path is not None: body['destination_path'] = self.destination_path
+        if self.source_path is not None: body['source_path'] = self.source_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Move:
         """Deserializes the Move from a dictionary."""
@@ -416,6 +546,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MoveResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MoveResponse:
         """Deserializes the MoveResponse from a dictionary."""
@@ -441,6 +576,14 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Put into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents is not None: body['contents'] = self.contents
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Put:
         """Deserializes the Put from a dictionary."""
@@ -457,6 +600,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutResponse:
         """Deserializes the PutResponse from a dictionary."""
@@ -479,6 +627,13 @@ def as_dict(self) -> dict:
         if self.data is not None: body['data'] = self.data
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bytes_read is not None: body['bytes_read'] = self.bytes_read
+        if self.data is not None: body['data'] = self.data
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReadResponse:
         """Deserializes the ReadResponse from a dictionary."""
@@ -493,6 +648,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UploadResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UploadResponse:
         """Deserializes the UploadResponse from a dictionary."""
@@ -765,9 +925,12 @@ class FilesAPI:
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
     
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD,
-    PUT, and DELETE to manage files and directories specified using their URI path. The path is always
-    absolute.
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+    
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
     
     [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html"""
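# A hedged sketch of the opt-in described above; the config attribute name comes
# from the docstring, and accepting it as a WorkspaceClient keyword argument is
# assumed rather than shown in this patch:
#
#     export DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True   # or, in code:
#
#     from databricks.sdk import WorkspaceClient
#     w = WorkspaceClient(enable_experimental_files_api_client=True)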
 
@@ -833,8 +996,8 @@ def delete_directory(self, directory_path: str):
     def download(self, file_path: str) -> DownloadResponse:
         """Download a file.
         
-        Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard HTTP file
-        download, not a JSON RPC.
+        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
         
         :param file_path: str
           The absolute path of the file.
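# Sketch of a ranged download per the headers mentioned above. The generated
# download() helper does not take request headers, so this drops to the raw
# ApiClient (the path is hypothetical, and raw=True returning the unparsed
# response body is an assumption):
#
#     res = w.api_client.do('GET', '/api/2.0/fs/files/Volumes/main/default/vol/a.bin',
#                           headers={'Range': 'bytes=0-1023'}, raw=True)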
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index f1c56a1a9..b841bec8b 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -38,6 +38,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlRequest:
         """Deserializes the AccessControlRequest from a dictionary."""
@@ -75,6 +85,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse:
         """Deserializes the AccessControlResponse from a dictionary."""
@@ -85,6 +106,58 @@ def from_dict(cls, d: Dict[str, any]) -> AccessControlResponse:
                    user_name=d.get('user_name', None))
 
 
+@dataclass
+class Actor:
+    """represents an identity trying to access a resource - user or a service principal group can be a
+    principal of a permission set assignment but an actor is always a user or a service principal"""
+
+    actor_id: Optional[int] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the Actor into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Actor into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.actor_id is not None: body['actor_id'] = self.actor_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> Actor:
+        """Deserializes the Actor from a dictionary."""
+        return cls(actor_id=d.get('actor_id', None))
+
+
+@dataclass
+class CheckPolicyResponse:
+    consistency_token: ConsistencyToken
+
+    is_permitted: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the CheckPolicyResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.consistency_token: body['consistency_token'] = self.consistency_token.as_dict()
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CheckPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.consistency_token: body['consistency_token'] = self.consistency_token
+        if self.is_permitted is not None: body['is_permitted'] = self.is_permitted
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CheckPolicyResponse:
+        """Deserializes the CheckPolicyResponse from a dictionary."""
+        return cls(consistency_token=_from_dict(d, 'consistency_token', ConsistencyToken),
+                   is_permitted=d.get('is_permitted', None))
+
+
 @dataclass
 class ComplexValue:
     display: Optional[str] = None
@@ -107,6 +180,16 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplexValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display is not None: body['display'] = self.display
+        if self.primary is not None: body['primary'] = self.primary
+        if self.ref is not None: body['$ref'] = self.ref
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplexValue:
         """Deserializes the ComplexValue from a dictionary."""
@@ -117,6 +200,28 @@ def from_dict(cls, d: Dict[str, any]) -> ComplexValue:
                    value=d.get('value', None))
 
 
+@dataclass
+class ConsistencyToken:
+    value: str
+
+    def as_dict(self) -> dict:
+        """Serializes the ConsistencyToken into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConsistencyToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ConsistencyToken:
+        """Deserializes the ConsistencyToken from a dictionary."""
+        return cls(value=d.get('value', None))
+
+
 @dataclass
 class DeleteResponse:
 
@@ -125,6 +230,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -139,6 +249,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWorkspacePermissionAssignmentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWorkspacePermissionAssignmentResponse:
         """Deserializes the DeleteWorkspacePermissionAssignmentResponse from a dictionary."""
@@ -155,6 +270,12 @@ def as_dict(self) -> dict:
         if self.roles: body['roles'] = [v.as_dict() for v in self.roles]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetAssignableRolesForResourceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.roles: body['roles'] = self.roles
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetAssignableRolesForResourceResponse:
         """Deserializes the GetAssignableRolesForResourceResponse from a dictionary."""
@@ -172,6 +293,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPasswordPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPasswordPermissionLevelsResponse:
         """Deserializes the GetPasswordPermissionLevelsResponse from a dictionary."""
@@ -189,6 +316,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPermissionLevelsResponse:
         """Deserializes the GetPermissionLevelsResponse from a dictionary."""
@@ -216,6 +349,13 @@ def as_dict(self) -> dict:
         if self.role is not None: body['role'] = self.role
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GrantRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principals: body['principals'] = self.principals
+        if self.role is not None: body['role'] = self.role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GrantRule:
         """Deserializes the GrantRule from a dictionary."""
@@ -265,6 +405,20 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Group into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.members: body['members'] = self.members
+        if self.meta: body['meta'] = self.meta
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Group:
         """Deserializes the Group from a dictionary."""
@@ -311,6 +465,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListGroupsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListGroupsResponse:
         """Deserializes the ListGroupsResponse from a dictionary."""
@@ -353,6 +517,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListServicePrincipalResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalResponse:
         """Deserializes the ListServicePrincipalResponse from a dictionary."""
@@ -396,6 +570,16 @@ def as_dict(self) -> dict:
         if self.total_results is not None: body['totalResults'] = self.total_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListUsersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items_per_page is not None: body['itemsPerPage'] = self.items_per_page
+        if self.resources: body['Resources'] = self.resources
+        if self.schemas: body['schemas'] = self.schemas
+        if self.start_index is not None: body['startIndex'] = self.start_index
+        if self.total_results is not None: body['totalResults'] = self.total_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUsersResponse:
         """Deserializes the ListUsersResponse from a dictionary."""
@@ -430,6 +614,16 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigratePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.from_workspace_group_name is not None:
+            body['from_workspace_group_name'] = self.from_workspace_group_name
+        if self.size is not None: body['size'] = self.size
+        if self.to_account_group_name is not None: body['to_account_group_name'] = self.to_account_group_name
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsRequest:
         """Deserializes the MigratePermissionsRequest from a dictionary."""
@@ -450,6 +644,12 @@ def as_dict(self) -> dict:
         if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MigratePermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions_migrated is not None: body['permissions_migrated'] = self.permissions_migrated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MigratePermissionsResponse:
         """Deserializes the MigratePermissionsResponse from a dictionary."""
@@ -471,6 +671,13 @@ def as_dict(self) -> dict:
         if self.given_name is not None: body['givenName'] = self.given_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Name into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.family_name is not None: body['familyName'] = self.family_name
+        if self.given_name is not None: body['givenName'] = self.given_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Name:
         """Deserializes the Name from a dictionary."""
@@ -494,6 +701,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ObjectPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectPermissions:
         """Deserializes the ObjectPermissions from a dictionary."""
@@ -520,6 +735,14 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartialUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.operations: body['Operations'] = self.operations
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartialUpdate:
         """Deserializes the PartialUpdate from a dictionary."""
@@ -552,6 +775,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlRequest:
         """Deserializes the PasswordAccessControlRequest from a dictionary."""
@@ -589,6 +822,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordAccessControlResponse:
         """Deserializes the PasswordAccessControlResponse from a dictionary."""
@@ -616,6 +860,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermission:
         """Deserializes the PasswordPermission from a dictionary."""
@@ -647,6 +899,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissions:
         """Deserializes the PasswordPermissions from a dictionary."""
@@ -670,6 +930,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsDescription:
         """Deserializes the PasswordPermissionsDescription from a dictionary."""
@@ -688,6 +955,12 @@ def as_dict(self) -> dict:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PasswordPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PasswordPermissionsRequest:
         """Deserializes the PasswordPermissionsRequest from a dictionary."""
@@ -713,6 +986,14 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Patch into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.op is not None: body['op'] = self.op
+        if self.path is not None: body['path'] = self.path
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Patch:
         """Deserializes the Patch from a dictionary."""
@@ -735,6 +1016,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchResponse:
         """Deserializes the PatchResponse from a dictionary."""
@@ -763,6 +1049,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Permission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Permission:
         """Deserializes the Permission from a dictionary."""
@@ -793,6 +1087,14 @@ def as_dict(self) -> dict:
         if self.principal: body['principal'] = self.principal.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error is not None: body['error'] = self.error
+        if self.permissions: body['permissions'] = self.permissions
+        if self.principal: body['principal'] = self.principal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignment:
         """Deserializes the PermissionAssignment from a dictionary."""
@@ -813,6 +1115,12 @@ def as_dict(self) -> dict:
             body['permission_assignments'] = [v.as_dict() for v in self.permission_assignments]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionAssignments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_assignments: body['permission_assignments'] = self.permission_assignments
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionAssignments:
         """Deserializes the PermissionAssignments from a dictionary."""
@@ -855,6 +1163,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionOutput:
         """Deserializes the PermissionOutput from a dictionary."""
@@ -876,6 +1191,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsDescription:
         """Deserializes the PermissionsDescription from a dictionary."""
@@ -904,6 +1226,14 @@ def as_dict(self) -> dict:
         if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.request_object_id is not None: body['request_object_id'] = self.request_object_id
+        if self.request_object_type is not None: body['request_object_type'] = self.request_object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PermissionsRequest:
         """Deserializes the PermissionsRequest from a dictionary."""
@@ -942,6 +1272,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrincipalOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput:
         """Deserializes the PrincipalOutput from a dictionary."""
@@ -952,6 +1293,49 @@ def from_dict(cls, d: Dict[str, any]) -> PrincipalOutput:
                    user_name=d.get('user_name', None))
 
 
+class RequestAuthzIdentity(Enum):
+    """Defines the identity to be used for authZ of the request on the server side. See one pager for
+    for more information: http://go/acl/service-identity"""
+
+    REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY = 'REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY'
+    REQUEST_AUTHZ_IDENTITY_USER_CONTEXT = 'REQUEST_AUTHZ_IDENTITY_USER_CONTEXT'
+
+
+@dataclass
+class ResourceInfo:
+    id: str
+    """Id of the current resource."""
+
+    legacy_acl_path: Optional[str] = None
+    """The legacy acl path of the current resource."""
+
+    parent_resource_info: Optional[ResourceInfo] = None
+    """Parent resource info for the current resource. The parent may have another parent."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ResourceInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
+        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResourceInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.legacy_acl_path is not None: body['legacy_acl_path'] = self.legacy_acl_path
+        if self.parent_resource_info: body['parent_resource_info'] = self.parent_resource_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ResourceInfo:
+        """Deserializes the ResourceInfo from a dictionary."""
+        return cls(id=d.get('id', None),
+                   legacy_acl_path=d.get('legacy_acl_path', None),
+                   parent_resource_info=_from_dict(d, 'parent_resource_info', ResourceInfo))
+
+
 @dataclass
 class ResourceMeta:
     resource_type: Optional[str] = None
@@ -964,6 +1348,12 @@ def as_dict(self) -> dict:
         if self.resource_type is not None: body['resourceType'] = self.resource_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResourceMeta into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resource_type is not None: body['resourceType'] = self.resource_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResourceMeta:
         """Deserializes the ResourceMeta from a dictionary."""
@@ -981,6 +1371,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Role into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Role:
         """Deserializes the Role from a dictionary."""
@@ -1005,6 +1401,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RuleSetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.grant_rules: body['grant_rules'] = self.grant_rules
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetResponse:
         """Deserializes the RuleSetResponse from a dictionary."""
@@ -1032,6 +1436,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RuleSetUpdateRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.grant_rules: body['grant_rules'] = self.grant_rules
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RuleSetUpdateRequest:
         """Deserializes the RuleSetUpdateRequest from a dictionary."""
@@ -1084,6 +1496,20 @@ def as_dict(self) -> dict:
         if self.schemas: body['schemas'] = [v.value for v in self.schemas]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServicePrincipal into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active is not None: body['active'] = self.active
+        if self.application_id is not None: body['applicationId'] = self.application_id
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServicePrincipal:
         """Deserializes the ServicePrincipal from a dictionary."""
@@ -1111,6 +1537,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -1131,6 +1562,13 @@ def as_dict(self) -> dict:
         if self.rule_set: body['rule_set'] = self.rule_set.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRuleSetRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rule_set: body['rule_set'] = self.rule_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRuleSetRequest:
         """Deserializes the UpdateRuleSetRequest from a dictionary."""
@@ -1160,6 +1598,14 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceAssignments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions: body['permissions'] = self.permissions
+        if self.principal_id is not None: body['principal_id'] = self.principal_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceAssignments:
         """Deserializes the UpdateWorkspaceAssignments from a dictionary."""
@@ -1225,6 +1671,22 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['userName'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the User into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active is not None: body['active'] = self.active
+        if self.display_name is not None: body['displayName'] = self.display_name
+        if self.emails: body['emails'] = self.emails
+        if self.entitlements: body['entitlements'] = self.entitlements
+        if self.external_id is not None: body['externalId'] = self.external_id
+        if self.groups: body['groups'] = self.groups
+        if self.id is not None: body['id'] = self.id
+        if self.name: body['name'] = self.name
+        if self.roles: body['roles'] = self.roles
+        if self.schemas: body['schemas'] = self.schemas
+        if self.user_name is not None: body['userName'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
@@ -1265,12 +1727,59 @@ def as_dict(self) -> dict:
         if self.permissions: body['permissions'] = [v.as_dict() for v in self.permissions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspacePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permissions: body['permissions'] = self.permissions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspacePermissions:
         """Deserializes the WorkspacePermissions from a dictionary."""
         return cls(permissions=_repeated_dict(d, 'permissions', PermissionOutput))
 
 
+class AccessControlAPI:
+    """Rule based Access Control for Databricks Resources."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def check_policy(self,
+                     actor: Actor,
+                     permission: str,
+                     resource: str,
+                     consistency_token: ConsistencyToken,
+                     authz_identity: RequestAuthzIdentity,
+                     *,
+                     resource_info: Optional[ResourceInfo] = None) -> CheckPolicyResponse:
+        """Check access policy to a resource.
+        
+        :param actor: :class:`Actor`
+        :param permission: str
+        :param resource: str
+          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+        :param consistency_token: :class:`ConsistencyToken`
+        :param authz_identity: :class:`RequestAuthzIdentity`
+        :param resource_info: :class:`ResourceInfo` (optional)
+        
+        :returns: :class:`CheckPolicyResponse`
+        """
+
+        query = {}
+        if actor is not None: query['actor'] = actor.as_dict()
+        if authz_identity is not None: query['authz_identity'] = authz_identity.value
+        if consistency_token is not None: query['consistency_token'] = consistency_token.as_dict()
+        if permission is not None: query['permission'] = permission
+        if resource is not None: query['resource'] = resource
+        if resource_info is not None: query['resource_info'] = resource_info.as_dict()
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/access-control/check-policy-v2', query=query, headers=headers)
+        return CheckPolicyResponse.from_dict(res)
+
+
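# A hypothetical call sketch for check_policy above (the actor ID, token, and
# resource string are placeholders, not real values):
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
decision = AccessControlAPI(w.api_client).check_policy(
    actor=Actor(actor_id=12345),
    permission='servicePrincipal/use',
    resource='accounts/<account-id>/servicePrincipals/<sp-id>',
    consistency_token=ConsistencyToken(value='<token>'),
    authz_identity=RequestAuthzIdentity.REQUEST_AUTHZ_IDENTITY_USER_CONTEXT)
print(decision.is_permitted)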
 class AccountAccessControlAPI:
     """These APIs manage access rules on resources in an account. Currently, only grant rules are supported. A
     grant rule specifies a role assigned to a set of principals. A list of rules attached to a resource is
@@ -2643,7 +3152,8 @@ def set(self,
             access_control_list: Optional[List[AccessControlRequest]] = None) -> ObjectPermissions:
         """Set object permissions.
         
-        Sets permissions on an object. Objects can inherit permissions from their parent objects or root
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
         
         :param request_object_type: str
@@ -3205,7 +3715,8 @@ def set_permissions(
             access_control_list: Optional[List[PasswordAccessControlRequest]] = None) -> PasswordPermissions:
         """Set password permissions.
         
-        Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
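# Sketch of the replace semantics both docstring changes above describe, using the
# generic permissions API (object type and ID are placeholders; PermissionLevel is
# assumed to be the enum defined in this module):
#
#     w.permissions.set(request_object_type='jobs', request_object_id='<job-id>',
#                       access_control_list=[AccessControlRequest(
#                           user_name='user@example.com',
#                           permission_level=PermissionLevel.CAN_MANAGE)])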
         
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 56c4a838e..8220a0715 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -35,6 +35,11 @@ class BaseJob:
     Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
     on accessible budget policies of the run_as identity on job creation or modification."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
+    requests with `expand_tasks=true`."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
@@ -49,16 +54,30 @@ def as_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_time is not None: body['created_time'] = self.created_time
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseJob:
         """Deserializes the BaseJob from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
                    effective_budget_policy_id=d.get('effective_budget_policy_id', None),
+                   has_more=d.get('has_more', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))
 
@@ -92,6 +111,12 @@ class BaseRun:
     description: Optional[str] = None
     """Description of the run"""
 
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. contains at least 1 serverless task) or whether the value was
+    specifically overridden for the run (e.g. via RunNow)."""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
@@ -113,10 +138,16 @@ class BaseRun:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
+    :method:jobs/listruns requests with `expand_tasks=true`."""
+
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/getrun."""
 
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
@@ -187,7 +218,9 @@ class BaseRun:
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
-    `JobsGetOutput` to retrieve the run resutls."""
+    `JobsGetOutput` to retrieve the run results. If more than 100 tasks are available, you can
+    paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
+    root to determine if more results are available."""
 
     trigger: Optional[TriggerType] = None
     """The type of trigger that fired this run.
@@ -198,7 +231,8 @@ class BaseRun:
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     trigger_info: Optional[TriggerInfo] = None
     """Additional details about what triggered the run"""
@@ -212,9 +246,12 @@ def as_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = [v.as_dict() for v in self.job_parameters]
@@ -240,6 +277,46 @@ def as_dict(self) -> dict:
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.git_source: body['git_source'] = self.git_source
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.original_attempt_run_id is not None:
+            body['original_attempt_run_id'] = self.original_attempt_run_id
+        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.repair_history: body['repair_history'] = self.repair_history
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.run_type is not None: body['run_type'] = self.run_type
+        if self.schedule: body['schedule'] = self.schedule
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.tasks: body['tasks'] = self.tasks
+        if self.trigger is not None: body['trigger'] = self.trigger
+        if self.trigger_info: body['trigger_info'] = self.trigger_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseRun:
         """Deserializes the BaseRun from a dictionary."""
@@ -249,9 +326,11 @@ def from_dict(cls, d: Dict[str, any]) -> BaseRun:
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
                    description=d.get('description', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   has_more=d.get('has_more', None),
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
                    job_parameters=_repeated_dict(d, 'job_parameters', JobParameter),
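
# A minimal sketch of paginating a run's tasks via jobs/getrun, following the
# has_more/next_page_token contract described above. The `page_token` argument
# on get_run is an API 2.2 assumption; `run_id` is a placeholder.
run = w.jobs.get_run(run_id=run_id)
tasks = list(run.tasks or [])
while run.next_page_token:
    run = w.jobs.get_run(run_id=run_id, page_token=run.next_page_token)
    tasks.extend(run.tasks or [])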
@@ -292,6 +371,13 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelAllRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_queued_runs is not None: body['all_queued_runs'] = self.all_queued_runs
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelAllRuns:
         """Deserializes the CancelAllRuns from a dictionary."""
@@ -306,6 +392,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelAllRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelAllRunsResponse:
         """Deserializes the CancelAllRunsResponse from a dictionary."""
@@ -323,6 +414,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRun:
         """Deserializes the CancelRun from a dictionary."""
@@ -337,12 +434,162 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelRunResponse:
         """Deserializes the CancelRunResponse from a dictionary."""
         return cls()
 
 
+class CleanRoomTaskRunLifeCycleState(Enum):
+    """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to
+    remove coupling with the jobs API definition."""
+
+    BLOCKED = 'BLOCKED'
+    INTERNAL_ERROR = 'INTERNAL_ERROR'
+    PENDING = 'PENDING'
+    QUEUED = 'QUEUED'
+    RUNNING = 'RUNNING'
+    RUN_LIFE_CYCLE_STATE_UNSPECIFIED = 'RUN_LIFE_CYCLE_STATE_UNSPECIFIED'
+    SKIPPED = 'SKIPPED'
+    TERMINATED = 'TERMINATED'
+    TERMINATING = 'TERMINATING'
+    WAITING_FOR_RETRY = 'WAITING_FOR_RETRY'
+
+
+class CleanRoomTaskRunResultState(Enum):
+    """Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid
+    cyclic dependency."""
+
+    CANCELED = 'CANCELED'
+    DISABLED = 'DISABLED'
+    EVICTED = 'EVICTED'
+    EXCLUDED = 'EXCLUDED'
+    FAILED = 'FAILED'
+    MAXIMUM_CONCURRENT_RUNS_REACHED = 'MAXIMUM_CONCURRENT_RUNS_REACHED'
+    RUN_RESULT_STATE_UNSPECIFIED = 'RUN_RESULT_STATE_UNSPECIFIED'
+    SUCCESS = 'SUCCESS'
+    SUCCESS_WITH_FAILURES = 'SUCCESS_WITH_FAILURES'
+    TIMEDOUT = 'TIMEDOUT'
+    UPSTREAM_CANCELED = 'UPSTREAM_CANCELED'
+    UPSTREAM_EVICTED = 'UPSTREAM_EVICTED'
+    UPSTREAM_FAILED = 'UPSTREAM_FAILED'
+
+
+@dataclass
+class CleanRoomTaskRunState:
+    """Stores the run state of the clean rooms notebook task."""
+
+    life_cycle_state: Optional[CleanRoomTaskRunLifeCycleState] = None
+    """A value indicating the run's current lifecycle state. This field is always available in the
+    response."""
+
+    result_state: Optional[CleanRoomTaskRunResultState] = None
+    """A value indicating the run's result. This field is only available for terminal lifecycle states."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomTaskRunState into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state.value
+        if self.result_state is not None: body['result_state'] = self.result_state.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomTaskRunState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
+        if self.result_state is not None: body['result_state'] = self.result_state
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomTaskRunState:
+        """Deserializes the CleanRoomTaskRunState from a dictionary."""
+        return cls(life_cycle_state=_enum(d, 'life_cycle_state', CleanRoomTaskRunLifeCycleState),
+                   result_state=_enum(d, 'result_state', CleanRoomTaskRunResultState))
+
+
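# A minimal sketch contrasting the new as_shallow_dict with the existing
# as_dict, using the state class above: as_dict flattens enums to their
# `.value`, while as_shallow_dict keeps immediate attributes as-is.
state = CleanRoomTaskRunState(life_cycle_state=CleanRoomTaskRunLifeCycleState.RUNNING)
state.as_dict()          # {'life_cycle_state': 'RUNNING'}
state.as_shallow_dict()  # {'life_cycle_state': <CleanRoomTaskRunLifeCycleState.RUNNING>}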
+@dataclass
+class CleanRoomsNotebookTask:
+    clean_room_name: str
+    """The clean room that the notebook belongs to."""
+
+    notebook_name: str
+    """Name of the notebook being run."""
+
+    etag: Optional[str] = None
+    """Checksum to validate the freshness of the notebook resource (i.e. the notebook being run is the
+    latest version). It can be fetched by calling the :method:cleanroomassets/get API."""
+
+    notebook_base_parameters: Optional[Dict[str, str]] = None
+    """Base parameters to be used for the clean room notebook job."""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTask into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room_name is not None: body['clean_room_name'] = self.clean_room_name
+        if self.etag is not None: body['etag'] = self.etag
+        if self.notebook_base_parameters: body['notebook_base_parameters'] = self.notebook_base_parameters
+        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTask:
+        """Deserializes the CleanRoomsNotebookTask from a dictionary."""
+        return cls(clean_room_name=d.get('clean_room_name', None),
+                   etag=d.get('etag', None),
+                   notebook_base_parameters=d.get('notebook_base_parameters', None),
+                   notebook_name=d.get('notebook_name', None))
+
+
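# A minimal sketch constructing the new clean rooms notebook task; the clean
# room name, notebook name, and parameters are placeholders.
task = CleanRoomsNotebookTask(clean_room_name='my_clean_room',
                              notebook_name='shared_analysis',
                              notebook_base_parameters={'run_date': '2024-12-01'})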
+@dataclass
+class CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
+    clean_room_job_run_state: Optional[CleanRoomTaskRunState] = None
+    """The run state of the clean rooms notebook task."""
+
+    notebook_output: Optional[NotebookOutput] = None
+    """The notebook output for the clean room run"""
+
+    output_schema_info: Optional[OutputSchemaInfo] = None
+    """Information on how to access the output schema for the clean room run"""
+
+    def as_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.clean_room_job_run_state:
+            body['clean_room_job_run_state'] = self.clean_room_job_run_state.as_dict()
+        if self.notebook_output: body['notebook_output'] = self.notebook_output.as_dict()
+        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_room_job_run_state: body['clean_room_job_run_state'] = self.clean_room_job_run_state
+        if self.notebook_output: body['notebook_output'] = self.notebook_output
+        if self.output_schema_info: body['output_schema_info'] = self.output_schema_info
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput:
+        """Deserializes the CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput from a dictionary."""
+        return cls(clean_room_job_run_state=_from_dict(d, 'clean_room_job_run_state', CleanRoomTaskRunState),
+                   notebook_output=_from_dict(d, 'notebook_output', NotebookOutput),
+                   output_schema_info=_from_dict(d, 'output_schema_info', OutputSchemaInfo))
+
+
 @dataclass
 class ClusterInstance:
     cluster_id: Optional[str] = None
@@ -369,6 +616,13 @@ def as_dict(self) -> dict:
         if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterInstance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.spark_context_id is not None: body['spark_context_id'] = self.spark_context_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterInstance:
         """Deserializes the ClusterInstance from a dictionary."""
@@ -402,6 +656,15 @@ def as_dict(self) -> dict:
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterSpec:
         """Deserializes the ClusterSpec from a dictionary."""
@@ -446,6 +709,14 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConditionTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.op is not None: body['op'] = self.op
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConditionTask:
         """Deserializes the ConditionTask from a dictionary."""
@@ -482,6 +753,12 @@ def as_dict(self) -> dict:
         if self.pause_status is not None: body['pause_status'] = self.pause_status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Continuous into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Continuous:
         """Deserializes the Continuous from a dictionary."""
@@ -545,7 +822,8 @@ class CreateJob:
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/get."""
 
     max_concurrent_runs: Optional[int] = None
     """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -567,15 +845,18 @@ class CreateJob:
     parameters: Optional[List[JobParameterDefinition]] = None
     """Job-level parameter definitions"""
 
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    should be."""
+
     queue: Optional[QueueSettings] = None
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -587,7 +868,9 @@ class CreateJob:
     be added to the job."""
 
     tasks: Optional[List[Task]] = None
-    """A list of task specifications to be executed by this job."""
+    """A list of task specifications to be executed by this job. If more than 100 tasks are available,
+    you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
+    object root to determine if more results are available."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -620,6 +903,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -630,6 +914,36 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.continuous: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.description is not None: body['description'] = self.description
+        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.format is not None: body['format'] = self.format
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
+        if self.name is not None: body['name'] = self.name
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.parameters: body['parameters'] = self.parameters
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schedule: body['schedule'] = self.schedule
+        if self.tags: body['tags'] = self.tags
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.trigger: body['trigger'] = self.trigger
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
@@ -649,6 +963,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateJob:
                    name=d.get('name', None),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
                    parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
                    schedule=_from_dict(d, 'schedule', CronSchedule),
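
# A minimal sketch of passing the new performance_target on job creation. The
# PERFORMANCE_OPTIMIZED member is an assumption (the PerformanceTarget enum is
# not shown in this hunk), and the notebook path is a placeholder.
w.jobs.create(name='nightly-etl',
              tasks=[Task(task_key='main',
                          notebook_task=NotebookTask(notebook_path='/Repos/etl/main'))],
              performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED)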
@@ -672,6 +987,12 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateResponse:
         """Deserializes the CreateResponse from a dictionary."""
@@ -704,6 +1025,15 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.quartz_cron_expression is not None:
+            body['quartz_cron_expression'] = self.quartz_cron_expression
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronSchedule:
         """Deserializes the CronSchedule from a dictionary."""
@@ -728,6 +1058,13 @@ def as_dict(self) -> dict:
         if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbtOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifacts_headers: body['artifacts_headers'] = self.artifacts_headers
+        if self.artifacts_link is not None: body['artifacts_link'] = self.artifacts_link
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtOutput:
         """Deserializes the DbtOutput from a dictionary."""
@@ -784,6 +1121,18 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DbtTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.commands: body['commands'] = self.commands
+        if self.profiles_directory is not None: body['profiles_directory'] = self.profiles_directory
+        if self.project_directory is not None: body['project_directory'] = self.project_directory
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DbtTask:
         """Deserializes the DbtTask from a dictionary."""
@@ -807,6 +1156,12 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteJob:
         """Deserializes the DeleteJob from a dictionary."""
@@ -821,6 +1176,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -838,6 +1198,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
@@ -852,6 +1218,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
         """Deserializes the DeleteRunResponse from a dictionary."""
@@ -884,6 +1255,14 @@ def as_dict(self) -> dict:
         if self.previous_value is not None: body['previous_value'] = self.previous_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.field is not None: body['field'] = self.field
+        if self.new_value is not None: body['new_value'] = self.new_value
+        if self.previous_value is not None: body['previous_value'] = self.previous_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceForJobResponseJobClusterSettingsChange:
         """Deserializes the EnforcePolicyComplianceForJobResponseJobClusterSettingsChange from a dictionary."""
@@ -907,6 +1286,13 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceRequest:
         """Deserializes the EnforcePolicyComplianceRequest from a dictionary."""
@@ -938,6 +1324,14 @@ def as_dict(self) -> dict:
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnforcePolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_changes is not None: body['has_changes'] = self.has_changes
+        if self.job_cluster_changes: body['job_cluster_changes'] = self.job_cluster_changes
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnforcePolicyComplianceResponse:
         """Deserializes the EnforcePolicyComplianceResponse from a dictionary."""
@@ -964,6 +1358,12 @@ def as_dict(self) -> dict:
         if self.views: body['views'] = [v.as_dict() for v in self.views]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportRunOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.views: body['views'] = self.views
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportRunOutput:
         """Deserializes the ExportRunOutput from a dictionary."""
@@ -995,6 +1395,16 @@ def as_dict(self) -> dict:
             body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileArrivalTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.min_time_between_triggers_seconds is not None:
+            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
+        if self.url is not None: body['url'] = self.url
+        if self.wait_after_last_change_seconds is not None:
+            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileArrivalTriggerConfiguration:
         """Deserializes the FileArrivalTriggerConfiguration from a dictionary."""
@@ -1019,6 +1429,13 @@ def as_dict(self) -> dict:
         if self.task_run_stats: body['task_run_stats'] = self.task_run_stats.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message_stats: body['error_message_stats'] = self.error_message_stats
+        if self.task_run_stats: body['task_run_stats'] = self.task_run_stats
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachStats:
         """Deserializes the ForEachStats from a dictionary."""
@@ -1046,6 +1463,14 @@ def as_dict(self) -> dict:
         if self.task: body['task'] = self.task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.task: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTask:
         """Deserializes the ForEachTask from a dictionary."""
@@ -1073,6 +1498,14 @@ def as_dict(self) -> dict:
         if self.termination_category is not None: body['termination_category'] = self.termination_category
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTaskErrorMessageStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.termination_category is not None: body['termination_category'] = self.termination_category
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskErrorMessageStats:
         """Deserializes the ForEachTaskErrorMessageStats from a dictionary."""
@@ -1112,6 +1545,17 @@ def as_dict(self) -> dict:
         if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ForEachTaskTaskRunStats into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.active_iterations is not None: body['active_iterations'] = self.active_iterations
+        if self.completed_iterations is not None: body['completed_iterations'] = self.completed_iterations
+        if self.failed_iterations is not None: body['failed_iterations'] = self.failed_iterations
+        if self.scheduled_iterations is not None: body['scheduled_iterations'] = self.scheduled_iterations
+        if self.succeeded_iterations is not None: body['succeeded_iterations'] = self.succeeded_iterations
+        if self.total_iterations is not None: body['total_iterations'] = self.total_iterations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ForEachTaskTaskRunStats:
         """Deserializes the ForEachTaskTaskRunStats from a dictionary."""
@@ -1140,6 +1584,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetJobPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetJobPermissionLevelsResponse:
         """Deserializes the GetJobPermissionLevelsResponse from a dictionary."""
@@ -1166,6 +1616,13 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPolicyComplianceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPolicyComplianceResponse:
         """Deserializes the GetPolicyComplianceResponse from a dictionary."""
@@ -1207,6 +1664,12 @@ def as_dict(self) -> dict:
         if self.used_commit is not None: body['used_commit'] = self.used_commit
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GitSnapshot into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.used_commit is not None: body['used_commit'] = self.used_commit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSnapshot:
         """Deserializes the GitSnapshot from a dictionary."""
@@ -1261,6 +1724,18 @@ def as_dict(self) -> dict:
         if self.job_source: body['job_source'] = self.job_source.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GitSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_branch is not None: body['git_branch'] = self.git_branch
+        if self.git_commit is not None: body['git_commit'] = self.git_commit
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_snapshot: body['git_snapshot'] = self.git_snapshot
+        if self.git_tag is not None: body['git_tag'] = self.git_tag
+        if self.git_url is not None: body['git_url'] = self.git_url
+        if self.job_source: body['job_source'] = self.job_source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GitSource:
         """Deserializes the GitSource from a dictionary."""
@@ -1290,9 +1765,17 @@ class Job:
     Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
     on accessible budget policies of the run_as identity on job creation or modification."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the job has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/get endpoint. It is only relevant for API 2.2 :method:jobs/list
+    requests with `expand_tasks=true`."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""
 
+    next_page_token: Optional[str] = None
+    """A token that can be used to list the next page of sub-resources."""
+
     run_as_user_name: Optional[str] = None
     """The email of an active workspace user or the application ID of a service principal that the job
     runs as. This value can be changed by setting the `run_as` field when creating or updating a
@@ -1313,18 +1796,36 @@ def as_dict(self) -> dict:
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.effective_budget_policy_id is not None:
             body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.job_id is not None: body['job_id'] = self.job_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Job into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_time is not None: body['created_time'] = self.created_time
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.settings: body['settings'] = self.settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Job:
         """Deserializes the Job from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
                    effective_budget_policy_id=d.get('effective_budget_policy_id', None),
+                   has_more=d.get('has_more', None),
                    job_id=d.get('job_id', None),
+                   next_page_token=d.get('next_page_token', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
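
# A minimal sketch of walking a job's paginated sub-resources via the new
# next_page_token field. The `page_token` argument on jobs.get is an API 2.2
# assumption; `job_id` is a placeholder.
job = w.jobs.get(job_id=job_id)
tasks = list(job.settings.tasks or [])
while job.next_page_token:
    job = w.jobs.get(job_id=job_id, page_token=job.next_page_token)
    tasks.extend(job.settings.tasks or [])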
 
@@ -1353,6 +1854,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlRequest:
         """Deserializes the JobAccessControlRequest from a dictionary."""
@@ -1390,6 +1901,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobAccessControlResponse:
         """Deserializes the JobAccessControlResponse from a dictionary."""
@@ -1417,6 +1939,13 @@ def as_dict(self) -> dict:
         if self.new_cluster: body['new_cluster'] = self.new_cluster.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCluster:
         """Deserializes the JobCluster from a dictionary."""
@@ -1446,6 +1975,14 @@ def as_dict(self) -> dict:
         if self.violations: body['violations'] = self.violations
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobCompliance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_compliant is not None: body['is_compliant'] = self.is_compliant
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.violations: body['violations'] = self.violations
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobCompliance:
         """Deserializes the JobCompliance from a dictionary."""
@@ -1471,6 +2008,13 @@ def as_dict(self) -> dict:
         if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kind is not None: body['kind'] = self.kind
+        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobDeployment:
         """Deserializes the JobDeployment from a dictionary."""
@@ -1545,6 +2089,20 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobEmailNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEmailNotifications:
         """Deserializes the JobEmailNotifications from a dictionary."""
@@ -1573,6 +2131,13 @@ def as_dict(self) -> dict:
         if self.spec: body['spec'] = self.spec.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobEnvironment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.spec: body['spec'] = self.spec
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobEnvironment:
         """Deserializes the JobEnvironment from a dictionary."""
@@ -1599,6 +2164,15 @@ def as_dict(self) -> dict:
             body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobNotificationSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_canceled_runs is not None:
+            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobNotificationSettings:
         """Deserializes the JobNotificationSettings from a dictionary."""
@@ -1617,8 +2191,16 @@ class JobParameter:
     value: Optional[str] = None
     """The value used in the run"""
 
-    def as_dict(self) -> dict:
-        """Serializes the JobParameter into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the JobParameter into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.default is not None: body['default'] = self.default
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobParameter into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.default is not None: body['default'] = self.default
         if self.name is not None: body['name'] = self.name
@@ -1646,6 +2228,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobParameterDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default is not None: body['default'] = self.default
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobParameterDefinition:
         """Deserializes the JobParameterDefinition from a dictionary."""
@@ -1669,6 +2258,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermission:
         """Deserializes the JobPermission from a dictionary."""
@@ -1703,6 +2300,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissions:
         """Deserializes the JobPermissions from a dictionary."""
@@ -1725,6 +2330,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsDescription:
         """Deserializes the JobPermissionsDescription from a dictionary."""
@@ -1747,6 +2359,13 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.job_id is not None: body['job_id'] = self.job_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
         """Deserializes the JobPermissionsRequest from a dictionary."""
@@ -1756,11 +2375,10 @@ def from_dict(cls, d: Dict[str, any]) -> JobPermissionsRequest:
 
 @dataclass
 class JobRunAs:
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     service_principal_name: Optional[str] = None
     """Application ID of an active service principal. Setting this field requires the
@@ -1778,6 +2396,14 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobRunAs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobRunAs:
         """Deserializes the JobRunAs from a dictionary."""
@@ -1839,7 +2465,8 @@ class JobSettings:
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/get."""
 
     max_concurrent_runs: Optional[int] = None
     """An optional maximum allowed number of concurrent runs of the job. Set this value if you want to
@@ -1861,15 +2488,18 @@ class JobSettings:
     parameters: Optional[List[JobParameterDefinition]] = None
     """Job-level parameter definitions"""
 
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    should be."""
+
     queue: Optional[QueueSettings] = None
     """The queue settings of the job."""
 
     run_as: Optional[JobRunAs] = None
-    """Write-only setting. Specifies the user, service principal or group that the job/pipeline runs
-    as. If not specified, the job/pipeline runs as the user who created the job/pipeline.
+    """Write-only setting. Specifies the user or service principal that the job runs as. If not
+    specified, the job runs as the user who created the job.
     
-    Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not,
-    an error is thrown."""
+    Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown."""
 
     schedule: Optional[CronSchedule] = None
     """An optional periodic schedule for this job. The default behavior is that the job only runs when
@@ -1881,7 +2511,9 @@ class JobSettings:
     be added to the job."""
 
     tasks: Optional[List[Task]] = None
-    """A list of task specifications to be executed by this job."""
+    """A list of task specifications to be executed by this job. If more than 100 tasks are available,
+    you can paginate through them using :method:jobs/get. Use the `next_page_token` field at the
+    object root to determine if more results are available."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job. A value of `0` means no timeout."""
@@ -1912,6 +2544,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notification_settings: body['notification_settings'] = self.notification_settings.as_dict()
         if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.queue: body['queue'] = self.queue.as_dict()
         if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schedule: body['schedule'] = self.schedule.as_dict()
@@ -1922,6 +2555,35 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.continuous: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.description is not None: body['description'] = self.description
+        if self.edit_mode is not None: body['edit_mode'] = self.edit_mode
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.format is not None: body['format'] = self.format
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.max_concurrent_runs is not None: body['max_concurrent_runs'] = self.max_concurrent_runs
+        if self.name is not None: body['name'] = self.name
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.parameters: body['parameters'] = self.parameters
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schedule: body['schedule'] = self.schedule
+        if self.tags: body['tags'] = self.tags
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.trigger: body['trigger'] = self.trigger
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
@@ -1940,6 +2602,7 @@ def from_dict(cls, d: Dict[str, any]) -> JobSettings:
                    name=d.get('name', None),
                    notification_settings=_from_dict(d, 'notification_settings', JobNotificationSettings),
                    parameters=_repeated_dict(d, 'parameters', JobParameterDefinition),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    queue=_from_dict(d, 'queue', QueueSettings),
                    run_as=_from_dict(d, 'run_as', JobRunAs),
                    schedule=_from_dict(d, 'schedule', CronSchedule),
@@ -1979,6 +2642,15 @@ def as_dict(self) -> dict:
         if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dirty_state is not None: body['dirty_state'] = self.dirty_state
+        if self.import_from_git_branch is not None:
+            body['import_from_git_branch'] = self.import_from_git_branch
+        if self.job_config_path is not None: body['job_config_path'] = self.job_config_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSource:
         """Deserializes the JobSource from a dictionary."""
@@ -2006,11 +2678,11 @@ class JobsHealthMetric(Enum):
     
     * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
     An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
-    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
-    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
-    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate
+    of the maximum consumer delay across all streams. This metric is in Public Preview. *
     `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
-    streams. This metric is in Private Preview."""
+    streams. This metric is in Public Preview."""
 
     RUN_DURATION_SECONDS = 'RUN_DURATION_SECONDS'
     STREAMING_BACKLOG_BYTES = 'STREAMING_BACKLOG_BYTES'
@@ -2032,11 +2704,11 @@ class JobsHealthRule:
     
     * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`:
     An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric
-    is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
-    across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An
-    estimate of the maximum consumer delay across all streams. This metric is in Private Preview. *
+    is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag
+    across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate
+    of the maximum consumer delay across all streams. This metric is in Public Preview. *
     `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all
-    streams. This metric is in Private Preview."""
+    streams. This metric is in Public Preview."""
 
     op: JobsHealthOperator
     """Specifies the operator used to compare the health metric value with the specified threshold."""
@@ -2052,6 +2724,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobsHealthRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metric is not None: body['metric'] = self.metric
+        if self.op is not None: body['op'] = self.op
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRule:
         """Deserializes the JobsHealthRule from a dictionary."""
@@ -2072,6 +2752,12 @@ def as_dict(self) -> dict:
         if self.rules: body['rules'] = [v.as_dict() for v in self.rules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobsHealthRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.rules: body['rules'] = self.rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobsHealthRules:
         """Deserializes the JobsHealthRules from a dictionary."""
@@ -2099,6 +2785,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListJobComplianceForPolicyResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jobs: body['jobs'] = self.jobs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobComplianceForPolicyResponse:
         """Deserializes the ListJobComplianceForPolicyResponse from a dictionary."""
@@ -2132,6 +2826,15 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListJobsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.jobs: body['jobs'] = self.jobs
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListJobsResponse:
         """Deserializes the ListJobsResponse from a dictionary."""
@@ -2167,6 +2870,15 @@ def as_dict(self) -> dict:
         if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRunsResponse:
         """Deserializes the ListRunsResponse from a dictionary."""
@@ -2195,6 +2907,13 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result is not None: body['result'] = self.result
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookOutput:
         """Deserializes the NotebookOutput from a dictionary."""
@@ -2247,6 +2966,15 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        if self.notebook_path is not None: body['notebook_path'] = self.notebook_path
+        if self.source is not None: body['source'] = self.source
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookTask:
         """Deserializes the NotebookTask from a dictionary."""
@@ -2256,12 +2984,57 @@ def from_dict(cls, d: Dict[str, any]) -> NotebookTask:
                    warehouse_id=d.get('warehouse_id', None))
 
 
+@dataclass
+class OutputSchemaInfo:
+    """Stores the catalog name, schema name, and the output schema expiration time for the clean room
+    run."""
+
+    catalog_name: Optional[str] = None
+
+    expiration_time: Optional[int] = None
+    """The expiration time for the output schema as a Unix timestamp in milliseconds."""
+
+    schema_name: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the OutputSchemaInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OutputSchemaInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OutputSchemaInfo:
+        """Deserializes the OutputSchemaInfo from a dictionary."""
+        return cls(catalog_name=d.get('catalog_name', None),
+                   expiration_time=d.get('expiration_time', None),
+                   schema_name=d.get('schema_name', None))
+
+
 class PauseStatus(Enum):
 
     PAUSED = 'PAUSED'
     UNPAUSED = 'UNPAUSED'
 
 
+class PerformanceTarget(Enum):
+    """PerformanceTarget defines how performant (lower latency) or cost efficient the execution of run
+    on serverless compute should be. The performance mode on the job or pipeline should map to a
+    performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget)."""
+
+    COST_OPTIMIZED = 'COST_OPTIMIZED'
+    PERFORMANCE_OPTIMIZED = 'PERFORMANCE_OPTIMIZED'
+
+
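# Editorial sketch of the new enum in use: prefer cost efficiency for a serverless
# job by setting `performance_target` on its settings. Values are illustrative.
from databricks.sdk.service.jobs import JobSettings, PerformanceTarget

settings = JobSettings(name='nightly-etl', performance_target=PerformanceTarget.COST_OPTIMIZED)
assert settings.as_dict()['performance_target'] == 'COST_OPTIMIZED'
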
 @dataclass
 class PeriodicTriggerConfiguration:
     interval: int
@@ -2277,6 +3050,13 @@ def as_dict(self) -> dict:
         if self.unit is not None: body['unit'] = self.unit.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PeriodicTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PeriodicTriggerConfiguration:
         """Deserializes the PeriodicTriggerConfiguration from a dictionary."""
@@ -2302,6 +3082,12 @@ def as_dict(self) -> dict:
         if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineParams:
         """Deserializes the PipelineParams from a dictionary."""
@@ -2323,6 +3109,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTask:
         """Deserializes the PipelineTask from a dictionary."""
@@ -2355,6 +3148,15 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PythonWheelTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entry_point is not None: body['entry_point'] = self.entry_point
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.package_name is not None: body['package_name'] = self.package_name
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PythonWheelTask:
         """Deserializes the PythonWheelTask from a dictionary."""
@@ -2384,6 +3186,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueueDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueDetails:
         """Deserializes the QueueDetails from a dictionary."""
@@ -2413,6 +3222,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueueSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueueSettings:
         """Deserializes the QueueSettings from a dictionary."""
@@ -2454,6 +3269,18 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairHistoryItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.id is not None: body['id'] = self.id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.task_run_ids: body['task_run_ids'] = self.task_run_ids
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairHistoryItem:
         """Deserializes the RepairHistoryItem from a dictionary."""
@@ -2489,8 +3316,9 @@ class RepairRun:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -2589,6 +3417,26 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.latest_repair_id is not None: body['latest_repair_id'] = self.latest_repair_id
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.rerun_all_failed_tasks is not None:
+            body['rerun_all_failed_tasks'] = self.rerun_all_failed_tasks
+        if self.rerun_dependent_tasks is not None: body['rerun_dependent_tasks'] = self.rerun_dependent_tasks
+        if self.rerun_tasks: body['rerun_tasks'] = self.rerun_tasks
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRun:
         """Deserializes the RepairRun from a dictionary."""
@@ -2622,6 +3470,12 @@ def as_dict(self) -> dict:
         if self.repair_id is not None: body['repair_id'] = self.repair_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepairRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.repair_id is not None: body['repair_id'] = self.repair_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepairRunResponse:
         """Deserializes the RepairRunResponse from a dictionary."""
@@ -2646,6 +3500,13 @@ def as_dict(self) -> dict:
         if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResetJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResetJob:
         """Deserializes the ResetJob from a dictionary."""
@@ -2660,6 +3521,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResetResponse:
         """Deserializes the ResetResponse from a dictionary."""
@@ -2679,6 +3545,13 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedConditionTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedConditionTaskValues:
         """Deserializes the ResolvedConditionTaskValues from a dictionary."""
@@ -2695,6 +3568,12 @@ def as_dict(self) -> dict:
         if self.commands: body['commands'] = [v for v in self.commands]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedDbtTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.commands: body['commands'] = self.commands
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedDbtTaskValues:
         """Deserializes the ResolvedDbtTaskValues from a dictionary."""
@@ -2711,6 +3590,12 @@ def as_dict(self) -> dict:
         if self.base_parameters: body['base_parameters'] = self.base_parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedNotebookTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.base_parameters: body['base_parameters'] = self.base_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedNotebookTaskValues:
         """Deserializes the ResolvedNotebookTaskValues from a dictionary."""
@@ -2727,6 +3612,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = self.parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedParamPairValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedParamPairValues:
         """Deserializes the ResolvedParamPairValues from a dictionary."""
@@ -2746,6 +3637,13 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedPythonWheelTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.named_parameters: body['named_parameters'] = self.named_parameters
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedPythonWheelTaskValues:
         """Deserializes the ResolvedPythonWheelTaskValues from a dictionary."""
@@ -2765,6 +3663,13 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = self.parameters
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedRunJobTaskValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedRunJobTaskValues:
         """Deserializes the ResolvedRunJobTaskValues from a dictionary."""
@@ -2781,6 +3686,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedStringParamsValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedStringParamsValues:
         """Deserializes the ResolvedStringParamsValues from a dictionary."""
@@ -2824,6 +3735,21 @@ def as_dict(self) -> dict:
         if self.sql_task: body['sql_task'] = self.sql_task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResolvedValues into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.simulation_task: body['simulation_task'] = self.simulation_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResolvedValues:
         """Deserializes the ResolvedValues from a dictionary."""
@@ -2870,6 +3796,12 @@ class Run:
     description: Optional[str] = None
     """Description of the run"""
 
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. it contains at least one serverless task) or whether the
+    value was specifically overridden for the run (e.g. via RunNow)."""
+
     end_time: Optional[int] = None
     """The time at which this run ended in epoch milliseconds (milliseconds since 1/1/1970 UTC). This
     field is set to 0 if the job is still running."""
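# Hedged sketch: after fetching a run, `effective_performance_target` reports the
# target actually used, which may differ from the configured `performance_target`
# (for example when a RunNow override was supplied). `run_id` is a placeholder.
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import PerformanceTarget

w = WorkspaceClient()
run = w.jobs.get_run(run_id=456)
if run.effective_performance_target == PerformanceTarget.COST_OPTIMIZED:
    print('run executed in cost-optimized mode')
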
@@ -2891,13 +3823,19 @@ class Run:
     Note: dbt and SQL File tasks support only version-controlled sources. If dbt or SQL File tasks
     are used, `git_source` must be defined on the job."""
 
+    has_more: Optional[bool] = None
+    """Indicates if the run has more sub-resources (`tasks`, `job_clusters`) that are not shown. They
+    can be accessed via :method:jobs/getrun endpoint. It is only relevant for API 2.2
+    :method:jobs/listruns requests with `expand_tasks=true`."""
+
     iterations: Optional[List[RunTask]] = None
     """Only populated by for-each iterations. The parent for-each task is located in tasks array."""
 
     job_clusters: Optional[List[JobCluster]] = None
     """A list of job cluster specifications that can be shared and reused by tasks of this job.
     Libraries cannot be declared in a shared job cluster. You must declare dependent libraries in
-    task settings."""
+    task settings. If more than 100 job clusters are available, you can paginate through them using
+    :method:jobs/getrun."""
 
     job_id: Optional[int] = None
     """The canonical identifier of the job that contains this run."""
@@ -2974,7 +3912,9 @@ class Run:
 
     tasks: Optional[List[RunTask]] = None
     """The list of tasks performed by the run. Each task has its own `run_id` which you can use to call
-    `JobsGetOutput` to retrieve the run resutls."""
+    `JobsGetOutput` to retrieve the run resutls. If more than 100 tasks are available, you can
+    paginate through them using :method:jobs/getrun. Use the `next_page_token` field at the object
+    root to determine if more results are available."""
 
     trigger: Optional[TriggerType] = None
     """The type of trigger that fired this run.
@@ -2985,7 +3925,8 @@ class Run:
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     trigger_info: Optional[TriggerInfo] = None
     """Additional details about what triggered the run"""
@@ -2999,9 +3940,12 @@ def as_dict(self) -> dict:
         if self.cluster_spec: body['cluster_spec'] = self.cluster_spec.as_dict()
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
         if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
         if self.git_source: body['git_source'] = self.git_source.as_dict()
+        if self.has_more is not None: body['has_more'] = self.has_more
         if self.iterations: body['iterations'] = [v.as_dict() for v in self.iterations]
         if self.job_clusters: body['job_clusters'] = [v.as_dict() for v in self.job_clusters]
         if self.job_id is not None: body['job_id'] = self.job_id
@@ -3030,6 +3974,48 @@ def as_dict(self) -> dict:
         if self.trigger_info: body['trigger_info'] = self.trigger_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Run into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.cluster_spec: body['cluster_spec'] = self.cluster_spec
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.description is not None: body['description'] = self.description
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.git_source: body['git_source'] = self.git_source
+        if self.has_more is not None: body['has_more'] = self.has_more
+        if self.iterations: body['iterations'] = self.iterations
+        if self.job_clusters: body['job_clusters'] = self.job_clusters
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.job_run_id is not None: body['job_run_id'] = self.job_run_id
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.original_attempt_run_id is not None:
+            body['original_attempt_run_id'] = self.original_attempt_run_id
+        if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.repair_history: body['repair_history'] = self.repair_history
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.run_type is not None: body['run_type'] = self.run_type
+        if self.schedule: body['schedule'] = self.schedule
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.tasks: body['tasks'] = self.tasks
+        if self.trigger is not None: body['trigger'] = self.trigger
+        if self.trigger_info: body['trigger_info'] = self.trigger_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
@@ -3039,9 +4025,11 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    cluster_spec=_from_dict(d, 'cluster_spec', ClusterSpec),
                    creator_user_name=d.get('creator_user_name', None),
                    description=d.get('description', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    end_time=d.get('end_time', None),
                    execution_duration=d.get('execution_duration', None),
                    git_source=_from_dict(d, 'git_source', GitSource),
+                   has_more=d.get('has_more', None),
                    iterations=_repeated_dict(d, 'iterations', RunTask),
                    job_clusters=_repeated_dict(d, 'job_clusters', JobCluster),
                    job_id=d.get('job_id', None),
@@ -3103,6 +4091,15 @@ def as_dict(self) -> dict:
         if self.right is not None: body['right'] = self.right
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunConditionTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.left is not None: body['left'] = self.left
+        if self.op is not None: body['op'] = self.op
+        if self.outcome is not None: body['outcome'] = self.outcome
+        if self.right is not None: body['right'] = self.right
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunConditionTask:
         """Deserializes the RunConditionTask from a dictionary."""
@@ -3137,6 +4134,15 @@ def as_dict(self) -> dict:
         if self.task: body['task'] = self.task.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunForEachTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.concurrency is not None: body['concurrency'] = self.concurrency
+        if self.inputs is not None: body['inputs'] = self.inputs
+        if self.stats: body['stats'] = self.stats
+        if self.task: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunForEachTask:
         """Deserializes the RunForEachTask from a dictionary."""
@@ -3175,6 +4181,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunJobOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobOutput:
         """Deserializes the RunJobOutput from a dictionary."""
@@ -3197,8 +4209,9 @@ class RunJobTask:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used to trigger the job."""
@@ -3277,6 +4290,21 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunJobTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunJobTask:
         """Deserializes the RunJobTask from a dictionary."""
@@ -3357,8 +4385,9 @@ class RunNow:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     job_parameters: Optional[Dict[str, str]] = None
     """Job-level parameters used in the run. for example `"param": "overriding_val"`"""
@@ -3380,6 +4409,15 @@ class RunNow:
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
     [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html"""
 
+    only: Optional[List[str]] = None
+    """A list of task keys to run inside of the job. If this field is not provided, all tasks in the
+    job will be run."""
+
+    performance_target: Optional[PerformanceTarget] = None
+    """PerformanceTarget defines how performant or cost efficient the execution of run on serverless
+    compute should be. For RunNow request, the run will execute with this settings instead of ones
+    defined in job."""
+
     pipeline_params: Optional[PipelineParams] = None
     """Controls whether the pipeline should perform a full refresh"""
 
@@ -3434,6 +4472,8 @@ def as_dict(self) -> dict:
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.job_parameters: body['job_parameters'] = self.job_parameters
         if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.only: body['only'] = [v for v in self.only]
+        if self.performance_target is not None: body['performance_target'] = self.performance_target.value
         if self.pipeline_params: body['pipeline_params'] = self.pipeline_params.as_dict()
         if self.python_named_params: body['python_named_params'] = self.python_named_params
         if self.python_params: body['python_params'] = [v for v in self.python_params]
@@ -3442,6 +4482,25 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunNow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.job_parameters: body['job_parameters'] = self.job_parameters
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.only: body['only'] = self.only
+        if self.performance_target is not None: body['performance_target'] = self.performance_target
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.queue: body['queue'] = self.queue
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNow:
         """Deserializes the RunNow from a dictionary."""
@@ -3451,6 +4510,8 @@ def from_dict(cls, d: Dict[str, any]) -> RunNow:
                    job_id=d.get('job_id', None),
                    job_parameters=d.get('job_parameters', None),
                    notebook_params=d.get('notebook_params', None),
+                   only=d.get('only', None),
+                   performance_target=_enum(d, 'performance_target', PerformanceTarget),
                    pipeline_params=_from_dict(d, 'pipeline_params', PipelineParams),
                    python_named_params=d.get('python_named_params', None),
                    python_params=d.get('python_params', None),
@@ -3476,6 +4537,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunNowResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.number_in_job is not None: body['number_in_job'] = self.number_in_job
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunNowResponse:
         """Deserializes the RunNowResponse from a dictionary."""
@@ -3486,6 +4554,9 @@ def from_dict(cls, d: Dict[str, any]) -> RunNowResponse:
 class RunOutput:
     """Run output was retrieved successfully."""
 
+    clean_rooms_notebook_output: Optional[CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput] = None
+    """The output of a clean rooms notebook task, if available"""
+
     dbt_output: Optional[DbtOutput] = None
     """The output of a dbt task, if available."""
 
@@ -3530,6 +4601,8 @@ class RunOutput:
     def as_dict(self) -> dict:
         """Serializes the RunOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_output:
+            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output.as_dict()
         if self.dbt_output: body['dbt_output'] = self.dbt_output.as_dict()
         if self.error is not None: body['error'] = self.error
         if self.error_trace is not None: body['error_trace'] = self.error_trace
@@ -3542,10 +4615,29 @@ def as_dict(self) -> dict:
         if self.sql_output: body['sql_output'] = self.sql_output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_rooms_notebook_output:
+            body['clean_rooms_notebook_output'] = self.clean_rooms_notebook_output
+        if self.dbt_output: body['dbt_output'] = self.dbt_output
+        if self.error is not None: body['error'] = self.error
+        if self.error_trace is not None: body['error_trace'] = self.error_trace
+        if self.info is not None: body['info'] = self.info
+        if self.logs is not None: body['logs'] = self.logs
+        if self.logs_truncated is not None: body['logs_truncated'] = self.logs_truncated
+        if self.metadata: body['metadata'] = self.metadata
+        if self.notebook_output: body['notebook_output'] = self.notebook_output
+        if self.run_job_output: body['run_job_output'] = self.run_job_output
+        if self.sql_output: body['sql_output'] = self.sql_output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunOutput:
         """Deserializes the RunOutput from a dictionary."""
-        return cls(dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
+        return cls(clean_rooms_notebook_output=_from_dict(d, 'clean_rooms_notebook_output',
+                                                          CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput),
+                   dbt_output=_from_dict(d, 'dbt_output', DbtOutput),
                    error=d.get('error', None),
                    error_trace=d.get('error_trace', None),
                    info=d.get('info', None),
@@ -3570,8 +4662,9 @@ class RunParameters:
     be specified in conjunction with notebook_params. The JSON representation of this field (for
     example `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
     
-    Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-    information about job runs."""
+    Use [Task parameter variables] to set parameters containing information about job runs.
+    
+    [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
     notebook_params: Optional[Dict[str, str]] = None
     """A map from keys to values for jobs with notebook task, for example `"notebook_params": {"name":
@@ -3645,6 +4738,19 @@ def as_dict(self) -> dict:
         if self.sql_params: body['sql_params'] = self.sql_params
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbt_commands: body['dbt_commands'] = self.dbt_commands
+        if self.jar_params: body['jar_params'] = self.jar_params
+        if self.notebook_params: body['notebook_params'] = self.notebook_params
+        if self.pipeline_params: body['pipeline_params'] = self.pipeline_params
+        if self.python_named_params: body['python_named_params'] = self.python_named_params
+        if self.python_params: body['python_params'] = self.python_params
+        if self.spark_submit_params: body['spark_submit_params'] = self.spark_submit_params
+        if self.sql_params: body['sql_params'] = self.sql_params
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunParameters:
         """Deserializes the RunParameters from a dictionary."""
@@ -3714,6 +4820,17 @@ def as_dict(self) -> dict:
             body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.life_cycle_state is not None: body['life_cycle_state'] = self.life_cycle_state
+        if self.queue_reason is not None: body['queue_reason'] = self.queue_reason
+        if self.result_state is not None: body['result_state'] = self.result_state
+        if self.state_message is not None: body['state_message'] = self.state_message
+        if self.user_cancelled_or_timedout is not None:
+            body['user_cancelled_or_timedout'] = self.user_cancelled_or_timedout
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunState:
         """Deserializes the RunState from a dictionary."""
@@ -3746,6 +4863,14 @@ def as_dict(self) -> dict:
         if self.termination_details: body['termination_details'] = self.termination_details.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.queue_details: body['queue_details'] = self.queue_details
+        if self.state is not None: body['state'] = self.state
+        if self.termination_details: body['termination_details'] = self.termination_details
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunStatus:
         """Deserializes the RunStatus from a dictionary."""
@@ -3771,6 +4896,11 @@ class RunTask:
     original attempt’s ID and an incrementing `attempt_number`. Runs are retried only until they
     succeed, and the maximum `attempt_number` is the same as the `max_retries` value for the job."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     cleanup_duration: Optional[int] = None
     """The time in milliseconds it took to terminate the cluster and clean up any associated artifacts.
     The duration of a task run is the sum of the `setup_duration`, `execution_duration`, and the
@@ -3782,13 +4912,13 @@ class RunTask:
     once the Jobs service has requested a cluster for the run."""
 
     condition_task: Optional[RunConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -3798,6 +4928,16 @@ class RunTask:
     description: Optional[str] = None
     """An optional description for this task."""
 
+    disabled: Optional[bool] = None
+    """Denotes whether or not the task was disabled by the user. Disabled tasks do not execute and are
+    immediately skipped as soon as they are unblocked."""
+
+    effective_performance_target: Optional[PerformanceTarget] = None
+    """effective_performance_target is the actual performance target used by the run during execution.
+    effective_performance_target can differ from performance_target depending on whether the job was
+    eligible to be cost-optimized (e.g. it contains at least one serverless task) or whether an
+    override was provided for the run (e.g. via RunNow)."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the task run begins or completes. The default
     behavior is to not send any emails."""
@@ -3823,7 +4963,8 @@ class RunTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[RunForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     git_source: Optional[GitSource] = None
     """An optional specification for a remote Git repository containing the source code used by tasks.
@@ -3845,18 +4986,18 @@ class RunTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
@@ -3876,7 +5017,7 @@ class RunTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     run_page_url: Optional[str] = None
 
@@ -3888,14 +5029,14 @@ class RunTask:
     duration of a multitask job run is the value of the `run_duration` field."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -3911,7 +5052,8 @@ class RunTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     start_time: Optional[int] = None
     """The time at which this run was started in epoch milliseconds (milliseconds since 1/1/1970 UTC).
@@ -3936,12 +5078,17 @@ def as_dict(self) -> dict:
         """Serializes the RunTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
         if self.cluster_instance: body['cluster_instance'] = self.cluster_instance.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
         if self.description is not None: body['description'] = self.description
+        if self.disabled is not None: body['disabled'] = self.disabled
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target.value
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.end_time is not None: body['end_time'] = self.end_time
         if self.environment_key is not None: body['environment_key'] = self.environment_key
@@ -3976,16 +5123,68 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.attempt_number is not None: body['attempt_number'] = self.attempt_number
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
+        if self.cleanup_duration is not None: body['cleanup_duration'] = self.cleanup_duration
+        if self.cluster_instance: body['cluster_instance'] = self.cluster_instance
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.disabled is not None: body['disabled'] = self.disabled
+        if self.effective_performance_target is not None:
+            body['effective_performance_target'] = self.effective_performance_target
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.execution_duration is not None: body['execution_duration'] = self.execution_duration
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.git_source: body['git_source'] = self.git_source
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
+        if self.resolved_values: body['resolved_values'] = self.resolved_values
+        if self.run_duration is not None: body['run_duration'] = self.run_duration
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.run_page_url is not None: body['run_page_url'] = self.run_page_url
+        if self.setup_duration is not None: body['setup_duration'] = self.setup_duration
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.state: body['state'] = self.state
+        if self.status: body['status'] = self.status
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTask:
         """Deserializes the RunTask from a dictionary."""
         return cls(attempt_number=d.get('attempt_number', None),
+                   clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
                    cleanup_duration=d.get('cleanup_duration', None),
                    cluster_instance=_from_dict(d, 'cluster_instance', ClusterInstance),
                    condition_task=_from_dict(d, 'condition_task', RunConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
+                   disabled=d.get('disabled', None),
+                   effective_performance_target=_enum(d, 'effective_performance_target', PerformanceTarget),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    end_time=d.get('end_time', None),
                    environment_key=d.get('environment_key', None),
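
The new `as_shallow_dict` differs from `as_dict` in that nested dataclasses are kept as objects instead of being recursively serialized. A minimal sketch of the contrast:

```python
from databricks.sdk.service.jobs import NotebookTask, RunTask

task = RunTask(task_key='ingest',
               notebook_task=NotebookTask(notebook_path='/Repos/demo/ingest'))

print(type(task.as_dict()['notebook_task']))          # dict: nested objects serialized
print(type(task.as_shallow_dict()['notebook_task']))  # NotebookTask: kept as-is
```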
@@ -4065,12 +5264,25 @@ class SparkJarTask:
     
     [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables"""
 
+    run_as_repl: Optional[bool] = None
+    """Deprecated. A value of `false` is no longer supported."""
+
     def as_dict(self) -> dict:
         """Serializes the SparkJarTask into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
         if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
         if self.parameters: body['parameters'] = [v for v in self.parameters]
+        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkJarTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.jar_uri is not None: body['jar_uri'] = self.jar_uri
+        if self.main_class_name is not None: body['main_class_name'] = self.main_class_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.run_as_repl is not None: body['run_as_repl'] = self.run_as_repl
         return body
 
     @classmethod
@@ -4078,7 +5290,8 @@ def from_dict(cls, d: Dict[str, any]) -> SparkJarTask:
         """Deserializes the SparkJarTask from a dictionary."""
         return cls(jar_uri=d.get('jar_uri', None),
                    main_class_name=d.get('main_class_name', None),
-                   parameters=d.get('parameters', None))
+                   parameters=d.get('parameters', None),
+                   run_as_repl=d.get('run_as_repl', None))
 
 
 @dataclass
@@ -4113,6 +5326,14 @@ def as_dict(self) -> dict:
         if self.source is not None: body['source'] = self.source.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkPythonTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        if self.python_file is not None: body['python_file'] = self.python_file
+        if self.source is not None: body['source'] = self.source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkPythonTask:
         """Deserializes the SparkPythonTask from a dictionary."""
@@ -4136,6 +5357,12 @@ def as_dict(self) -> dict:
         if self.parameters: body['parameters'] = [v for v in self.parameters]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparkSubmitTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.parameters: body['parameters'] = self.parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparkSubmitTask:
         """Deserializes the SparkSubmitTask from a dictionary."""
@@ -4173,6 +5400,16 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlAlertOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_state is not None: body['alert_state'] = self.alert_state
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.sql_statements: body['sql_statements'] = self.sql_statements
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlAlertOutput:
         """Deserializes the SqlAlertOutput from a dictionary."""
@@ -4209,6 +5446,13 @@ def as_dict(self) -> dict:
         if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlDashboardOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.widgets: body['widgets'] = self.widgets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardOutput:
         """Deserializes the SqlDashboardOutput from a dictionary."""
@@ -4251,6 +5495,18 @@ def as_dict(self) -> dict:
         if self.widget_title is not None: body['widget_title'] = self.widget_title
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlDashboardWidgetOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.error: body['error'] = self.error
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.status is not None: body['status'] = self.status
+        if self.widget_id is not None: body['widget_id'] = self.widget_id
+        if self.widget_title is not None: body['widget_title'] = self.widget_title
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlDashboardWidgetOutput:
         """Deserializes the SqlDashboardWidgetOutput from a dictionary."""
@@ -4291,6 +5547,14 @@ def as_dict(self) -> dict:
         if self.query_output: body['query_output'] = self.query_output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_output: body['alert_output'] = self.alert_output
+        if self.dashboard_output: body['dashboard_output'] = self.dashboard_output
+        if self.query_output: body['query_output'] = self.query_output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutput:
         """Deserializes the SqlOutput from a dictionary."""
@@ -4310,6 +5574,12 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlOutputError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlOutputError:
         """Deserializes the SqlOutputError from a dictionary."""
@@ -4342,6 +5612,16 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlQueryOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.output_link is not None: body['output_link'] = self.output_link
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.sql_statements: body['sql_statements'] = self.sql_statements
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlQueryOutput:
         """Deserializes the SqlQueryOutput from a dictionary."""
@@ -4363,6 +5643,12 @@ def as_dict(self) -> dict:
         if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlStatementOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlStatementOutput:
         """Deserializes the SqlStatementOutput from a dictionary."""
@@ -4403,6 +5689,17 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        if self.dashboard: body['dashboard'] = self.dashboard
+        if self.file: body['file'] = self.file
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query: body['query'] = self.query
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTask:
         """Deserializes the SqlTask from a dictionary."""
@@ -4433,6 +5730,14 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_id is not None: body['alert_id'] = self.alert_id
+        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskAlert:
         """Deserializes the SqlTaskAlert from a dictionary."""
@@ -4464,6 +5769,15 @@ def as_dict(self) -> dict:
         if self.subscriptions: body['subscriptions'] = [v.as_dict() for v in self.subscriptions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.pause_subscriptions is not None: body['pause_subscriptions'] = self.pause_subscriptions
+        if self.subscriptions: body['subscriptions'] = self.subscriptions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskDashboard:
         """Deserializes the SqlTaskDashboard from a dictionary."""
@@ -4495,6 +5809,13 @@ def as_dict(self) -> dict:
         if self.source is not None: body['source'] = self.source.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskFile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.source is not None: body['source'] = self.source
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskFile:
         """Deserializes the SqlTaskFile from a dictionary."""
@@ -4512,6 +5833,12 @@ def as_dict(self) -> dict:
         if self.query_id is not None: body['query_id'] = self.query_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_id is not None: body['query_id'] = self.query_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskQuery:
         """Deserializes the SqlTaskQuery from a dictionary."""
@@ -4536,6 +5863,13 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SqlTaskSubscription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_id is not None: body['destination_id'] = self.destination_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SqlTaskSubscription:
         """Deserializes the SqlTaskSubscription from a dictionary."""
@@ -4626,6 +5960,25 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environments: body['environments'] = self.environments
+        if self.git_source: body['git_source'] = self.git_source
+        if self.health: body['health'] = self.health
+        if self.idempotency_token is not None: body['idempotency_token'] = self.idempotency_token
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.queue: body['queue'] = self.queue
+        if self.run_as: body['run_as'] = self.run_as
+        if self.run_name is not None: body['run_name'] = self.run_name
+        if self.tasks: body['tasks'] = self.tasks
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
@@ -4658,6 +6011,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitRunResponse:
         """Deserializes the SubmitRunResponse from a dictionary."""
@@ -4671,14 +6030,19 @@ class SubmitTask:
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4702,7 +6066,8 @@ class SubmitTask:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4715,18 +6080,18 @@ class SubmitTask:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task run."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     run_if: Optional[RunIf] = None
     """An optional value indicating the condition that determines whether the task should be run once
@@ -4734,17 +6099,17 @@ class SubmitTask:
     :method:jobs/create for a list of possible values."""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4760,7 +6125,8 @@ class SubmitTask:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -4773,6 +6139,8 @@ class SubmitTask:
     def as_dict(self) -> dict:
         """Serializes the SubmitTask into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
@@ -4799,10 +6167,42 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SubmitTask into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.health: body['health'] = self.health
+        if self.libraries: body['libraries'] = self.libraries
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SubmitTask:
         """Deserializes the SubmitTask from a dictionary."""
-        return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
+                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
@@ -4857,6 +6257,17 @@ def as_dict(self) -> dict:
             body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableUpdateTriggerConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition is not None: body['condition'] = self.condition
+        if self.min_time_between_triggers_seconds is not None:
+            body['min_time_between_triggers_seconds'] = self.min_time_between_triggers_seconds
+        if self.table_names: body['table_names'] = self.table_names
+        if self.wait_after_last_change_seconds is not None:
+            body['wait_after_last_change_seconds'] = self.wait_after_last_change_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableUpdateTriggerConfiguration:
         """Deserializes the TableUpdateTriggerConfiguration from a dictionary."""
@@ -4873,14 +6284,19 @@ class Task:
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""
 
+    clean_rooms_notebook_task: Optional[CleanRoomsNotebookTask] = None
+    """The task runs a [clean rooms] notebook when the `clean_rooms_notebook_task` field is present.
+    
+    [clean rooms]: https://docs.databricks.com/en/clean-rooms/index.html"""
+
     condition_task: Optional[ConditionTask] = None
-    """If condition_task, specifies a condition with an outcome that can be used to control the
-    execution of other tasks. Does not require a cluster to execute and does not support retries or
-    notifications."""
+    """The task evaluates a condition that can be used to control the execution of other tasks when the
+    `condition_task` field is present. The condition task does not require a cluster to execute and
+    does not support retries or notifications."""
 
     dbt_task: Optional[DbtTask] = None
-    """If dbt_task, indicates that this must execute a dbt task. It requires both Databricks SQL and
-    the ability to use a serverless or a pro SQL warehouse."""
+    """The task runs one or more dbt commands when the `dbt_task` field is present. The dbt task
+    requires both Databricks SQL and the ability to use a serverless or a pro SQL warehouse."""
 
     depends_on: Optional[List[TaskDependency]] = None
     """An optional array of objects specifying the dependency graph of the task. All tasks specified in
@@ -4908,7 +6324,8 @@ class Task:
     responding. We suggest running jobs and tasks on new clusters for greater reliability"""
 
     for_each_task: Optional[ForEachTask] = None
-    """If for_each_task, indicates that this task must execute the nested task within it."""
+    """The task executes a nested task for every input provided when the `for_each_task` field is
+    present."""
 
     health: Optional[JobsHealthRules] = None
     """An optional set of health rules that can be defined for this job."""
@@ -4935,18 +6352,18 @@ class Task:
     """If new_cluster, a description of a new cluster that is created for each run."""
 
     notebook_task: Optional[NotebookTask] = None
-    """If notebook_task, indicates that this task must run a notebook. This field may not be specified
-    in conjunction with spark_jar_task."""
+    """The task runs a notebook when the `notebook_task` field is present."""
 
     notification_settings: Optional[TaskNotificationSettings] = None
     """Optional notification settings that are used when sending notifications to each of the
     `email_notifications` and `webhook_notifications` for this task."""
 
     pipeline_task: Optional[PipelineTask] = None
-    """If pipeline_task, indicates that this task must execute a Pipeline."""
+    """The task triggers a pipeline update when the `pipeline_task` field is present. Only pipelines
+    configured to use triggered mode are supported."""
 
     python_wheel_task: Optional[PythonWheelTask] = None
-    """If python_wheel_task, indicates that this job must execute a PythonWheel."""
+    """The task runs a Python wheel when the `python_wheel_task` field is present."""
 
     retry_on_timeout: Optional[bool] = None
     """An optional policy to specify whether to retry a job when it times out. The default behavior is
@@ -4962,17 +6379,17 @@ class Task:
     least one dependency failed * `ALL_FAILED`: All dependencies have failed"""
 
     run_job_task: Optional[RunJobTask] = None
-    """If run_job_task, indicates that this task must execute another job."""
+    """The task triggers another job when the `run_job_task` field is present."""
 
     spark_jar_task: Optional[SparkJarTask] = None
-    """If spark_jar_task, indicates that this task must run a JAR."""
+    """The task runs a JAR when the `spark_jar_task` field is present."""
 
     spark_python_task: Optional[SparkPythonTask] = None
-    """If spark_python_task, indicates that this task must run a Python file."""
+    """The task runs a Python file when the `spark_python_task` field is present."""
 
     spark_submit_task: Optional[SparkSubmitTask] = None
-    """If `spark_submit_task`, indicates that this task must be launched by the spark submit script.
-    This task can run only on new clusters.
+    """(Legacy) The task runs the spark-submit script when the `spark_submit_task` field is present.
+    This task can run only on new clusters and is not compatible with serverless compute.
     
     In the `new_cluster` specification, `libraries` and `spark_conf` are not supported. Instead, use
     `--jars` and `--py-files` to add Java and Python libraries and `--conf` to set the Spark
@@ -4988,7 +6405,8 @@ class Task:
     The `--jars`, `--py-files`, `--files` arguments support DBFS and S3 paths."""
 
     sql_task: Optional[SqlTask] = None
-    """If sql_task, indicates that this job must execute a SQL task."""
+    """The task runs a SQL query or file, or it refreshes a SQL alert or a legacy SQL dashboard when
+    the `sql_task` field is present."""
 
     timeout_seconds: Optional[int] = None
     """An optional timeout applied to each run of this job task. A value of `0` means no timeout."""
@@ -5000,6 +6418,8 @@ class Task:
     def as_dict(self) -> dict:
         """Serializes the Task into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.clean_rooms_notebook_task:
+            body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task.as_dict()
         if self.condition_task: body['condition_task'] = self.condition_task.as_dict()
         if self.dbt_task: body['dbt_task'] = self.dbt_task.as_dict()
         if self.depends_on: body['depends_on'] = [v.as_dict() for v in self.depends_on]
@@ -5033,10 +6453,49 @@ def as_dict(self) -> dict:
         if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Task into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.clean_rooms_notebook_task: body['clean_rooms_notebook_task'] = self.clean_rooms_notebook_task
+        if self.condition_task: body['condition_task'] = self.condition_task
+        if self.dbt_task: body['dbt_task'] = self.dbt_task
+        if self.depends_on: body['depends_on'] = self.depends_on
+        if self.description is not None: body['description'] = self.description
+        if self.disable_auto_optimization is not None:
+            body['disable_auto_optimization'] = self.disable_auto_optimization
+        if self.email_notifications: body['email_notifications'] = self.email_notifications
+        if self.environment_key is not None: body['environment_key'] = self.environment_key
+        if self.existing_cluster_id is not None: body['existing_cluster_id'] = self.existing_cluster_id
+        if self.for_each_task: body['for_each_task'] = self.for_each_task
+        if self.health: body['health'] = self.health
+        if self.job_cluster_key is not None: body['job_cluster_key'] = self.job_cluster_key
+        if self.libraries: body['libraries'] = self.libraries
+        if self.max_retries is not None: body['max_retries'] = self.max_retries
+        if self.min_retry_interval_millis is not None:
+            body['min_retry_interval_millis'] = self.min_retry_interval_millis
+        if self.new_cluster: body['new_cluster'] = self.new_cluster
+        if self.notebook_task: body['notebook_task'] = self.notebook_task
+        if self.notification_settings: body['notification_settings'] = self.notification_settings
+        if self.pipeline_task: body['pipeline_task'] = self.pipeline_task
+        if self.python_wheel_task: body['python_wheel_task'] = self.python_wheel_task
+        if self.retry_on_timeout is not None: body['retry_on_timeout'] = self.retry_on_timeout
+        if self.run_if is not None: body['run_if'] = self.run_if
+        if self.run_job_task: body['run_job_task'] = self.run_job_task
+        if self.spark_jar_task: body['spark_jar_task'] = self.spark_jar_task
+        if self.spark_python_task: body['spark_python_task'] = self.spark_python_task
+        if self.spark_submit_task: body['spark_submit_task'] = self.spark_submit_task
+        if self.sql_task: body['sql_task'] = self.sql_task
+        if self.task_key is not None: body['task_key'] = self.task_key
+        if self.timeout_seconds is not None: body['timeout_seconds'] = self.timeout_seconds
+        if self.webhook_notifications: body['webhook_notifications'] = self.webhook_notifications
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Task:
         """Deserializes the Task from a dictionary."""
-        return cls(condition_task=_from_dict(d, 'condition_task', ConditionTask),
+        return cls(clean_rooms_notebook_task=_from_dict(d, 'clean_rooms_notebook_task',
+                                                        CleanRoomsNotebookTask),
+                   condition_task=_from_dict(d, 'condition_task', ConditionTask),
                    dbt_task=_from_dict(d, 'dbt_task', DbtTask),
                    depends_on=_repeated_dict(d, 'depends_on', TaskDependency),
                    description=d.get('description', None),
@@ -5083,6 +6542,13 @@ def as_dict(self) -> dict:
         if self.task_key is not None: body['task_key'] = self.task_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskDependency into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.outcome is not None: body['outcome'] = self.outcome
+        if self.task_key is not None: body['task_key'] = self.task_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskDependency:
         """Deserializes the TaskDependency from a dictionary."""
@@ -5140,6 +6606,20 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskEmailNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskEmailNotifications:
         """Deserializes the TaskEmailNotifications from a dictionary."""
@@ -5177,6 +6657,16 @@ def as_dict(self) -> dict:
             body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TaskNotificationSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_on_last_attempt is not None: body['alert_on_last_attempt'] = self.alert_on_last_attempt
+        if self.no_alert_for_canceled_runs is not None:
+            body['no_alert_for_canceled_runs'] = self.no_alert_for_canceled_runs
+        if self.no_alert_for_skipped_runs is not None:
+            body['no_alert_for_skipped_runs'] = self.no_alert_for_skipped_runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TaskNotificationSettings:
         """Deserializes the TaskNotificationSettings from a dictionary."""
@@ -5222,6 +6712,7 @@ class TerminationCodeCode(Enum):
     
     [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now"""
 
+    BUDGET_POLICY_LIMIT_EXCEEDED = 'BUDGET_POLICY_LIMIT_EXCEEDED'
     CANCELED = 'CANCELED'
     CLOUD_FAILURE = 'CLOUD_FAILURE'
     CLUSTER_ERROR = 'CLUSTER_ERROR'
@@ -5306,6 +6797,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.message is not None: body['message'] = self.message
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationDetails:
         """Deserializes the TerminationDetails from a dictionary."""
@@ -5342,6 +6841,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggerInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerInfo:
         """Deserializes the TriggerInfo from a dictionary."""
@@ -5374,6 +6879,16 @@ def as_dict(self) -> dict:
         if self.table_update: body['table_update'] = self.table_update.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TriggerSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_arrival: body['file_arrival'] = self.file_arrival
+        if self.pause_status is not None: body['pause_status'] = self.pause_status
+        if self.periodic: body['periodic'] = self.periodic
+        if self.table: body['table'] = self.table
+        if self.table_update: body['table_update'] = self.table_update
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TriggerSettings:
         """Deserializes the TriggerSettings from a dictionary."""
@@ -5393,7 +6908,8 @@ class TriggerType(Enum):
     previously failed run. This occurs when you request to re-run the job in case of failures. *
     `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`:
     Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is
-    triggered by a table update."""
+    triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to
+    manually restart a continuous job run."""
 
     FILE_ARRIVAL = 'FILE_ARRIVAL'
     ONE_TIME = 'ONE_TIME'
@@ -5434,6 +6950,14 @@ def as_dict(self) -> dict:
         if self.new_settings: body['new_settings'] = self.new_settings.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateJob into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fields_to_remove: body['fields_to_remove'] = self.fields_to_remove
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.new_settings: body['new_settings'] = self.new_settings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateJob:
         """Deserializes the UpdateJob from a dictionary."""
@@ -5450,6 +6974,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -5476,6 +7005,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ViewItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ViewItem:
         """Deserializes the ViewItem from a dictionary."""
@@ -5508,6 +7045,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Webhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Webhook:
         """Deserializes the Webhook from a dictionary."""
@@ -5555,6 +7098,18 @@ def as_dict(self) -> dict:
         if self.on_success: body['on_success'] = [v.as_dict() for v in self.on_success]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WebhookNotifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.on_duration_warning_threshold_exceeded:
+            body['on_duration_warning_threshold_exceeded'] = self.on_duration_warning_threshold_exceeded
+        if self.on_failure: body['on_failure'] = self.on_failure
+        if self.on_start: body['on_start'] = self.on_start
+        if self.on_streaming_backlog_exceeded:
+            body['on_streaming_backlog_exceeded'] = self.on_streaming_backlog_exceeded
+        if self.on_success: body['on_success'] = self.on_success
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WebhookNotifications:
         """Deserializes the WebhookNotifications from a dictionary."""
@@ -5682,6 +7237,7 @@ def create(self,
                name: Optional[str] = None,
                notification_settings: Optional[JobNotificationSettings] = None,
                parameters: Optional[List[JobParameterDefinition]] = None,
+               performance_target: Optional[PerformanceTarget] = None,
                queue: Optional[QueueSettings] = None,
                run_as: Optional[JobRunAs] = None,
                schedule: Optional[CronSchedule] = None,
@@ -5737,6 +7293,7 @@ def create(self,
         :param job_clusters: List[:class:`JobCluster`] (optional)
           A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
           cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
         :param max_concurrent_runs: int (optional)
           An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
           able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -5753,14 +7310,16 @@ def create(self,
           `email_notifications` and `webhook_notifications` for this job.
         :param parameters: List[:class:`JobParameterDefinition`] (optional)
           Job-level parameter definitions
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on
+          serverless compute should be.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
-          not specified, the job/pipeline runs as the user who created the job/pipeline.
+          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+          the job runs as the user who created the job.
           
-          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
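
A hedged sketch of creating a serverless job with the new parameter; the `PERFORMANCE_OPTIMIZED` enum member is an assumption, as the `PerformanceTarget` values are not shown in this hunk:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.jobs import NotebookTask, PerformanceTarget, Task

w = WorkspaceClient()
job = w.jobs.create(
    name='nightly-etl',
    performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED,  # assumed enum member
    tasks=[Task(task_key='main',
                notebook_task=NotebookTask(notebook_path='/Repos/demo/etl'))])
```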
@@ -5769,7 +7328,9 @@ def create(self,
           clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
           to the job.
         :param tasks: List[:class:`Task`] (optional)
-          A list of task specifications to be executed by this job.
+          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+          to determine if more results are available.
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
         :param trigger: :class:`TriggerSettings` (optional)
@@ -5799,6 +7360,7 @@ def create(self,
         if name is not None: body['name'] = name
         if notification_settings is not None: body['notification_settings'] = notification_settings.as_dict()
         if parameters is not None: body['parameters'] = [v.as_dict() for v in parameters]
+        if performance_target is not None: body['performance_target'] = performance_target.value
         if queue is not None: body['queue'] = queue.as_dict()
         if run_as is not None: body['run_as'] = run_as.as_dict()
         if schedule is not None: body['schedule'] = schedule.as_dict()
@@ -5865,19 +7427,28 @@ def export_run(self, run_id: int, *, views_to_export: Optional[ViewsToExport] =
         res = self._api.do('GET', '/api/2.1/jobs/runs/export', query=query, headers=headers)
         return ExportRunOutput.from_dict(res)
 
-    def get(self, job_id: int) -> Job:
+    def get(self, job_id: int, *, page_token: Optional[str] = None) -> Job:
         """Get a single job.
         
         Retrieves the details for a single job.
         
+        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
+        
         :param job_id: int
           The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
         
         :returns: :class:`Job`
         """
 
         query = {}
         if job_id is not None: query['job_id'] = job_id
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
         res = self._api.do('GET', '/api/2.1/jobs/get', query=query, headers=headers)
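
A minimal sketch of draining the paginated sub-resources with the new `page_token` parameter; the `job_id` is a placeholder and reading `next_page_token` at the object root follows the docstring above:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
job = w.jobs.get(job_id=123)  # placeholder job id
tasks = list(job.settings.tasks or [])
page_token = job.next_page_token
while page_token:
    page = w.jobs.get(job_id=123, page_token=page_token)
    tasks.extend(page.settings.tasks or [])
    page_token = page.next_page_token
```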
@@ -5923,7 +7494,12 @@ def get_run(self,
                 page_token: Optional[str] = None) -> Run:
         """Get a single job run.
         
-        Retrieve the metadata of a run.
+        Retrieves the metadata of a run.
+        
+        In Jobs API 2.2, requests for a single job run support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
         
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
@@ -5932,8 +7508,8 @@ def get_run(self,
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
+          Use `next_page_token` returned from the previous GetRun to request the next page of the run's
+          sub-resources.
         
         :returns: :class:`Run`
         """
@@ -5985,7 +7561,8 @@ def list(self,
         Retrieves a list of jobs.
         
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
         :param limit: int (optional)
           The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
           default value is 20.
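
Since `expand_tasks=True` now caps each listed job at the first 100 task and cluster elements, a caller that needs the complete arrays can fall back to :method:jobs/get as the note suggests. A sketch; the 100-element check is an illustrative heuristic, not part of the API contract:

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    for base_job in w.jobs.list(expand_tasks=True):
        tasks = (base_job.settings.tasks or []) if base_job.settings else []
        if len(tasks) == 100:  # possibly truncated: fetch the full job instead
            # First page only; continue with page_token as in the earlier sketch.
            tasks = w.jobs.get(job_id=base_job.job_id).settings.tasks or []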
@@ -6042,7 +7619,8 @@ def list_runs(self,
           If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
           active and completed runs. This field cannot be `true` when active_only is `true`.
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
         :param job_id: int (optional)
           The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
         :param limit: int (optional)
@@ -6121,8 +7699,9 @@ def repair_run(self,
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
         :param latest_repair_id: int (optional)
@@ -6279,6 +7858,8 @@ def run_now(self,
                 jar_params: Optional[List[str]] = None,
                 job_parameters: Optional[Dict[str, str]] = None,
                 notebook_params: Optional[Dict[str, str]] = None,
+                only: Optional[List[str]] = None,
+                performance_target: Optional[PerformanceTarget] = None,
                 pipeline_params: Optional[PipelineParams] = None,
                 python_named_params: Optional[Dict[str, str]] = None,
                 python_params: Optional[List[str]] = None,
@@ -6314,8 +7895,9 @@ def run_now(self,
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html\"#parameter-variables\") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run, for example `"param": "overriding_val"`
         :param notebook_params: Dict[str,str] (optional)
@@ -6334,6 +7916,13 @@ def run_now(self,
           
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside the job. If this field is not provided, all tasks in the job
+          will be run.
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of the run on
+          serverless compute should be. For a RunNow request, the run executes with these settings
+          instead of the ones defined in the job.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
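
The two new parameters compose naturally. A sketch of triggering a partial run with a serverless performance preference; the job ID and task keys are placeholders, and the enum member name is an assumption (check the generated `PerformanceTarget` enum for the actual values):

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.jobs import PerformanceTarget

    w = WorkspaceClient()
    run = w.jobs.run_now(
        job_id=123,  # placeholder
        only=['ingest', 'transform'],  # run only these two task keys
        performance_target=PerformanceTarget.PERFORMANCE_OPTIMIZED,  # member name assumed
    ).result()  # run_now returns a Wait[Run]; .result() blocks until the run finishes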
@@ -6385,6 +7974,8 @@ def run_now(self,
         if job_id is not None: body['job_id'] = job_id
         if job_parameters is not None: body['job_parameters'] = job_parameters
         if notebook_params is not None: body['notebook_params'] = notebook_params
+        if only is not None: body['only'] = [v for v in only]
+        if performance_target is not None: body['performance_target'] = performance_target.value
         if pipeline_params is not None: body['pipeline_params'] = pipeline_params.as_dict()
         if python_named_params is not None: body['python_named_params'] = python_named_params
         if python_params is not None: body['python_params'] = [v for v in python_params]
@@ -6406,6 +7997,8 @@ def run_now_and_wait(self,
                          jar_params: Optional[List[str]] = None,
                          job_parameters: Optional[Dict[str, str]] = None,
                          notebook_params: Optional[Dict[str, str]] = None,
+                         only: Optional[List[str]] = None,
+                         performance_target: Optional[PerformanceTarget] = None,
                          pipeline_params: Optional[PipelineParams] = None,
                          python_named_params: Optional[Dict[str, str]] = None,
                          python_params: Optional[List[str]] = None,
@@ -6419,6 +8012,8 @@ def run_now_and_wait(self,
                             job_id=job_id,
                             job_parameters=job_parameters,
                             notebook_params=notebook_params,
+                            only=only,
+                            performance_target=performance_target,
                             pipeline_params=pipeline_params,
                             python_named_params=python_named_params,
                             python_params=python_params,
@@ -6433,7 +8028,8 @@ def set_permissions(
             access_control_list: Optional[List[JobAccessControlRequest]] = None) -> JobPermissions:
         """Set job permissions.
         
-        Sets permissions on a job. Jobs can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param job_id: str
           The job for which to get or manage permissions.
diff --git a/databricks/sdk/service/marketplace.py b/databricks/sdk/service/marketplace.py
index 1a2dedf31..239cd2eaf 100755
--- a/databricks/sdk/service/marketplace.py
+++ b/databricks/sdk/service/marketplace.py
@@ -27,6 +27,13 @@ def as_dict(self) -> dict:
         if self.listing_id is not None: body['listing_id'] = self.listing_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddExchangeForListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingRequest:
         """Deserializes the AddExchangeForListingRequest from a dictionary."""
@@ -43,6 +50,12 @@ def as_dict(self) -> dict:
         if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AddExchangeForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_for_listing: body['exchange_for_listing'] = self.exchange_for_listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AddExchangeForListingResponse:
         """Deserializes the AddExchangeForListingResponse from a dictionary."""
@@ -56,6 +69,7 @@ class AssetType(Enum):
     ASSET_TYPE_MEDIA = 'ASSET_TYPE_MEDIA'
     ASSET_TYPE_MODEL = 'ASSET_TYPE_MODEL'
     ASSET_TYPE_NOTEBOOK = 'ASSET_TYPE_NOTEBOOK'
+    ASSET_TYPE_PARTNER_INTEGRATION = 'ASSET_TYPE_PARTNER_INTEGRATION'
 
 
 @dataclass
@@ -68,6 +82,12 @@ def as_dict(self) -> dict:
         if self.listings: body['listings'] = [v.as_dict() for v in self.listings]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BatchGetListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetListingsResponse:
         """Deserializes the BatchGetListingsResponse from a dictionary."""
@@ -84,6 +104,12 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BatchGetProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BatchGetProvidersResponse:
         """Deserializes the BatchGetProvidersResponse from a dictionary."""
@@ -126,6 +152,12 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ConsumerTerms into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ConsumerTerms:
         """Deserializes the ConsumerTerms from a dictionary."""
@@ -153,6 +185,15 @@ def as_dict(self) -> dict:
         if self.last_name is not None: body['last_name'] = self.last_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ContactInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.company is not None: body['company'] = self.company
+        if self.email is not None: body['email'] = self.email
+        if self.first_name is not None: body['first_name'] = self.first_name
+        if self.last_name is not None: body['last_name'] = self.last_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ContactInfo:
         """Deserializes the ContactInfo from a dictionary."""
@@ -178,6 +219,12 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterRequest:
         """Deserializes the CreateExchangeFilterRequest from a dictionary."""
@@ -194,6 +241,12 @@ def as_dict(self) -> dict:
         if self.filter_id is not None: body['filter_id'] = self.filter_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter_id is not None: body['filter_id'] = self.filter_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeFilterResponse:
         """Deserializes the CreateExchangeFilterResponse from a dictionary."""
@@ -210,6 +263,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeRequest:
         """Deserializes the CreateExchangeRequest from a dictionary."""
@@ -226,6 +285,12 @@ def as_dict(self) -> dict:
         if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExchangeResponse:
         """Deserializes the CreateExchangeResponse from a dictionary."""
@@ -252,6 +317,15 @@ def as_dict(self) -> dict:
         if self.mime_type is not None: body['mime_type'] = self.mime_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFileRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.file_parent: body['file_parent'] = self.file_parent
+        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
+        if self.mime_type is not None: body['mime_type'] = self.mime_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileRequest:
         """Deserializes the CreateFileRequest from a dictionary."""
@@ -275,6 +349,13 @@ def as_dict(self) -> dict:
         if self.upload_url is not None: body['upload_url'] = self.upload_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_info: body['file_info'] = self.file_info
+        if self.upload_url is not None: body['upload_url'] = self.upload_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateFileResponse:
         """Deserializes the CreateFileResponse from a dictionary."""
@@ -308,6 +389,17 @@ def as_dict(self) -> dict:
         if self.share_name is not None: body['share_name'] = self.share_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateInstallationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_detail: body['repo_detail'] = self.repo_detail
+        if self.share_name is not None: body['share_name'] = self.share_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateInstallationRequest:
         """Deserializes the CreateInstallationRequest from a dictionary."""
@@ -329,6 +421,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingRequest:
         """Deserializes the CreateListingRequest from a dictionary."""
@@ -345,6 +443,12 @@ def as_dict(self) -> dict:
         if self.listing_id is not None: body['listing_id'] = self.listing_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateListingResponse:
         """Deserializes the CreateListingResponse from a dictionary."""
@@ -388,6 +492,20 @@ def as_dict(self) -> dict:
         if self.recipient_type is not None: body['recipient_type'] = self.recipient_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePersonalizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.accepted_consumer_terms: body['accepted_consumer_terms'] = self.accepted_consumer_terms
+        if self.comment is not None: body['comment'] = self.comment
+        if self.company is not None: body['company'] = self.company
+        if self.first_name is not None: body['first_name'] = self.first_name
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.last_name is not None: body['last_name'] = self.last_name
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequest:
         """Deserializes the CreatePersonalizationRequest from a dictionary."""
@@ -412,6 +530,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePersonalizationRequestResponse:
         """Deserializes the CreatePersonalizationRequestResponse from a dictionary."""
@@ -428,6 +552,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProviderRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderRequest:
         """Deserializes the CreateProviderRequest from a dictionary."""
@@ -444,6 +574,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProviderResponse:
         """Deserializes the CreateProviderResponse from a dictionary."""
@@ -476,6 +612,13 @@ def as_dict(self) -> dict:
         if self.unit is not None: body['unit'] = self.unit.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataRefreshInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.interval is not None: body['interval'] = self.interval
+        if self.unit is not None: body['unit'] = self.unit
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataRefreshInfo:
         """Deserializes the DataRefreshInfo from a dictionary."""
@@ -490,6 +633,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeFilterResponse:
         """Deserializes the DeleteExchangeFilterResponse from a dictionary."""
@@ -504,6 +652,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExchangeResponse:
         """Deserializes the DeleteExchangeResponse from a dictionary."""
@@ -518,6 +671,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteFileResponse:
         """Deserializes the DeleteFileResponse from a dictionary."""
@@ -532,6 +690,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteInstallationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteInstallationResponse:
         """Deserializes the DeleteInstallationResponse from a dictionary."""
@@ -546,6 +709,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteListingResponse:
         """Deserializes the DeleteListingResponse from a dictionary."""
@@ -560,6 +728,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteProviderResponse:
         """Deserializes the DeleteProviderResponse from a dictionary."""
@@ -606,6 +779,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Exchange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.filters: body['filters'] = self.filters
+        if self.id is not None: body['id'] = self.id
+        if self.linked_listings: body['linked_listings'] = self.linked_listings
+        if self.name is not None: body['name'] = self.name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Exchange:
         """Deserializes the Exchange from a dictionary."""
@@ -654,6 +841,20 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.filter_type is not None: body['filter_type'] = self.filter_type
+        if self.filter_value is not None: body['filter_value'] = self.filter_value
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeFilter:
         """Deserializes the ExchangeFilter from a dictionary."""
@@ -701,6 +902,18 @@ def as_dict(self) -> dict:
         if self.listing_name is not None: body['listing_name'] = self.listing_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeListing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.exchange_id is not None: body['exchange_id'] = self.exchange_id
+        if self.exchange_name is not None: body['exchange_name'] = self.exchange_name
+        if self.id is not None: body['id'] = self.id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeListing:
         """Deserializes the ExchangeListing from a dictionary."""
@@ -753,6 +966,21 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.download_link is not None: body['download_link'] = self.download_link
+        if self.file_parent: body['file_parent'] = self.file_parent
+        if self.id is not None: body['id'] = self.id
+        if self.marketplace_file_type is not None: body['marketplace_file_type'] = self.marketplace_file_type
+        if self.mime_type is not None: body['mime_type'] = self.mime_type
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -782,6 +1010,13 @@ def as_dict(self) -> dict:
         if self.parent_id is not None: body['parent_id'] = self.parent_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileParent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_parent_type is not None: body['file_parent_type'] = self.file_parent_type
+        if self.parent_id is not None: body['parent_id'] = self.parent_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileParent:
         """Deserializes the FileParent from a dictionary."""
@@ -819,6 +1054,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExchangeResponse:
         """Deserializes the GetExchangeResponse from a dictionary."""
@@ -835,6 +1076,12 @@ def as_dict(self) -> dict:
         if self.file_info: body['file_info'] = self.file_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetFileResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_info: body['file_info'] = self.file_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetFileResponse:
         """Deserializes the GetFileResponse from a dictionary."""
@@ -852,6 +1099,12 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionProviderAnalyticsDashboardResponse:
         """Deserializes the GetLatestVersionProviderAnalyticsDashboardResponse from a dictionary."""
@@ -872,6 +1125,13 @@ def as_dict(self) -> dict:
             body['shared_data_objects'] = [v.as_dict() for v in self.shared_data_objects]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingContentMetadataResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shared_data_objects: body['shared_data_objects'] = self.shared_data_objects
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingContentMetadataResponse:
         """Deserializes the GetListingContentMetadataResponse from a dictionary."""
@@ -889,6 +1149,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingResponse:
         """Deserializes the GetListingResponse from a dictionary."""
@@ -908,6 +1174,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetListingsResponse:
         """Deserializes the GetListingsResponse from a dictionary."""
@@ -926,6 +1199,12 @@ def as_dict(self) -> dict:
             body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPersonalizationRequestResponse:
         """Deserializes the GetPersonalizationRequestResponse from a dictionary."""
@@ -943,6 +1222,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetProviderResponse:
         """Deserializes the GetProviderResponse from a dictionary."""
@@ -959,6 +1244,12 @@ def as_dict(self) -> dict:
         if self.installation: body['installation'] = self.installation.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Installation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Installation:
         """Deserializes the Installation from a dictionary."""
@@ -1011,6 +1302,24 @@ def as_dict(self) -> dict:
         if self.tokens: body['tokens'] = [v.as_dict() for v in self.tokens]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InstallationDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.id is not None: body['id'] = self.id
+        if self.installed_on is not None: body['installed_on'] = self.installed_on
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        if self.share_name is not None: body['share_name'] = self.share_name
+        if self.status is not None: body['status'] = self.status
+        if self.token_detail: body['token_detail'] = self.token_detail
+        if self.tokens: body['tokens'] = self.tokens
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InstallationDetail:
         """Deserializes the InstallationDetail from a dictionary."""
@@ -1048,6 +1357,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllInstallationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installations: body['installations'] = self.installations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllInstallationsResponse:
         """Deserializes the ListAllInstallationsResponse from a dictionary."""
@@ -1069,6 +1385,13 @@ def as_dict(self) -> dict:
             body['personalization_requests'] = [v.as_dict() for v in self.personalization_requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAllPersonalizationRequestsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.personalization_requests: body['personalization_requests'] = self.personalization_requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAllPersonalizationRequestsResponse:
         """Deserializes the ListAllPersonalizationRequestsResponse from a dictionary."""
@@ -1090,6 +1413,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangeFiltersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filters: body['filters'] = self.filters
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangeFiltersResponse:
         """Deserializes the ListExchangeFiltersResponse from a dictionary."""
@@ -1110,6 +1440,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangesForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_listing: body['exchange_listing'] = self.exchange_listing
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesForListingResponse:
         """Deserializes the ListExchangesForListingResponse from a dictionary."""
@@ -1130,6 +1467,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExchangesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchanges: body['exchanges'] = self.exchanges
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExchangesResponse:
         """Deserializes the ListExchangesResponse from a dictionary."""
@@ -1150,6 +1494,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFilesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_infos: body['file_infos'] = self.file_infos
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFilesResponse:
         """Deserializes the ListFilesResponse from a dictionary."""
@@ -1170,6 +1521,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFulfillmentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fulfillments: body['fulfillments'] = self.fulfillments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListFulfillmentsResponse:
         """Deserializes the ListFulfillmentsResponse from a dictionary."""
@@ -1190,6 +1548,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListInstallationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installations: body['installations'] = self.installations
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListInstallationsResponse:
         """Deserializes the ListInstallationsResponse from a dictionary."""
@@ -1210,6 +1575,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListListingsForExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange_listings: body['exchange_listings'] = self.exchange_listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsForExchangeResponse:
         """Deserializes the ListListingsForExchangeResponse from a dictionary."""
@@ -1230,6 +1602,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListListingsResponse:
         """Deserializes the ListListingsResponse from a dictionary."""
@@ -1254,6 +1633,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderAnalyticsDashboardResponse:
         """Deserializes the ListProviderAnalyticsDashboardResponse from a dictionary."""
@@ -1275,6 +1662,13 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
@@ -1299,6 +1693,14 @@ def as_dict(self) -> dict:
         if self.summary: body['summary'] = self.summary.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Listing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.detail: body['detail'] = self.detail
+        if self.id is not None: body['id'] = self.id
+        if self.summary: body['summary'] = self.summary
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Listing:
         """Deserializes the Listing from a dictionary."""
@@ -1391,6 +1793,31 @@ def as_dict(self) -> dict:
         if self.update_frequency: body['update_frequency'] = self.update_frequency.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.assets: body['assets'] = self.assets
+        if self.collection_date_end is not None: body['collection_date_end'] = self.collection_date_end
+        if self.collection_date_start is not None: body['collection_date_start'] = self.collection_date_start
+        if self.collection_granularity: body['collection_granularity'] = self.collection_granularity
+        if self.cost is not None: body['cost'] = self.cost
+        if self.data_source is not None: body['data_source'] = self.data_source
+        if self.description is not None: body['description'] = self.description
+        if self.documentation_link is not None: body['documentation_link'] = self.documentation_link
+        if self.embedded_notebook_file_infos:
+            body['embedded_notebook_file_infos'] = self.embedded_notebook_file_infos
+        if self.file_ids: body['file_ids'] = self.file_ids
+        if self.geographical_coverage is not None: body['geographical_coverage'] = self.geographical_coverage
+        if self.license is not None: body['license'] = self.license
+        if self.pricing_model is not None: body['pricing_model'] = self.pricing_model
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.size is not None: body['size'] = self.size
+        if self.support_link is not None: body['support_link'] = self.support_link
+        if self.tags: body['tags'] = self.tags
+        if self.terms_of_service is not None: body['terms_of_service'] = self.terms_of_service
+        if self.update_frequency: body['update_frequency'] = self.update_frequency
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingDetail:
         """Deserializes the ListingDetail from a dictionary."""
@@ -1437,6 +1864,16 @@ def as_dict(self) -> dict:
         if self.share_info: body['share_info'] = self.share_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingFulfillment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fulfillment_type is not None: body['fulfillment_type'] = self.fulfillment_type
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.repo_info: body['repo_info'] = self.repo_info
+        if self.share_info: body['share_info'] = self.share_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingFulfillment:
         """Deserializes the ListingFulfillment from a dictionary."""
@@ -1457,6 +1894,12 @@ def as_dict(self) -> dict:
         if self.visibility is not None: body['visibility'] = self.visibility.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.visibility is not None: body['visibility'] = self.visibility
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSetting:
         """Deserializes the ListingSetting from a dictionary."""
@@ -1547,6 +1990,30 @@ def as_dict(self) -> dict:
         if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.categories: body['categories'] = self.categories
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.exchange_ids: body['exchange_ids'] = self.exchange_ids
+        if self.git_repo: body['git_repo'] = self.git_repo
+        if self.listing_type is not None: body['listingType'] = self.listing_type
+        if self.name is not None: body['name'] = self.name
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.provider_region: body['provider_region'] = self.provider_region
+        if self.published_at is not None: body['published_at'] = self.published_at
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.setting: body['setting'] = self.setting
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.subtitle is not None: body['subtitle'] = self.subtitle
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        if self.updated_by_id is not None: body['updated_by_id'] = self.updated_by_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingSummary:
         """Deserializes the ListingSummary from a dictionary."""
@@ -1586,6 +2053,13 @@ def as_dict(self) -> dict:
         if self.tag_values: body['tag_values'] = [v for v in self.tag_values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListingTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tag_name is not None: body['tag_name'] = self.tag_name
+        if self.tag_values: body['tag_values'] = self.tag_values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListingTag:
         """Deserializes the ListingTag from a dictionary."""
@@ -1666,6 +2140,27 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.consumer_region: body['consumer_region'] = self.consumer_region
+        if self.contact_info: body['contact_info'] = self.contact_info
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.intended_use is not None: body['intended_use'] = self.intended_use
+        if self.is_from_lighthouse is not None: body['is_from_lighthouse'] = self.is_from_lighthouse
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.listing_name is not None: body['listing_name'] = self.listing_name
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.provider_id is not None: body['provider_id'] = self.provider_id
+        if self.recipient_type is not None: body['recipient_type'] = self.recipient_type
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalizationRequest:
         """Deserializes the PersonalizationRequest from a dictionary."""
@@ -1705,6 +2200,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderAnalyticsDashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderAnalyticsDashboard:
         """Deserializes the ProviderAnalyticsDashboard from a dictionary."""
@@ -1765,6 +2266,28 @@ def as_dict(self) -> dict:
         if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.business_contact_email is not None:
+            body['business_contact_email'] = self.business_contact_email
+        if self.company_website_link is not None: body['company_website_link'] = self.company_website_link
+        if self.dark_mode_icon_file_id is not None:
+            body['dark_mode_icon_file_id'] = self.dark_mode_icon_file_id
+        if self.dark_mode_icon_file_path is not None:
+            body['dark_mode_icon_file_path'] = self.dark_mode_icon_file_path
+        if self.description is not None: body['description'] = self.description
+        if self.icon_file_id is not None: body['icon_file_id'] = self.icon_file_id
+        if self.icon_file_path is not None: body['icon_file_path'] = self.icon_file_path
+        if self.id is not None: body['id'] = self.id
+        if self.is_featured is not None: body['is_featured'] = self.is_featured
+        if self.name is not None: body['name'] = self.name
+        if self.privacy_policy_link is not None: body['privacy_policy_link'] = self.privacy_policy_link
+        if self.published_by is not None: body['published_by'] = self.published_by
+        if self.support_contact_email is not None: body['support_contact_email'] = self.support_contact_email
+        if self.term_of_service_link is not None: body['term_of_service_link'] = self.term_of_service_link
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
@@ -1797,6 +2320,13 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegionInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegionInfo:
         """Deserializes the RegionInfo from a dictionary."""
@@ -1811,6 +2341,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RemoveExchangeForListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RemoveExchangeForListingResponse:
         """Deserializes the RemoveExchangeForListingResponse from a dictionary."""
@@ -1828,6 +2363,12 @@ def as_dict(self) -> dict:
         if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_repo_url is not None: body['git_repo_url'] = self.git_repo_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
@@ -1850,6 +2391,13 @@ def as_dict(self) -> dict:
         if self.repo_path is not None: body['repo_path'] = self.repo_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInstallation into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.repo_name is not None: body['repo_name'] = self.repo_name
+        if self.repo_path is not None: body['repo_path'] = self.repo_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInstallation:
         """Deserializes the RepoInstallation from a dictionary."""
@@ -1869,6 +2417,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchListingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listings: body['listings'] = self.listings
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchListingsResponse:
         """Deserializes the SearchListingsResponse from a dictionary."""
@@ -1889,6 +2444,13 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
@@ -1910,6 +2472,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
@@ -1938,6 +2507,16 @@ def as_dict(self) -> dict:
             body['shareCredentialsVersion'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body['shareCredentialsVersion'] = self.share_credentials_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenDetail:
         """Deserializes the TokenDetail from a dictionary."""
@@ -1983,6 +2562,18 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
         """Deserializes the TokenInfo from a dictionary."""
@@ -2008,6 +2599,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeFilterRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterRequest:
         """Deserializes the UpdateExchangeFilterRequest from a dictionary."""
@@ -2024,6 +2622,12 @@ def as_dict(self) -> dict:
         if self.filter: body['filter'] = self.filter.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeFilterResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter: body['filter'] = self.filter
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeFilterResponse:
         """Deserializes the UpdateExchangeFilterResponse from a dictionary."""
@@ -2043,6 +2647,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeRequest:
         """Deserializes the UpdateExchangeRequest from a dictionary."""
@@ -2059,6 +2670,12 @@ def as_dict(self) -> dict:
         if self.exchange: body['exchange'] = self.exchange.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExchangeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exchange: body['exchange'] = self.exchange
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExchangeResponse:
         """Deserializes the UpdateExchangeResponse from a dictionary."""
@@ -2084,6 +2701,15 @@ def as_dict(self) -> dict:
         if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInstallationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        if self.installation_id is not None: body['installation_id'] = self.installation_id
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.rotate_token is not None: body['rotate_token'] = self.rotate_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationRequest:
         """Deserializes the UpdateInstallationRequest from a dictionary."""
@@ -2103,6 +2729,12 @@ def as_dict(self) -> dict:
         if self.installation: body['installation'] = self.installation.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInstallationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.installation: body['installation'] = self.installation
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInstallationResponse:
         """Deserializes the UpdateInstallationResponse from a dictionary."""
@@ -2122,6 +2754,13 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateListingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingRequest:
         """Deserializes the UpdateListingRequest from a dictionary."""
@@ -2138,6 +2777,12 @@ def as_dict(self) -> dict:
         if self.listing: body['listing'] = self.listing.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateListingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing: body['listing'] = self.listing
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateListingResponse:
         """Deserializes the UpdateListingResponse from a dictionary."""
@@ -2166,6 +2811,16 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalizationRequestRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.listing_id is not None: body['listing_id'] = self.listing_id
+        if self.reason is not None: body['reason'] = self.reason
+        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.share: body['share'] = self.share
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestRequest:
         """Deserializes the UpdatePersonalizationRequestRequest from a dictionary."""
@@ -2186,6 +2841,12 @@ def as_dict(self) -> dict:
         if self.request: body['request'] = self.request.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalizationRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.request: body['request'] = self.request
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalizationRequestResponse:
         """Deserializes the UpdatePersonalizationRequestResponse from a dictionary."""
@@ -2208,6 +2869,13 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderAnalyticsDashboardRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardRequest:
         """Deserializes the UpdateProviderAnalyticsDashboardRequest from a dictionary."""
@@ -2232,6 +2900,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderAnalyticsDashboardResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderAnalyticsDashboardResponse:
         """Deserializes the UpdateProviderAnalyticsDashboardResponse from a dictionary."""
@@ -2253,6 +2929,13 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderRequest:
         """Deserializes the UpdateProviderRequest from a dictionary."""
@@ -2269,6 +2952,12 @@ def as_dict(self) -> dict:
         if self.provider: body['provider'] = self.provider.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProviderResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.provider: body['provider'] = self.provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProviderResponse:
         """Deserializes the UpdateProviderResponse from a dictionary."""
diff --git a/databricks/sdk/service/ml.py b/databricks/sdk/service/ml.py
index b2cec8126..e551c72ca 100755
--- a/databricks/sdk/service/ml.py
+++ b/databricks/sdk/service/ml.py
@@ -90,6 +90,21 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Activity into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity_type is not None: body['activity_type'] = self.activity_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.from_stage is not None: body['from_stage'] = self.from_stage
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.system_comment is not None: body['system_comment'] = self.system_comment
+        if self.to_stage is not None: body['to_stage'] = self.to_stage
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Activity:
         """Deserializes the Activity from a dictionary."""
@@ -177,6 +192,17 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApproveTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.archive_existing_versions is not None:
+            body['archive_existing_versions'] = self.archive_existing_versions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequest:
         """Deserializes the ApproveTransitionRequest from a dictionary."""
@@ -198,6 +224,12 @@ def as_dict(self) -> dict:
         if self.activity: body['activity'] = self.activity.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ApproveTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity: body['activity'] = self.activity
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ApproveTransitionRequestResponse:
         """Deserializes the ApproveTransitionRequestResponse from a dictionary."""
@@ -248,6 +280,18 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CommentObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.available_actions: body['available_actions'] = self.available_actions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CommentObject:
         """Deserializes the CommentObject from a dictionary."""
@@ -278,6 +322,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateComment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateComment:
         """Deserializes the CreateComment from a dictionary."""
@@ -295,6 +347,12 @@ def as_dict(self) -> dict:
         if self.comment: body['comment'] = self.comment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment: body['comment'] = self.comment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCommentResponse:
         """Deserializes the CreateCommentResponse from a dictionary."""
@@ -324,6 +382,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperiment:
         """Deserializes the CreateExperiment from a dictionary."""
@@ -343,6 +409,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateExperimentResponse:
         """Deserializes the CreateExperimentResponse from a dictionary."""
@@ -368,6 +440,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelRequest:
         """Deserializes the CreateModelRequest from a dictionary."""
@@ -386,6 +466,12 @@ def as_dict(self) -> dict:
         if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model: body['registered_model'] = self.registered_model
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelResponse:
         """Deserializes the CreateModelResponse from a dictionary."""
@@ -425,6 +511,17 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionRequest:
         """Deserializes the CreateModelVersionRequest from a dictionary."""
@@ -447,6 +544,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateModelVersionResponse:
         """Deserializes the CreateModelVersionResponse from a dictionary."""
@@ -515,6 +618,17 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRegistryWebhook:
         """Deserializes the CreateRegistryWebhook from a dictionary."""
@@ -550,6 +664,15 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRun:
         """Deserializes the CreateRun from a dictionary."""
@@ -570,6 +693,12 @@ def as_dict(self) -> dict:
         if self.run: body['run'] = self.run.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run: body['run'] = self.run
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRunResponse:
         """Deserializes the CreateRunResponse from a dictionary."""
@@ -607,6 +736,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequest:
         """Deserializes the CreateTransitionRequest from a dictionary."""
@@ -627,6 +765,12 @@ def as_dict(self) -> dict:
         if self.request: body['request'] = self.request.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.request: body['request'] = self.request
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTransitionRequestResponse:
         """Deserializes the CreateTransitionRequestResponse from a dictionary."""
@@ -643,6 +787,12 @@ def as_dict(self) -> dict:
         if self.webhook: body['webhook'] = self.webhook.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.webhook: body['webhook'] = self.webhook
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWebhookResponse:
         """Deserializes the CreateWebhookResponse from a dictionary."""
@@ -684,6 +834,17 @@ def as_dict(self) -> dict:
         if self.source_type is not None: body['source_type'] = self.source_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dataset into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.digest is not None: body['digest'] = self.digest
+        if self.name is not None: body['name'] = self.name
+        if self.profile is not None: body['profile'] = self.profile
+        if self.schema is not None: body['schema'] = self.schema
+        if self.source is not None: body['source'] = self.source
+        if self.source_type is not None: body['source_type'] = self.source_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dataset:
         """Deserializes the Dataset from a dictionary."""
@@ -710,6 +871,13 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatasetInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataset: body['dataset'] = self.dataset
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatasetInput:
         """Deserializes the DatasetInput from a dictionary."""
@@ -724,6 +892,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCommentResponse:
         """Deserializes the DeleteCommentResponse from a dictionary."""
@@ -741,6 +914,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExperiment:
         """Deserializes the DeleteExperiment from a dictionary."""
@@ -755,6 +934,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteExperimentResponse:
         """Deserializes the DeleteExperimentResponse from a dictionary."""
@@ -769,6 +953,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelResponse:
         """Deserializes the DeleteModelResponse from a dictionary."""
@@ -783,6 +972,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelTagResponse:
         """Deserializes the DeleteModelTagResponse from a dictionary."""
@@ -797,6 +991,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionResponse:
         """Deserializes the DeleteModelVersionResponse from a dictionary."""
@@ -811,6 +1010,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteModelVersionTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteModelVersionTagResponse:
         """Deserializes the DeleteModelVersionTagResponse from a dictionary."""
@@ -828,6 +1032,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRun:
         """Deserializes the DeleteRun from a dictionary."""
@@ -842,6 +1052,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunResponse:
         """Deserializes the DeleteRunResponse from a dictionary."""
@@ -869,6 +1084,14 @@ def as_dict(self) -> dict:
         if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.max_runs is not None: body['max_runs'] = self.max_runs
+        if self.max_timestamp_millis is not None: body['max_timestamp_millis'] = self.max_timestamp_millis
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRuns:
         """Deserializes the DeleteRuns from a dictionary."""
@@ -888,6 +1111,12 @@ def as_dict(self) -> dict:
         if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.runs_deleted is not None: body['runs_deleted'] = self.runs_deleted
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRunsResponse:
         """Deserializes the DeleteRunsResponse from a dictionary."""
@@ -909,6 +1138,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTag:
         """Deserializes the DeleteTag from a dictionary."""
@@ -923,6 +1159,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTagResponse:
         """Deserializes the DeleteTagResponse from a dictionary."""
@@ -937,6 +1178,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteTransitionRequestResponse:
         """Deserializes the DeleteTransitionRequestResponse from a dictionary."""
@@ -959,6 +1205,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWebhookResponse:
         """Deserializes the DeleteWebhookResponse from a dictionary."""
@@ -1001,6 +1252,18 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Experiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_location is not None: body['artifact_location'] = self.artifact_location
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.last_update_time is not None: body['last_update_time'] = self.last_update_time
+        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Experiment:
         """Deserializes the Experiment from a dictionary."""
@@ -1037,6 +1300,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlRequest:
         """Deserializes the ExperimentAccessControlRequest from a dictionary."""
@@ -1074,6 +1347,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentAccessControlResponse:
         """Deserializes the ExperimentAccessControlResponse from a dictionary."""
@@ -1101,6 +1385,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermission:
         """Deserializes the ExperimentPermission from a dictionary."""
@@ -1134,6 +1426,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissions:
         """Deserializes the ExperimentPermissions from a dictionary."""
@@ -1157,6 +1457,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsDescription:
         """Deserializes the ExperimentPermissionsDescription from a dictionary."""
@@ -1179,6 +1486,13 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentPermissionsRequest:
         """Deserializes the ExperimentPermissionsRequest from a dictionary."""
@@ -1202,6 +1516,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExperimentTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExperimentTag:
         """Deserializes the ExperimentTag from a dictionary."""
@@ -1227,6 +1548,14 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file_size is not None: body['file_size'] = self.file_size
+        if self.is_dir is not None: body['is_dir'] = self.is_dir
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileInfo:
         """Deserializes the FileInfo from a dictionary."""
@@ -1244,6 +1573,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExperimentPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentPermissionLevelsResponse:
         """Deserializes the GetExperimentPermissionLevelsResponse from a dictionary."""
@@ -1261,6 +1596,12 @@ def as_dict(self) -> dict:
         if self.experiment: body['experiment'] = self.experiment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment: body['experiment'] = self.experiment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetExperimentResponse:
         """Deserializes the GetExperimentResponse from a dictionary."""
@@ -1282,6 +1623,13 @@ def as_dict(self) -> dict:
         if self.stages: body['stages'] = [v for v in self.stages]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.stages: body['stages'] = self.stages
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsRequest:
         """Deserializes the GetLatestVersionsRequest from a dictionary."""
@@ -1300,6 +1648,12 @@ def as_dict(self) -> dict:
         if self.model_versions: body['model_versions'] = [v.as_dict() for v in self.model_versions]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetLatestVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetLatestVersionsResponse:
         """Deserializes the GetLatestVersionsResponse from a dictionary."""
@@ -1321,6 +1675,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetMetricHistoryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetMetricHistoryResponse:
         """Deserializes the GetMetricHistoryResponse from a dictionary."""
@@ -1339,6 +1700,13 @@ def as_dict(self) -> dict:
             body['registered_model_databricks'] = self.registered_model_databricks.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model_databricks:
+            body['registered_model_databricks'] = self.registered_model_databricks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelResponse:
         """Deserializes the GetModelResponse from a dictionary."""
@@ -1356,6 +1724,12 @@ def as_dict(self) -> dict:
         if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelVersionDownloadUriResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionDownloadUriResponse:
         """Deserializes the GetModelVersionDownloadUriResponse from a dictionary."""
@@ -1372,6 +1746,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetModelVersionResponse:
         """Deserializes the GetModelVersionResponse from a dictionary."""
@@ -1389,6 +1769,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRegisteredModelPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRegisteredModelPermissionLevelsResponse:
         """Deserializes the GetRegisteredModelPermissionLevelsResponse from a dictionary."""
@@ -1407,6 +1793,12 @@ def as_dict(self) -> dict:
         if self.run: body['run'] = self.run.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run: body['run'] = self.run
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRunResponse:
         """Deserializes the GetRunResponse from a dictionary."""
@@ -1444,6 +1836,16 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpUrlSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authorization is not None: body['authorization'] = self.authorization
+        if self.enable_ssl_verification is not None:
+            body['enable_ssl_verification'] = self.enable_ssl_verification
+        if self.secret is not None: body['secret'] = self.secret
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpec:
         """Deserializes the HttpUrlSpec from a dictionary."""
@@ -1473,6 +1875,14 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpUrlSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enable_ssl_verification is not None:
+            body['enable_ssl_verification'] = self.enable_ssl_verification
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> HttpUrlSpecWithoutSecret:
         """Deserializes the HttpUrlSpecWithoutSecret from a dictionary."""
@@ -1494,6 +1904,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the InputTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> InputTag:
         """Deserializes the InputTag from a dictionary."""
@@ -1520,6 +1937,14 @@ def as_dict(self) -> dict:
         if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_token is not None: body['access_token'] = self.access_token
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpec:
         """Deserializes the JobSpec from a dictionary."""
@@ -1545,6 +1970,13 @@ def as_dict(self) -> dict:
         if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the JobSpecWithoutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.job_id is not None: body['job_id'] = self.job_id
+        if self.workspace_url is not None: body['workspace_url'] = self.workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSpecWithoutSecret:
         """Deserializes the JobSpecWithoutSecret from a dictionary."""
@@ -1570,6 +2002,14 @@ def as_dict(self) -> dict:
         if self.root_uri is not None: body['root_uri'] = self.root_uri
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListArtifactsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.files: body['files'] = self.files
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.root_uri is not None: body['root_uri'] = self.root_uri
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListArtifactsResponse:
         """Deserializes the ListArtifactsResponse from a dictionary."""
@@ -1594,6 +2034,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListExperimentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiments: body['experiments'] = self.experiments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListExperimentsResponse:
         """Deserializes the ListExperimentsResponse from a dictionary."""
@@ -1615,6 +2062,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListModelsResponse:
         """Deserializes the ListModelsResponse from a dictionary."""
@@ -1637,6 +2091,13 @@ def as_dict(self) -> dict:
         if self.webhooks: body['webhooks'] = [v.as_dict() for v in self.webhooks]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRegistryWebhooks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.webhooks: body['webhooks'] = self.webhooks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRegistryWebhooks:
         """Deserializes the ListRegistryWebhooks from a dictionary."""
@@ -1655,6 +2116,12 @@ def as_dict(self) -> dict:
         if self.requests: body['requests'] = [v.as_dict() for v in self.requests]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTransitionRequestsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.requests: body['requests'] = self.requests
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTransitionRequestsResponse:
         """Deserializes the ListTransitionRequestsResponse from a dictionary."""
@@ -1687,6 +2154,15 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogBatch into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.params: body['params'] = self.params
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogBatch:
         """Deserializes the LogBatch from a dictionary."""
@@ -1704,6 +2180,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogBatchResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogBatchResponse:
         """Deserializes the LogBatchResponse from a dictionary."""
@@ -1725,6 +2206,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogInputs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.datasets: body['datasets'] = self.datasets
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogInputs:
         """Deserializes the LogInputs from a dictionary."""
@@ -1739,6 +2227,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogInputsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogInputsResponse:
         """Deserializes the LogInputsResponse from a dictionary."""
@@ -1777,6 +2270,17 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogMetric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogMetric:
         """Deserializes the LogMetric from a dictionary."""
@@ -1796,6 +2300,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogMetricResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogMetricResponse:
         """Deserializes the LogMetricResponse from a dictionary."""
@@ -1817,6 +2326,13 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_json is not None: body['model_json'] = self.model_json
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogModel:
         """Deserializes the LogModel from a dictionary."""
@@ -1831,6 +2347,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogModelResponse:
         """Deserializes the LogModelResponse from a dictionary."""
@@ -1861,6 +2382,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogParam into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogParam:
         """Deserializes the LogParam from a dictionary."""
@@ -1878,6 +2408,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LogParamResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LogParamResponse:
         """Deserializes the LogParamResponse from a dictionary."""
@@ -1907,6 +2442,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Metric into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.step is not None: body['step'] = self.step
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Metric:
         """Deserializes the Metric from a dictionary."""
@@ -1953,6 +2497,19 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Model into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.latest_versions: body['latest_versions'] = self.latest_versions
+        if self.name is not None: body['name'] = self.name
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Model:
         """Deserializes the Model from a dictionary."""
@@ -2010,6 +2567,21 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.latest_versions: body['latest_versions'] = self.latest_versions
+        if self.name is not None: body['name'] = self.name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDatabricks:
         """Deserializes the ModelDatabricks from a dictionary."""
@@ -2039,6 +2611,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelTag:
         """Deserializes the ModelTag from a dictionary."""
@@ -2106,6 +2685,25 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersion into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.current_stage is not None: body['current_stage'] = self.current_stage
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersion:
         """Deserializes the ModelVersion from a dictionary."""
@@ -2205,6 +2803,26 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.current_stage is not None: body['current_stage'] = self.current_stage
+        if self.description is not None: body['description'] = self.description
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_link is not None: body['run_link'] = self.run_link
+        if self.source is not None: body['source'] = self.source
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        if self.tags: body['tags'] = self.tags
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionDatabricks:
         """Deserializes the ModelVersionDatabricks from a dictionary."""
@@ -2247,6 +2865,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelVersionTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelVersionTag:
         """Deserializes the ModelVersionTag from a dictionary."""
@@ -2261,8 +2886,15 @@ class Param:
     value: Optional[str] = None
     """Value associated with this param."""
 
-    def as_dict(self) -> dict:
-        """Serializes the Param into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the Param into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Param into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.key is not None: body['key'] = self.key
         if self.value is not None: body['value'] = self.value
@@ -2309,6 +2941,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlRequest:
         """Deserializes the RegisteredModelAccessControlRequest from a dictionary."""
@@ -2346,6 +2988,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelAccessControlResponse:
         """Deserializes the RegisteredModelAccessControlResponse from a dictionary."""
@@ -2373,6 +3026,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermission:
         """Deserializes the RegisteredModelPermission from a dictionary."""
@@ -2408,6 +3069,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissions:
         """Deserializes the RegisteredModelPermissions from a dictionary."""
@@ -2431,6 +3100,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsDescription:
         """Deserializes the RegisteredModelPermissionsDescription from a dictionary."""
@@ -2453,6 +3129,13 @@ def as_dict(self) -> dict:
         if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegisteredModelPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.registered_model_id is not None: body['registered_model_id'] = self.registered_model_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegisteredModelPermissionsRequest:
         """Deserializes the RegisteredModelPermissionsRequest from a dictionary."""
@@ -2536,6 +3219,21 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.id is not None: body['id'] = self.id
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RegistryWebhook:
         """Deserializes the RegistryWebhook from a dictionary."""
@@ -2611,6 +3309,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RejectTransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequest:
         """Deserializes the RejectTransitionRequest from a dictionary."""
@@ -2631,6 +3338,12 @@ def as_dict(self) -> dict:
         if self.activity: body['activity'] = self.activity.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RejectTransitionRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activity: body['activity'] = self.activity
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RejectTransitionRequestResponse:
         """Deserializes the RejectTransitionRequestResponse from a dictionary."""
@@ -2652,6 +3365,13 @@ def as_dict(self) -> dict:
         if self.new_name is not None: body['new_name'] = self.new_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RenameModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelRequest:
         """Deserializes the RenameModelRequest from a dictionary."""
@@ -2668,6 +3388,12 @@ def as_dict(self) -> dict:
         if self.registered_model: body['registered_model'] = self.registered_model.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RenameModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.registered_model: body['registered_model'] = self.registered_model
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RenameModelResponse:
         """Deserializes the RenameModelResponse from a dictionary."""
@@ -2685,6 +3411,12 @@ def as_dict(self) -> dict:
         if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreExperiment:
         """Deserializes the RestoreExperiment from a dictionary."""
@@ -2699,6 +3431,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreExperimentResponse:
         """Deserializes the RestoreExperimentResponse from a dictionary."""
@@ -2716,6 +3453,12 @@ def as_dict(self) -> dict:
         if self.run_id is not None: body['run_id'] = self.run_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_id is not None: body['run_id'] = self.run_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRun:
         """Deserializes the RestoreRun from a dictionary."""
@@ -2730,6 +3473,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRunResponse:
         """Deserializes the RestoreRunResponse from a dictionary."""
@@ -2757,6 +3505,14 @@ def as_dict(self) -> dict:
         if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.max_runs is not None: body['max_runs'] = self.max_runs
+        if self.min_timestamp_millis is not None: body['min_timestamp_millis'] = self.min_timestamp_millis
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRuns:
         """Deserializes the RestoreRuns from a dictionary."""
@@ -2776,6 +3532,12 @@ def as_dict(self) -> dict:
         if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.runs_restored is not None: body['runs_restored'] = self.runs_restored
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreRunsResponse:
         """Deserializes the RestoreRunsResponse from a dictionary."""
@@ -2801,6 +3563,14 @@ def as_dict(self) -> dict:
         if self.inputs: body['inputs'] = self.inputs.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Run into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        if self.info: body['info'] = self.info
+        if self.inputs: body['inputs'] = self.inputs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Run:
         """Deserializes the Run from a dictionary."""
@@ -2828,6 +3598,14 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.metrics: body['metrics'] = self.metrics
+        if self.params: body['params'] = self.params
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunData:
         """Deserializes the RunData from a dictionary."""
@@ -2883,6 +3661,20 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.artifact_uri is not None: body['artifact_uri'] = self.artifact_uri
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.lifecycle_stage is not None: body['lifecycle_stage'] = self.lifecycle_stage
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInfo:
         """Deserializes the RunInfo from a dictionary."""
@@ -2918,6 +3710,12 @@ def as_dict(self) -> dict:
         if self.dataset_inputs: body['dataset_inputs'] = [v.as_dict() for v in self.dataset_inputs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunInputs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataset_inputs: body['dataset_inputs'] = self.dataset_inputs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunInputs:
         """Deserializes the RunInputs from a dictionary."""
@@ -2939,6 +3737,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RunTag:
         """Deserializes the RunTag from a dictionary."""
@@ -2975,6 +3780,16 @@ def as_dict(self) -> dict:
         if self.view_type is not None: body['view_type'] = self.view_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchExperiments into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.filter is not None: body['filter'] = self.filter
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.order_by: body['order_by'] = self.order_by
+        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.view_type is not None: body['view_type'] = self.view_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperiments:
         """Deserializes the SearchExperiments from a dictionary."""
@@ -3001,6 +3816,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchExperimentsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiments: body['experiments'] = self.experiments
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchExperimentsResponse:
         """Deserializes the SearchExperimentsResponse from a dictionary."""
@@ -3032,6 +3854,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchModelVersionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_versions: body['model_versions'] = self.model_versions
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelVersionsResponse:
         """Deserializes the SearchModelVersionsResponse from a dictionary."""
@@ -3054,6 +3883,13 @@ def as_dict(self) -> dict:
         if self.registered_models: body['registered_models'] = [v.as_dict() for v in self.registered_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchModelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.registered_models: body['registered_models'] = self.registered_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchModelsResponse:
         """Deserializes the SearchModelsResponse from a dictionary."""
@@ -3105,6 +3941,17 @@ def as_dict(self) -> dict:
         if self.run_view_type is not None: body['run_view_type'] = self.run_view_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchRuns into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_ids: body['experiment_ids'] = self.experiment_ids
+        if self.filter is not None: body['filter'] = self.filter
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.order_by: body['order_by'] = self.order_by
+        if self.page_token is not None: body['page_token'] = self.page_token
+        if self.run_view_type is not None: body['run_view_type'] = self.run_view_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRuns:
         """Deserializes the SearchRuns from a dictionary."""
@@ -3131,6 +3978,13 @@ def as_dict(self) -> dict:
         if self.runs: body['runs'] = [v.as_dict() for v in self.runs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SearchRunsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.runs: body['runs'] = self.runs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SearchRunsResponse:
         """Deserializes the SearchRunsResponse from a dictionary."""
@@ -3166,6 +4020,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetExperimentTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetExperimentTag:
         """Deserializes the SetExperimentTag from a dictionary."""
@@ -3182,6 +4044,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetExperimentTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetExperimentTagResponse:
         """Deserializes the SetExperimentTagResponse from a dictionary."""
@@ -3210,6 +4077,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelTagRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelTagRequest:
         """Deserializes the SetModelTagRequest from a dictionary."""
@@ -3224,6 +4099,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelTagResponse:
         """Deserializes the SetModelTagResponse from a dictionary."""
@@ -3256,6 +4136,15 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelVersionTagRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.name is not None: body['name'] = self.name
+        if self.value is not None: body['value'] = self.value
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagRequest:
         """Deserializes the SetModelVersionTagRequest from a dictionary."""
@@ -3273,6 +4162,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetModelVersionTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetModelVersionTagResponse:
         """Deserializes the SetModelVersionTagResponse from a dictionary."""
@@ -3305,6 +4199,15 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetTag:
         """Deserializes the SetTag from a dictionary."""
@@ -3322,6 +4225,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetTagResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetTagResponse:
         """Deserializes the SetTagResponse from a dictionary."""
@@ -3375,6 +4283,13 @@ def as_dict(self) -> dict:
         if self.status_code is not None: body['status_code'] = self.status_code
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.body is not None: body['body'] = self.body
+        if self.status_code is not None: body['status_code'] = self.status_code
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhook:
         """Deserializes the TestRegistryWebhook from a dictionary."""
@@ -3397,6 +4312,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhookRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.event is not None: body['event'] = self.event
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookRequest:
         """Deserializes the TestRegistryWebhookRequest from a dictionary."""
@@ -3414,6 +4336,12 @@ def as_dict(self) -> dict:
         if self.webhook: body['webhook'] = self.webhook.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TestRegistryWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.webhook: body['webhook'] = self.webhook
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TestRegistryWebhookResponse:
         """Deserializes the TestRegistryWebhookResponse from a dictionary."""
@@ -3456,6 +4384,17 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionModelVersionStageDatabricks into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.archive_existing_versions is not None:
+            body['archive_existing_versions'] = self.archive_existing_versions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.stage is not None: body['stage'] = self.stage
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionModelVersionStageDatabricks:
         """Deserializes the TransitionModelVersionStageDatabricks from a dictionary."""
@@ -3503,6 +4442,16 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.available_actions: body['available_actions'] = self.available_actions
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.to_stage is not None: body['to_stage'] = self.to_stage
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionRequest:
         """Deserializes the TransitionRequest from a dictionary."""
@@ -3523,6 +4472,12 @@ def as_dict(self) -> dict:
         if self.model_version: body['model_version'] = self.model_version.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransitionStageResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_version: body['model_version'] = self.model_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransitionStageResponse:
         """Deserializes the TransitionStageResponse from a dictionary."""
@@ -3544,6 +4499,13 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateComment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComment:
         """Deserializes the UpdateComment from a dictionary."""
@@ -3561,6 +4523,12 @@ def as_dict(self) -> dict:
         if self.comment: body['comment'] = self.comment.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCommentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment: body['comment'] = self.comment
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCommentResponse:
         """Deserializes the UpdateCommentResponse from a dictionary."""
@@ -3582,6 +4550,13 @@ def as_dict(self) -> dict:
         if self.new_name is not None: body['new_name'] = self.new_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExperiment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.experiment_id is not None: body['experiment_id'] = self.experiment_id
+        if self.new_name is not None: body['new_name'] = self.new_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExperiment:
         """Deserializes the UpdateExperiment from a dictionary."""
@@ -3596,6 +4571,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateExperimentResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateExperimentResponse:
         """Deserializes the UpdateExperimentResponse from a dictionary."""
@@ -3617,6 +4597,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelRequest:
         """Deserializes the UpdateModelRequest from a dictionary."""
@@ -3631,6 +4618,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelResponse:
         """Deserializes the UpdateModelResponse from a dictionary."""
@@ -3656,6 +4648,14 @@ def as_dict(self) -> dict:
         if self.version is not None: body['version'] = self.version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.version is not None: body['version'] = self.version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionRequest:
         """Deserializes the UpdateModelVersionRequest from a dictionary."""
@@ -3672,6 +4672,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateModelVersionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateModelVersionResponse:
         """Deserializes the UpdateModelVersionResponse from a dictionary."""
@@ -3740,6 +4745,17 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRegistryWebhook into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.events: body['events'] = self.events
+        if self.http_url_spec: body['http_url_spec'] = self.http_url_spec
+        if self.id is not None: body['id'] = self.id
+        if self.job_spec: body['job_spec'] = self.job_spec
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRegistryWebhook:
         """Deserializes the UpdateRegistryWebhook from a dictionary."""
@@ -3775,6 +4791,15 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRun into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time is not None: body['end_time'] = self.end_time
+        if self.run_id is not None: body['run_id'] = self.run_id
+        if self.run_uuid is not None: body['run_uuid'] = self.run_uuid
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRun:
         """Deserializes the UpdateRun from a dictionary."""
@@ -3795,6 +4820,12 @@ def as_dict(self) -> dict:
         if self.run_info: body['run_info'] = self.run_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRunResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.run_info: body['run_info'] = self.run_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRunResponse:
         """Deserializes the UpdateRunResponse from a dictionary."""
@@ -3819,6 +4850,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWebhookResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateWebhookResponse:
         """Deserializes the UpdateWebhookResponse from a dictionary."""
@@ -4596,7 +5632,8 @@ def set_permissions(
     ) -> ExperimentPermissions:
         """Set experiment permissions.
         
-        Sets permissions on an experiment. Experiments can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param experiment_id: str
           The experiment for which to get or manage permissions.
@@ -5571,8 +6608,8 @@ def set_permissions(
     ) -> RegisteredModelPermissions:
         """Set registered model permissions.
         
-        Sets permissions on a registered model. Registered models can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
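
The reworded docstrings describe replace semantics rather than additive grants: each call overwrites all direct permissions on the object, and omitting entries clears them. A usage sketch, assuming a configured `WorkspaceClient` and a placeholder registered model id:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.ml import (RegisteredModelAccessControlRequest,
                                           RegisteredModelPermissionLevel)

    w = WorkspaceClient()

    # Replaces every direct grant on the model with this single entry.
    w.model_registry.set_permissions(
        registered_model_id='<registered-model-id>',
        access_control_list=[
            RegisteredModelAccessControlRequest(
                group_name='data-scientists',
                permission_level=RegisteredModelPermissionLevel.CAN_READ)
        ])

    # An empty list deletes all direct permissions; only inherited ones remain.
    w.model_registry.set_permissions(registered_model_id='<registered-model-id>',
                                     access_control_list=[])
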
diff --git a/databricks/sdk/service/oauth2.py b/databricks/sdk/service/oauth2.py
index 0c439ae7e..37d464af6 100755
--- a/databricks/sdk/service/oauth2.py
+++ b/databricks/sdk/service/oauth2.py
@@ -31,6 +31,10 @@ class CreateCustomAppIntegration:
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the CreateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -39,6 +43,19 @@ def as_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.confidential is not None: body['confidential'] = self.confidential
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
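
Note the list handling as well: `as_dict` copies primitive lists (`[v for v in ...]`), whereas `as_shallow_dict` returns the attribute's list object itself, so callers share state with the dataclass. A small sketch of that aliasing, with a hypothetical stand-in class:

    from dataclasses import dataclass, field
    from typing import List

    @dataclass
    class Integration:
        redirect_urls: List[str] = field(default_factory=list)

        def as_dict(self) -> dict:
            # Copies the list, detaching the result from the dataclass.
            return {'redirect_urls': [v for v in self.redirect_urls]}

        def as_shallow_dict(self) -> dict:
            # Returns the same list object: later mutations show up on both sides.
            return {'redirect_urls': self.redirect_urls}

    app = Integration(redirect_urls=['https://example.com/cb'])
    deep, shallow = app.as_dict(), app.as_shallow_dict()
    shallow['redirect_urls'].append('https://example.com/cb2')
    assert app.redirect_urls == ['https://example.com/cb', 'https://example.com/cb2']
    assert deep['redirect_urls'] == ['https://example.com/cb']
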
@@ -48,7 +65,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegration:
                    name=d.get('name', None),
                    redirect_urls=d.get('redirect_urls', None),
                    scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
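
A usage sketch for the new field, assuming an account-level client and the generated `custom_app_integration.create` wrapper; all values are placeholders:

    from databricks.sdk import AccountClient

    a = AccountClient()

    integration = a.custom_app_integration.create(
        name='my-app',
        redirect_urls=['https://example.com/callback'],
        scopes=['openid', 'profile', 'email'],
        confidential=True,
        # Subset of `scopes` that each end user must consent to at authorization time.
        user_authorized_scopes=['email'])
    print(integration.client_id)
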
@@ -71,6 +89,14 @@ def as_dict(self) -> dict:
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.client_secret is not None: body['client_secret'] = self.client_secret
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomAppIntegrationOutput:
         """Deserializes the CreateCustomAppIntegrationOutput from a dictionary."""
@@ -94,6 +120,13 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegration:
         """Deserializes the CreatePublishedAppIntegration from a dictionary."""
@@ -112,6 +145,12 @@ def as_dict(self) -> dict:
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePublishedAppIntegrationOutput:
         """Deserializes the CreatePublishedAppIntegrationOutput from a dictionary."""
@@ -149,6 +188,17 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateServicePrincipalSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.id is not None: body['id'] = self.id
+        if self.secret is not None: body['secret'] = self.secret
+        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
+        if self.status is not None: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse:
         """Deserializes the CreateServicePrincipalSecretResponse from a dictionary."""
@@ -161,32 +211,15 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServicePrincipalSecretResponse:
 
 
 @dataclass
-class DataPlaneInfo:
-    authorization_details: Optional[str] = None
-    """Authorization details as a string."""
-
-    endpoint_url: Optional[str] = None
-    """The URL of the endpoint for this operation in the dataplane."""
+class DeleteCustomAppIntegrationOutput:
 
     def as_dict(self) -> dict:
-        """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DeleteCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
-        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
-        """Deserializes the DataPlaneInfo from a dictionary."""
-        return cls(authorization_details=d.get('authorization_details', None),
-                   endpoint_url=d.get('endpoint_url', None))
-
-
-@dataclass
-class DeleteCustomAppIntegrationOutput:
-
-    def as_dict(self) -> dict:
-        """Serializes the DeleteCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
         body = {}
         return body
 
@@ -204,6 +237,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePublishedAppIntegrationOutput:
         """Deserializes the DeletePublishedAppIntegrationOutput from a dictionary."""
@@ -218,12 +256,76 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
         return cls()
 
 
+@dataclass
+class FederationPolicy:
+    create_time: Optional[str] = None
+    """Creation time of the federation policy."""
+
+    description: Optional[str] = None
+    """Description of the federation policy."""
+
+    name: Optional[str] = None
+    """Resource name for the federation policy. Example values include
+    `accounts//federationPolicies/my-federation-policy` for Account Federation Policies,
+    and
+    `accounts//servicePrincipals//federationPolicies/my-federation-policy`
+    for Service Principal Federation Policies. Typically an output parameter, which does not need to
+    be specified in create or update requests. If specified in a request, must match the value in
+    the request URL."""
+
+    oidc_policy: Optional[OidcFederationPolicy] = None
+    """Specifies the policy to use for validating OIDC claims in your federated tokens."""
+
+    uid: Optional[str] = None
+    """Unique, immutable id of the federation policy."""
+
+    update_time: Optional[str] = None
+    """Last update time of the federation policy."""
+
+    def as_dict(self) -> dict:
+        """Serializes the FederationPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy.as_dict()
+        if self.uid is not None: body['uid'] = self.uid
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FederationPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.oidc_policy: body['oidc_policy'] = self.oidc_policy
+        if self.uid is not None: body['uid'] = self.uid
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> FederationPolicy:
+        """Deserializes the FederationPolicy from a dictionary."""
+        return cls(create_time=d.get('create_time', None),
+                   description=d.get('description', None),
+                   name=d.get('name', None),
+                   oidc_policy=_from_dict(d, 'oidc_policy', OidcFederationPolicy),
+                   uid=d.get('uid', None),
+                   update_time=d.get('update_time', None))
+
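+# Illustrative sketch (not part of the generated SDK): the difference between
+# as_dict and as_shallow_dict. as_dict recursively serializes nested messages
+# (oidc_policy becomes a plain dict), while as_shallow_dict keeps the nested
+# dataclass instance as-is. The input policy is assumed to have oidc_policy set.
+def _example_deep_vs_shallow(policy: FederationPolicy) -> None:
+    deep = policy.as_dict()
+    shallow = policy.as_shallow_dict()
+    if policy.oidc_policy is not None:
+        assert isinstance(deep['oidc_policy'], dict)
+        assert isinstance(shallow['oidc_policy'], OidcFederationPolicy)
+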
+
 @dataclass
 class GetCustomAppIntegrationOutput:
     client_id: Optional[str] = None
@@ -252,6 +354,10 @@ class GetCustomAppIntegrationOutput:
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the GetCustomAppIntegrationOutput into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -265,6 +371,24 @@ def as_dict(self) -> dict:
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.confidential is not None: body['confidential'] = self.confidential
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.creator_username is not None: body['creator_username'] = self.creator_username
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
@@ -279,7 +403,8 @@ def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationOutput:
                    name=d.get('name', None),
                    redirect_urls=d.get('redirect_urls', None),
                    scopes=d.get('scopes', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
@@ -296,6 +421,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCustomAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCustomAppIntegrationsOutput:
         """Deserializes the GetCustomAppIntegrationsOutput from a dictionary."""
@@ -332,6 +464,17 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.name is not None: body['name'] = self.name
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationOutput:
         """Deserializes the GetPublishedAppIntegrationOutput from a dictionary."""
@@ -357,6 +500,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppIntegrationsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppIntegrationsOutput:
         """Deserializes the GetPublishedAppIntegrationsOutput from a dictionary."""
@@ -380,6 +530,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPublishedAppsOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apps: body['apps'] = self.apps
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
         """Deserializes the GetPublishedAppsOutput from a dictionary."""
@@ -387,21 +544,118 @@ def from_dict(cls, d: Dict[str, any]) -> GetPublishedAppsOutput:
                    next_page_token=d.get('next_page_token', None))
 
 
+@dataclass
+class ListFederationPoliciesResponse:
+    next_page_token: Optional[str] = None
+
+    policies: Optional[List[FederationPolicy]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ListFederationPoliciesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = [v.as_dict() for v in self.policies]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListFederationPoliciesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.policies: body['policies'] = self.policies
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ListFederationPoliciesResponse:
+        """Deserializes the ListFederationPoliciesResponse from a dictionary."""
+        return cls(next_page_token=d.get('next_page_token', None),
+                   policies=_repeated_dict(d, 'policies', FederationPolicy))
+
+
 @dataclass
 class ListServicePrincipalSecretsResponse:
+    next_page_token: Optional[str] = None
+    """A token, which can be sent as `page_token` to retrieve the next page."""
+
     secrets: Optional[List[SecretInfo]] = None
     """List of the secrets"""
 
     def as_dict(self) -> dict:
         """Serializes the ListServicePrincipalSecretsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListServicePrincipalSecretsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.secrets: body['secrets'] = self.secrets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListServicePrincipalSecretsResponse:
         """Deserializes the ListServicePrincipalSecretsResponse from a dictionary."""
-        return cls(secrets=_repeated_dict(d, 'secrets', SecretInfo))
+        return cls(next_page_token=d.get('next_page_token', None),
+                   secrets=_repeated_dict(d, 'secrets', SecretInfo))
+
+
+@dataclass
+class OidcFederationPolicy:
+    """Specifies the policy to use for validating OIDC claims in your federated tokens."""
+
+    audiences: Optional[List[str]] = None
+    """The allowed token audiences, as specified in the 'aud' claim of federated tokens. The audience
+    identifier is intended to represent the recipient of the token. Can be any non-empty string
+    value. As long as the audience in the token matches at least one audience in the policy, the
+    token is considered a match. If audiences is unspecified, defaults to your Databricks account
+    id."""
+
+    issuer: Optional[str] = None
+    """The required token issuer, as specified in the 'iss' claim of federated tokens."""
+
+    jwks_json: Optional[str] = None
+    """The public keys used to validate the signature of federated tokens, in JWKS format. If
+    unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s
+    well-known endpoint. Databricks strongly recommends relying on your issuer’s well-known
+    endpoint for discovering public keys."""
+
+    subject: Optional[str] = None
+    """The required token subject, as specified in the subject claim of federated tokens. Must be
+    specified for service principal federation policies. Must not be specified for account
+    federation policies."""
+
+    subject_claim: Optional[str] = None
+    """The claim that contains the subject of the token. If unspecified, the default value is 'sub'."""
+
+    def as_dict(self) -> dict:
+        """Serializes the OidcFederationPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.audiences: body['audiences'] = [v for v in self.audiences]
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OidcFederationPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.audiences: body['audiences'] = self.audiences
+        if self.issuer is not None: body['issuer'] = self.issuer
+        if self.jwks_json is not None: body['jwks_json'] = self.jwks_json
+        if self.subject is not None: body['subject'] = self.subject
+        if self.subject_claim is not None: body['subject_claim'] = self.subject_claim
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> OidcFederationPolicy:
+        """Deserializes the OidcFederationPolicy from a dictionary."""
+        return cls(audiences=d.get('audiences', None),
+                   issuer=d.get('issuer', None),
+                   jwks_json=d.get('jwks_json', None),
+                   subject=d.get('subject', None),
+                   subject_claim=d.get('subject_claim', None))
 
 
 @dataclass
@@ -441,6 +695,19 @@ def as_dict(self) -> dict:
         if self.scopes: body['scopes'] = [v for v in self.scopes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublishedAppOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.app_id is not None: body['app_id'] = self.app_id
+        if self.client_id is not None: body['client_id'] = self.client_id
+        if self.description is not None: body['description'] = self.description
+        if self.is_confidential_client is not None:
+            body['is_confidential_client'] = self.is_confidential_client
+        if self.name is not None: body['name'] = self.name
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublishedAppOutput:
         """Deserializes the PublishedAppOutput from a dictionary."""
@@ -480,6 +747,16 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.id is not None: body['id'] = self.id
+        if self.secret_hash is not None: body['secret_hash'] = self.secret_hash
+        if self.status is not None: body['status'] = self.status
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretInfo:
         """Deserializes the SecretInfo from a dictionary."""
@@ -507,6 +784,15 @@ def as_dict(self) -> dict:
             body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_token_ttl_in_minutes is not None:
+            body['access_token_ttl_in_minutes'] = self.access_token_ttl_in_minutes
+        if self.refresh_token_ttl_in_minutes is not None:
+            body['refresh_token_ttl_in_minutes'] = self.refresh_token_ttl_in_minutes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessPolicy:
         """Deserializes the TokenAccessPolicy from a dictionary."""
@@ -521,15 +807,36 @@ class UpdateCustomAppIntegration:
     redirect_urls: Optional[List[str]] = None
     """List of OAuth redirect urls to be updated in the custom OAuth app integration"""
 
+    scopes: Optional[List[str]] = None
+    """List of OAuth scopes to be updated in the custom OAuth app integration, similar to redirect URIs
+    this will fully replace the existing values instead of appending"""
+
     token_access_policy: Optional[TokenAccessPolicy] = None
     """Token access policy to be updated in the custom OAuth app integration"""
 
+    user_authorized_scopes: Optional[List[str]] = None
+    """Scopes that will need to be consented by end user to mint the access token. If the user does not
+    authorize the access token will not be minted. Must be a subset of scopes."""
+
     def as_dict(self) -> dict:
         """Serializes the UpdateCustomAppIntegration into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.integration_id is not None: body['integration_id'] = self.integration_id
         if self.redirect_urls: body['redirect_urls'] = [v for v in self.redirect_urls]
+        if self.scopes: body['scopes'] = [v for v in self.scopes]
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
+        if self.user_authorized_scopes:
+            body['user_authorized_scopes'] = [v for v in self.user_authorized_scopes]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCustomAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.redirect_urls: body['redirect_urls'] = self.redirect_urls
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        if self.user_authorized_scopes: body['user_authorized_scopes'] = self.user_authorized_scopes
         return body
 
     @classmethod
@@ -537,7 +844,9 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegration:
         """Deserializes the UpdateCustomAppIntegration from a dictionary."""
         return cls(integration_id=d.get('integration_id', None),
                    redirect_urls=d.get('redirect_urls', None),
-                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy))
+                   scopes=d.get('scopes', None),
+                   token_access_policy=_from_dict(d, 'token_access_policy', TokenAccessPolicy),
+                   user_authorized_scopes=d.get('user_authorized_scopes', None))
 
 
 @dataclass
@@ -548,6 +857,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCustomAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCustomAppIntegrationOutput:
         """Deserializes the UpdateCustomAppIntegrationOutput from a dictionary."""
@@ -568,6 +882,13 @@ def as_dict(self) -> dict:
         if self.token_access_policy: body['token_access_policy'] = self.token_access_policy.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePublishedAppIntegration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_id is not None: body['integration_id'] = self.integration_id
+        if self.token_access_policy: body['token_access_policy'] = self.token_access_policy
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegration:
         """Deserializes the UpdatePublishedAppIntegration from a dictionary."""
@@ -583,12 +904,179 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePublishedAppIntegrationOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePublishedAppIntegrationOutput:
         """Deserializes the UpdatePublishedAppIntegrationOutput from a dictionary."""
         return cls()
 
 
+class AccountFederationPolicyAPI:
+    """These APIs manage account federation policies.
+    
+    Account federation policies allow users and service principals in your Databricks account to securely
+    access Databricks APIs using tokens from your trusted identity providers (IdPs).
+    
+    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+    synchronized into your Databricks account.
+    
+    Token federation is configured in your Databricks account using an account federation policy. An account
+    federation policy specifies:
+    
+    * which IdP, or issuer, your Databricks account should accept tokens from
+    * how to determine which Databricks user, or subject, a token is issued for
+    
+    To configure a federation policy, you provide the following:
+    
+    * The required token __issuer__, as specified in the “iss” claim of your tokens. The issuer is an
+      https URL that identifies your IdP.
+    * The allowed token __audiences__, as specified in the “aud” claim of your tokens. This identifier
+      is intended to represent the recipient of the token. As long as the audience in the token matches
+      at least one audience in the policy, the token is considered a match. If unspecified, the default
+      value is your Databricks account id.
+    * The __subject claim__, which indicates which token claim contains the Databricks username of the
+      user the token was issued for. If unspecified, the default value is “sub”.
+    * Optionally, the public keys used to validate the signature of your tokens, in JWKS format. If
+      unspecified (recommended), Databricks automatically fetches the public keys from your issuer’s
+      well-known endpoint. Databricks strongly recommends relying on your issuer’s well-known endpoint
+      for discovering public keys.
+    
+    An example federation policy is:
+    
+    ```
+    issuer: "https://idp.mycompany.com/oidc"
+    audiences: ["databricks"]
+    subject_claim: "sub"
+    ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks
+    as user `username@mycompany.com` is:
+    
+    ```
+    { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub": "username@mycompany.com" }
+    ```
+    
+    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+    your users do not already have the ability to generate tokens that are compatible with your federation
+    policy.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               policy_id: Optional[str] = None) -> FederationPolicy:
+        """Create account federation policy.
+        
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict() if policy is not None else None
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
+                           query=query,
+                           body=body,
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def delete(self, policy_id: str):
+        """Delete account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do('DELETE',
+                     f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                     headers=headers)
+
+    def get(self, policy_id: str) -> FederationPolicy:
+        """Get account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def list(self,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+        """List account federation policies.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.0/accounts/{self._api.account_id}/federationPolicies',
+                                query=query,
+                                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield FederationPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self,
+               policy_id: str,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               update_mask: Optional[str] = None) -> FederationPolicy:
+        """Update account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict() if policy is not None else None
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           f'/api/2.0/accounts/{self._api.account_id}/federationPolicies/{policy_id}',
+                           query=query,
+                           body=body,
+                           headers=headers)
+        return FederationPolicy.from_dict(res)
+
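+# Illustrative usage sketch (not part of the generated SDK): creating and then
+# updating an account federation policy with the API above. The issuer,
+# audience, and policy id values are hypothetical placeholders.
+def _example_account_federation_policy(api: AccountFederationPolicyAPI) -> FederationPolicy:
+    policy = FederationPolicy(description='Tokens from the corporate IdP',
+                              oidc_policy=OidcFederationPolicy(issuer='https://idp.mycompany.com/oidc',
+                                                               audiences=['databricks'],
+                                                               subject_claim='sub'))
+    created = api.create(policy=policy, policy_id='my-federation-policy')
+    # update_mask restricts the replacement to the listed fields
+    # (comma-separated, no spaces).
+    created.description = 'Tokens from the corporate OIDC IdP'
+    return api.update(policy_id='my-federation-policy', policy=created, update_mask='description')
+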
+
 class CustomAppIntegrationAPI:
     """These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud."""
@@ -602,7 +1090,8 @@ def create(self,
                name: Optional[str] = None,
                redirect_urls: Optional[List[str]] = None,
                scopes: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None) -> CreateCustomAppIntegrationOutput:
+               token_access_policy: Optional[TokenAccessPolicy] = None,
+               user_authorized_scopes: Optional[List[str]] = None) -> CreateCustomAppIntegrationOutput:
         """Create Custom OAuth App Integration.
         
         Create Custom OAuth App Integration.
@@ -620,6 +1109,9 @@ def create(self,
           profile, email.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that will need to be consented to by the end user to mint the access token. If the user
+          does not authorize, the access token will not be minted. Must be a subset of scopes.
         
         :returns: :class:`CreateCustomAppIntegrationOutput`
         """
@@ -629,6 +1121,8 @@ def create(self,
         if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
         if scopes is not None: body['scopes'] = [v for v in scopes]
         if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if user_authorized_scopes is not None:
+            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('POST',
@@ -661,6 +1155,7 @@ def get(self, integration_id: str) -> GetCustomAppIntegrationOutput:
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
+          The OAuth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         """
@@ -711,7 +1206,9 @@ def update(self,
                integration_id: str,
                *,
                redirect_urls: Optional[List[str]] = None,
-               token_access_policy: Optional[TokenAccessPolicy] = None):
+               scopes: Optional[List[str]] = None,
+               token_access_policy: Optional[TokenAccessPolicy] = None,
+               user_authorized_scopes: Optional[List[str]] = None):
         """Updates Custom OAuth App Integration.
         
         Updates an existing custom OAuth App Integration. You can retrieve the custom OAuth app integration
@@ -720,14 +1217,23 @@ def update(self,
         :param integration_id: str
         :param redirect_urls: List[str] (optional)
           List of OAuth redirect urls to be updated in the custom OAuth app integration
+        :param scopes: List[str] (optional)
+          List of OAuth scopes to be updated in the custom OAuth app integration. As with redirect URLs,
+          this will fully replace the existing values instead of appending.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that will need to be consented to by the end user to mint the access token. If the user
+          does not authorize, the access token will not be minted. Must be a subset of scopes.
         
         
         """
         body = {}
         if redirect_urls is not None: body['redirect_urls'] = [v for v in redirect_urls]
+        if scopes is not None: body['scopes'] = [v for v in scopes]
         if token_access_policy is not None: body['token_access_policy'] = token_access_policy.as_dict()
+        if user_authorized_scopes is not None:
+            body['user_authorized_scopes'] = [v for v in user_authorized_scopes]
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         self._api.do(
@@ -905,6 +1411,186 @@ def update(self, integration_id: str, *, token_access_policy: Optional[TokenAcce
             headers=headers)
 
 
+class ServicePrincipalFederationPolicyAPI:
+    """These APIs manage service principal federation policies.
+    
+    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+    Databricks service principal, using tokens provided by the workload runtime.
+    
+    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+    possible. Workload Identity Federation is supported by many popular services, including GitHub Actions,
+    Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+    
+    Workload identity federation is configured in your Databricks account using a service principal
+    federation policy. A service principal federation policy specifies:
+    
+    * which IdP, or issuer, the service principal is allowed to authenticate from
+    * which workload identity, or subject, is allowed to authenticate as the Databricks service principal
+    
+    To configure a federation policy, you provide the following:
+    
+    * The required token __issuer__, as specified in the “iss” claim of workload identity tokens. The
+      issuer is an https URL that identifies the workload identity provider.
+    * The required token __subject__, as specified in the “sub” claim of workload identity tokens. The
+      subject uniquely identifies the workload in the workload runtime environment.
+    * The allowed token __audiences__, as specified in the “aud” claim of workload identity tokens. The
+      audience is intended to represent the recipient of the token. As long as the audience in the token
+      matches at least one audience in the policy, the token is considered a match. If unspecified, the
+      default value is your Databricks account id.
+    * Optionally, the public keys used to validate the signature of the workload identity tokens, in
+      JWKS format. If unspecified (recommended), Databricks automatically fetches the public keys from
+      the issuer’s well-known endpoint. Databricks strongly recommends relying on the issuer’s well-known
+      endpoint for discovering public keys.
+    
+    An example service principal federation policy, for a GitHub Actions workload, is:
+    
+    ```
+    issuer: "https://token.actions.githubusercontent.com"
+    audiences: ["https://github.com/my-github-org"]
+    subject: "repo:my-github-org/my-repo:environment:prod"
+    ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks is:
+    
+    ```
+    {
+      "iss": "https://token.actions.githubusercontent.com",
+      "aud": "https://github.com/my-github-org",
+      "sub": "repo:my-github-org/my-repo:environment:prod"
+    }
+    ```
+    
+    You may also need to configure the workload runtime to generate tokens for your workloads.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create(self,
+               service_principal_id: int,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               policy_id: Optional[str] = None) -> FederationPolicy:
+        """Create service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase alphanumeric
+          characters, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict() if policy is not None else None
+        query = {}
+        if policy_id is not None: query['policy_id'] = policy_id
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'POST',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+            query=query,
+            body=body,
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def delete(self, service_principal_id: int, policy_id: str):
+        """Delete service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            headers=headers)
+
+    def get(self, service_principal_id: int, policy_id: str) -> FederationPolicy:
+        """Get service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+    def list(self,
+             service_principal_id: int,
+             *,
+             page_size: Optional[int] = None,
+             page_token: Optional[str] = None) -> Iterator[FederationPolicy]:
+        """List service principal federation policies.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        """
+
+        query = {}
+        if page_size is not None: query['page_size'] = page_size
+        if page_token is not None: query['page_token'] = page_token
+        headers = {'Accept': 'application/json', }
+
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies',
+                query=query,
+                headers=headers)
+            if 'policies' in json:
+                for v in json['policies']:
+                    yield FederationPolicy.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
+
+    def update(self,
+               service_principal_id: int,
+               policy_id: str,
+               *,
+               policy: Optional[FederationPolicy] = None,
+               update_mask: Optional[str] = None) -> FederationPolicy:
+        """Update service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        """
+        body = policy.as_dict() if policy is not None else None
+        query = {}
+        if update_mask is not None: query['update_mask'] = update_mask
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/federationPolicies/{policy_id}',
+            query=query,
+            body=body,
+            headers=headers)
+        return FederationPolicy.from_dict(res)
+
+
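+# Illustrative usage sketch (not part of the generated SDK): allowing a GitHub
+# Actions workflow to authenticate as a Databricks service principal. The
+# service principal id, organization, repository, and environment values are
+# hypothetical placeholders.
+def _example_sp_federation_policy(api: ServicePrincipalFederationPolicyAPI) -> FederationPolicy:
+    policy = FederationPolicy(oidc_policy=OidcFederationPolicy(
+        issuer='https://token.actions.githubusercontent.com',
+        audiences=['https://github.com/my-github-org'],
+        subject='repo:my-github-org/my-repo:environment:prod'))
+    return api.create(service_principal_id=12345, policy=policy)
+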
 class ServicePrincipalSecretsAPI:
     """These APIs enable administrators to manage service principal secrets.
     
@@ -960,7 +1646,7 @@ def delete(self, service_principal_id: int, secret_id: str):
             f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets/{secret_id}',
             headers=headers)
 
-    def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
+    def list(self, service_principal_id: int, *, page_token: Optional[str] = None) -> Iterator[SecretInfo]:
         """List service principal secrets.
         
         List all secrets associated with the given service principal. This operation only returns information
@@ -968,15 +1654,30 @@ def list(self, service_principal_id: int) -> Iterator[SecretInfo]:
         
         :param service_principal_id: int
           The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
         
         :returns: Iterator over :class:`SecretInfo`
         """
 
+        query = {}
+        if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do(
-            'GET',
-            f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
-            headers=headers)
-        parsed = ListServicePrincipalSecretsResponse.from_dict(json).secrets
-        return parsed if parsed is not None else []
+        while True:
+            json = self._api.do(
+                'GET',
+                f'/api/2.0/accounts/{self._api.account_id}/servicePrincipals/{service_principal_id}/credentials/secrets',
+                query=query,
+                headers=headers)
+            if 'secrets' in json:
+                for v in json['secrets']:
+                    yield SecretInfo.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
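+
+
+# Illustrative sketch (not part of the generated SDK): consuming the paginated
+# list() generator above. The generator follows next_page_token transparently,
+# so callers only iterate; the service principal id is a hypothetical value.
+def _example_list_all_secret_ids(api: ServicePrincipalSecretsAPI) -> List[str]:
+    return [s.id for s in api.list(service_principal_id=12345) if s.id is not None]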
diff --git a/databricks/sdk/service/pipelines.py b/databricks/sdk/service/pipelines.py
index 9c12f8788..db5d698d6 100755
--- a/databricks/sdk/service/pipelines.py
+++ b/databricks/sdk/service/pipelines.py
@@ -11,7 +11,7 @@
 from typing import Callable, Dict, Iterator, List, Optional
 
 from ..errors import OperationFailed
-from ._internal import Wait, _enum, _from_dict, _repeated_dict
+from ._internal import Wait, _enum, _from_dict, _repeated_dict, _repeated_enum
 
 _LOG = logging.getLogger('databricks.sdk')
 
@@ -61,7 +61,7 @@ class CreatePipeline:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -82,6 +82,17 @@ class CreatePipeline:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
+    run_as: Optional[RunAs] = None
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -122,6 +133,8 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -129,6 +142,37 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePipeline into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.dry_run is not None: body['dry_run'] = self.dry_run
+        if self.edition is not None: body['edition'] = self.edition
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
         """Deserializes the CreatePipeline from a dictionary."""
@@ -151,6 +195,8 @@ def from_dict(cls, d: Dict[str, any]) -> CreatePipeline:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
+                   run_as=_from_dict(d, 'run_as', RunAs),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -173,6 +219,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.effective_settings: body['effective_settings'] = self.effective_settings
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePipelineResponse:
         """Deserializes the CreatePipelineResponse from a dictionary."""
@@ -193,6 +246,13 @@ def as_dict(self) -> dict:
         if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CronTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.quartz_cron_schedule is not None: body['quartz_cron_schedule'] = self.quartz_cron_schedule
+        if self.timezone_id is not None: body['timezone_id'] = self.timezone_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CronTrigger:
         """Deserializes the CronTrigger from a dictionary."""
@@ -215,12 +275,32 @@ def as_dict(self) -> dict:
         if self.seq_no is not None: body['seq_no'] = self.seq_no
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.instance is not None: body['instance'] = self.instance
+        if self.seq_no is not None: body['seq_no'] = self.seq_no
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataPlaneId:
         """Deserializes the DataPlaneId from a dictionary."""
         return cls(instance=d.get('instance', None), seq_no=d.get('seq_no', None))
 
 
+class DayOfWeek(Enum):
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified all days of the week will be used."""
+
+    FRIDAY = 'FRIDAY'
+    MONDAY = 'MONDAY'
+    SATURDAY = 'SATURDAY'
+    SUNDAY = 'SUNDAY'
+    THURSDAY = 'THURSDAY'
+    TUESDAY = 'TUESDAY'
+    WEDNESDAY = 'WEDNESDAY'
+
+
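+# Illustrative sketch (not part of the generated SDK): a restart window that
+# only allows weekend restarts starting at 02:00. This assumes the RestartWindow
+# dataclass defined later in this module accepts days_of_week and start_hour
+# fields, as the DayOfWeek docstring above suggests; treat the exact signature
+# as hypothetical.
+#
+#     window = RestartWindow(days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
+#                            start_hour=2)
+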
 @dataclass
 class DeletePipelineResponse:
 
@@ -229,6 +309,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePipelineResponse:
         """Deserializes the DeletePipelineResponse from a dictionary."""
@@ -285,7 +370,7 @@ class EditPipeline:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -309,6 +394,17 @@ class EditPipeline:
     pipeline_id: Optional[str] = None
     """Unique identifier for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
+    run_as: Optional[RunAs] = None
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -351,6 +447,8 @@ def as_dict(self) -> dict:
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
+        if self.run_as: body['run_as'] = self.run_as.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -358,6 +456,39 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPipeline into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_duplicate_names is not None: body['allow_duplicate_names'] = self.allow_duplicate_names
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.expected_last_modified is not None:
+            body['expected_last_modified'] = self.expected_last_modified
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.run_as: body['run_as'] = self.run_as
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
         """Deserializes the EditPipeline from a dictionary."""
@@ -381,6 +512,8 @@ def from_dict(cls, d: Dict[str, any]) -> EditPipeline:
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
                    pipeline_id=d.get('pipeline_id', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
+                   run_as=_from_dict(d, 'run_as', RunAs),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -396,6 +529,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditPipelineResponse:
         """Deserializes the EditPipelineResponse from a dictionary."""
@@ -417,6 +555,13 @@ def as_dict(self) -> dict:
         if self.fatal is not None: body['fatal'] = self.fatal
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ErrorDetail into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exceptions: body['exceptions'] = self.exceptions
+        if self.fatal is not None: body['fatal'] = self.fatal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ErrorDetail:
         """Deserializes the ErrorDetail from a dictionary."""
@@ -444,6 +589,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FileLibrary:
         """Deserializes the FileLibrary from a dictionary."""
@@ -465,6 +616,13 @@ def as_dict(self) -> dict:
         if self.include: body['include'] = [v for v in self.include]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Filters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.exclude: body['exclude'] = self.exclude
+        if self.include: body['include'] = self.include
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Filters:
         """Deserializes the Filters from a dictionary."""
@@ -482,6 +640,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPipelinePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelinePermissionLevelsResponse:
         """Deserializes the GetPipelinePermissionLevelsResponse from a dictionary."""
@@ -544,6 +708,24 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
+        if self.health is not None: body['health'] = self.health
+        if self.last_modified is not None: body['last_modified'] = self.last_modified
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.spec: body['spec'] = self.spec
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetPipelineResponse:
         """Deserializes the GetPipelineResponse from a dictionary."""
@@ -579,6 +761,12 @@ def as_dict(self) -> dict:
         if self.update: body['update'] = self.update.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetUpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.update: body['update'] = self.update
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
         """Deserializes the GetUpdateResponse from a dictionary."""
@@ -588,13 +776,13 @@ def from_dict(cls, d: Dict[str, any]) -> GetUpdateResponse:
 @dataclass
 class IngestionConfig:
     report: Optional[ReportSpec] = None
-    """Select tables from a specific source report."""
+    """Select a specific source report."""
 
     schema: Optional[SchemaSpec] = None
-    """Select tables from a specific source schema."""
+    """Select all tables from a specific source schema."""
 
     table: Optional[TableSpec] = None
-    """Select tables from a specific source table."""
+    """Select a specific source table."""
 
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
@@ -604,6 +792,14 @@ def as_dict(self) -> dict:
         if self.table: body['table'] = self.table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.report: body['report'] = self.report
+        if self.schema: body['schema'] = self.schema
+        if self.table: body['table'] = self.table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
@@ -615,7 +811,11 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
 @dataclass
 class IngestionGatewayPipelineDefinition:
     connection_id: Optional[str] = None
-    """Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the
+    """[Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this
+    gateway pipeline uses to communicate with the source."""
+
+    connection_name: Optional[str] = None
+    """Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the
     source."""
 
     gateway_storage_catalog: Optional[str] = None
@@ -633,6 +833,19 @@ def as_dict(self) -> dict:
         """Serializes the IngestionGatewayPipelineDefinition into a dictionary suitable for use as a JSON request body."""
         body = {}
         if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.gateway_storage_catalog is not None:
+            body['gateway_storage_catalog'] = self.gateway_storage_catalog
+        if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
+        if self.gateway_storage_schema is not None:
+            body['gateway_storage_schema'] = self.gateway_storage_schema
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionGatewayPipelineDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_id is not None: body['connection_id'] = self.connection_id
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
         if self.gateway_storage_catalog is not None:
             body['gateway_storage_catalog'] = self.gateway_storage_catalog
         if self.gateway_storage_name is not None: body['gateway_storage_name'] = self.gateway_storage_name
@@ -644,6 +857,7 @@ def as_dict(self) -> dict:
     def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
         """Deserializes the IngestionGatewayPipelineDefinition from a dictionary."""
         return cls(connection_id=d.get('connection_id', None),
+                   connection_name=d.get('connection_name', None),
                    gateway_storage_catalog=d.get('gateway_storage_catalog', None),
                    gateway_storage_name=d.get('gateway_storage_name', None),
                    gateway_storage_schema=d.get('gateway_storage_schema', None))
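
For illustration, a hedged sketch of constructing a gateway definition with the newer `connection_name` field rather than the deprecated `connection_id`; every name below is a placeholder, not a value from this patch:

```python
from databricks.sdk.service.pipelines import IngestionGatewayPipelineDefinition

# All identifiers here are illustrative placeholders.
gateway = IngestionGatewayPipelineDefinition(
    connection_name='my_sqlserver_connection',  # Unity Catalog connection to the source
    gateway_storage_catalog='main',             # catalog for the gateway's staging data
    gateway_storage_schema='cdc_staging',       # schema for the gateway's staging data
    gateway_storage_name='my_gateway')
```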
@@ -652,12 +866,12 @@ def from_dict(cls, d: Dict[str, any]) -> IngestionGatewayPipelineDefinition:
 @dataclass
 class IngestionPipelineDefinition:
     connection_name: Optional[str] = None
-    """Immutable. The Unity Catalog connection this ingestion pipeline uses to communicate with the
-    source. Specify either ingestion_gateway_id or connection_name."""
+    """Immutable. The Unity Catalog connection that this ingestion pipeline uses to communicate with
+    the source. This is used with connectors for applications like Salesforce, Workday, and so on."""
 
     ingestion_gateway_id: Optional[str] = None
-    """Immutable. Identifier for the ingestion gateway used by this ingestion pipeline to communicate
-    with the source. Specify either ingestion_gateway_id or connection_name."""
+    """Immutable. Identifier for the gateway that is used by this ingestion pipeline to communicate
+    with the source database. This is used with connectors to databases like SQL Server."""
 
     objects: Optional[List[IngestionConfig]] = None
     """Required. Settings specifying tables to replicate and the destination for the replicated tables."""
@@ -675,6 +889,15 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IngestionPipelineDefinition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.ingestion_gateway_id is not None: body['ingestion_gateway_id'] = self.ingestion_gateway_id
+        if self.objects: body['objects'] = self.objects
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionPipelineDefinition:
         """Deserializes the IngestionPipelineDefinition from a dictionary."""
@@ -703,6 +926,14 @@ def as_dict(self) -> dict:
         if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPipelineEventsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.events: body['events'] = self.events
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelineEventsResponse:
         """Deserializes the ListPipelineEventsResponse from a dictionary."""
@@ -726,6 +957,13 @@ def as_dict(self) -> dict:
         if self.statuses: body['statuses'] = [v.as_dict() for v in self.statuses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPipelinesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.statuses: body['statuses'] = self.statuses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPipelinesResponse:
         """Deserializes the ListPipelinesResponse from a dictionary."""
@@ -752,6 +990,14 @@ def as_dict(self) -> dict:
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListUpdatesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
+        if self.updates: body['updates'] = self.updates
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListUpdatesResponse:
         """Deserializes the ListUpdatesResponse from a dictionary."""
@@ -768,6 +1014,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ManualTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ManualTrigger:
         """Deserializes the ManualTrigger from a dictionary."""
@@ -793,6 +1044,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotebookLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotebookLibrary:
         """Deserializes the NotebookLibrary from a dictionary."""
@@ -819,6 +1076,13 @@ def as_dict(self) -> dict:
         if self.email_recipients: body['email_recipients'] = [v for v in self.email_recipients]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Notifications into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alerts: body['alerts'] = self.alerts
+        if self.email_recipients: body['email_recipients'] = self.email_recipients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Notifications:
         """Deserializes the Notifications from a dictionary."""
@@ -901,6 +1165,28 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Origin into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.batch_id is not None: body['batch_id'] = self.batch_id
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.dataset_name is not None: body['dataset_name'] = self.dataset_name
+        if self.flow_id is not None: body['flow_id'] = self.flow_id
+        if self.flow_name is not None: body['flow_name'] = self.flow_name
+        if self.host is not None: body['host'] = self.host
+        if self.maintenance_id is not None: body['maintenance_id'] = self.maintenance_id
+        if self.materialization_name is not None: body['materialization_name'] = self.materialization_name
+        if self.org_id is not None: body['org_id'] = self.org_id
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.pipeline_name is not None: body['pipeline_name'] = self.pipeline_name
+        if self.region is not None: body['region'] = self.region
+        if self.request_id is not None: body['request_id'] = self.request_id
+        if self.table_id is not None: body['table_id'] = self.table_id
+        if self.uc_resource_id is not None: body['uc_resource_id'] = self.uc_resource_id
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Origin:
         """Deserializes the Origin from a dictionary."""
@@ -947,6 +1233,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlRequest:
         """Deserializes the PipelineAccessControlRequest from a dictionary."""
@@ -984,6 +1280,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineAccessControlResponse:
         """Deserializes the PipelineAccessControlResponse from a dictionary."""
@@ -1123,6 +1430,33 @@ def as_dict(self) -> dict:
         if self.ssh_public_keys: body['ssh_public_keys'] = [v for v in self.ssh_public_keys]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineCluster into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_policy_default_values is not None:
+            body['apply_policy_default_values'] = self.apply_policy_default_values
+        if self.autoscale: body['autoscale'] = self.autoscale
+        if self.aws_attributes: body['aws_attributes'] = self.aws_attributes
+        if self.azure_attributes: body['azure_attributes'] = self.azure_attributes
+        if self.cluster_log_conf: body['cluster_log_conf'] = self.cluster_log_conf
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.driver_instance_pool_id is not None:
+            body['driver_instance_pool_id'] = self.driver_instance_pool_id
+        if self.driver_node_type_id is not None: body['driver_node_type_id'] = self.driver_node_type_id
+        if self.enable_local_disk_encryption is not None:
+            body['enable_local_disk_encryption'] = self.enable_local_disk_encryption
+        if self.gcp_attributes: body['gcp_attributes'] = self.gcp_attributes
+        if self.init_scripts: body['init_scripts'] = self.init_scripts
+        if self.instance_pool_id is not None: body['instance_pool_id'] = self.instance_pool_id
+        if self.label is not None: body['label'] = self.label
+        if self.node_type_id is not None: body['node_type_id'] = self.node_type_id
+        if self.num_workers is not None: body['num_workers'] = self.num_workers
+        if self.policy_id is not None: body['policy_id'] = self.policy_id
+        if self.spark_conf: body['spark_conf'] = self.spark_conf
+        if self.spark_env_vars: body['spark_env_vars'] = self.spark_env_vars
+        if self.ssh_public_keys: body['ssh_public_keys'] = self.ssh_public_keys
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineCluster:
         """Deserializes the PipelineCluster from a dictionary."""
@@ -1171,6 +1505,14 @@ def as_dict(self) -> dict:
         if self.mode is not None: body['mode'] = self.mode.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineClusterAutoscale into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.max_workers is not None: body['max_workers'] = self.max_workers
+        if self.min_workers is not None: body['min_workers'] = self.min_workers
+        if self.mode is not None: body['mode'] = self.mode
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineClusterAutoscale:
         """Deserializes the PipelineClusterAutoscale from a dictionary."""
@@ -1204,6 +1546,13 @@ def as_dict(self) -> dict:
         if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineDeployment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kind is not None: body['kind'] = self.kind
+        if self.metadata_file_path is not None: body['metadata_file_path'] = self.metadata_file_path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineDeployment:
         """Deserializes the PipelineDeployment from a dictionary."""
@@ -1254,6 +1603,20 @@ def as_dict(self) -> dict:
         if self.timestamp is not None: body['timestamp'] = self.timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineEvent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error: body['error'] = self.error
+        if self.event_type is not None: body['event_type'] = self.event_type
+        if self.id is not None: body['id'] = self.id
+        if self.level is not None: body['level'] = self.level
+        if self.maturity_level is not None: body['maturity_level'] = self.maturity_level
+        if self.message is not None: body['message'] = self.message
+        if self.origin: body['origin'] = self.origin
+        if self.sequence: body['sequence'] = self.sequence
+        if self.timestamp is not None: body['timestamp'] = self.timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineEvent:
         """Deserializes the PipelineEvent from a dictionary."""
@@ -1295,6 +1658,16 @@ def as_dict(self) -> dict:
         if self.whl is not None: body['whl'] = self.whl
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineLibrary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.file: body['file'] = self.file
+        if self.jar is not None: body['jar'] = self.jar
+        if self.maven: body['maven'] = self.maven
+        if self.notebook: body['notebook'] = self.notebook
+        if self.whl is not None: body['whl'] = self.whl
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineLibrary:
         """Deserializes the PipelineLibrary from a dictionary."""
@@ -1322,6 +1695,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermission:
         """Deserializes the PipelinePermission from a dictionary."""
@@ -1356,6 +1737,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissions:
         """Deserializes the PipelinePermissions from a dictionary."""
@@ -1379,6 +1768,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsDescription:
         """Deserializes the PipelinePermissionsDescription from a dictionary."""
@@ -1401,6 +1797,13 @@ def as_dict(self) -> dict:
         if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelinePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelinePermissionsRequest:
         """Deserializes the PipelinePermissionsRequest from a dictionary."""
@@ -1444,7 +1847,7 @@ class PipelineSpec:
     """Filters on which Pipeline packages to include in the deployed graph."""
 
     gateway_definition: Optional[IngestionGatewayPipelineDefinition] = None
-    """The definition of a gateway pipeline to support CDC."""
+    """The definition of a gateway pipeline to support change data capture."""
 
     id: Optional[str] = None
     """Unique identifier for this pipeline."""
@@ -1465,6 +1868,9 @@ class PipelineSpec:
     photon: Optional[bool] = None
     """Whether Photon is enabled for this pipeline."""
 
+    restart_window: Optional[RestartWindow] = None
+    """Restart window of this pipeline."""
+
     schema: Optional[str] = None
     """The default schema (database) where tables are read from or published to. The presence of this
     field implies that the pipeline is in direct publishing mode."""
@@ -1503,6 +1909,7 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.notifications: body['notifications'] = [v.as_dict() for v in self.notifications]
         if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window.as_dict()
         if self.schema is not None: body['schema'] = self.schema
         if self.serverless is not None: body['serverless'] = self.serverless
         if self.storage is not None: body['storage'] = self.storage
@@ -1510,6 +1917,34 @@ def as_dict(self) -> dict:
         if self.trigger: body['trigger'] = self.trigger.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.channel is not None: body['channel'] = self.channel
+        if self.clusters: body['clusters'] = self.clusters
+        if self.configuration: body['configuration'] = self.configuration
+        if self.continuous is not None: body['continuous'] = self.continuous
+        if self.deployment: body['deployment'] = self.deployment
+        if self.development is not None: body['development'] = self.development
+        if self.edition is not None: body['edition'] = self.edition
+        if self.filters: body['filters'] = self.filters
+        if self.gateway_definition: body['gateway_definition'] = self.gateway_definition
+        if self.id is not None: body['id'] = self.id
+        if self.ingestion_definition: body['ingestion_definition'] = self.ingestion_definition
+        if self.libraries: body['libraries'] = self.libraries
+        if self.name is not None: body['name'] = self.name
+        if self.notifications: body['notifications'] = self.notifications
+        if self.photon is not None: body['photon'] = self.photon
+        if self.restart_window: body['restart_window'] = self.restart_window
+        if self.schema is not None: body['schema'] = self.schema
+        if self.serverless is not None: body['serverless'] = self.serverless
+        if self.storage is not None: body['storage'] = self.storage
+        if self.target is not None: body['target'] = self.target
+        if self.trigger: body['trigger'] = self.trigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
         """Deserializes the PipelineSpec from a dictionary."""
@@ -1530,6 +1965,7 @@ def from_dict(cls, d: Dict[str, any]) -> PipelineSpec:
                    name=d.get('name', None),
                    notifications=_repeated_dict(d, 'notifications', Notifications),
                    photon=d.get('photon', None),
+                   restart_window=_from_dict(d, 'restart_window', RestartWindow),
                    schema=d.get('schema', None),
                    serverless=d.get('serverless', None),
                    storage=d.get('storage', None),
@@ -1591,6 +2027,19 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineStateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.health is not None: body['health'] = self.health
+        if self.latest_updates: body['latest_updates'] = self.latest_updates
+        if self.name is not None: body['name'] = self.name
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineStateInfo:
         """Deserializes the PipelineStateInfo from a dictionary."""
@@ -1624,6 +2073,13 @@ def as_dict(self) -> dict:
         if self.manual: body['manual'] = self.manual.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PipelineTrigger into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cron: body['cron'] = self.cron
+        if self.manual: body['manual'] = self.manual
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PipelineTrigger:
         """Deserializes the PipelineTrigger from a dictionary."""
@@ -1658,6 +2114,16 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReportSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
         """Deserializes the ReportSpec from a dictionary."""
@@ -1668,6 +2134,84 @@ def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
                    table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
 
 
+@dataclass
+class RestartWindow:
+    start_hour: int
+    """An integer between 0 and 23 denoting the start hour for the restart window in the 24-hour day.
+    Continuous pipeline restart is triggered only within a five-hour window starting at this hour."""
+
+    days_of_week: Optional[List[DayOfWeek]] = None
+    """Days of week in which the restart is allowed to happen (within a five-hour window starting at
+    start_hour). If not specified all days of the week will be used."""
+
+    time_zone_id: Optional[str] = None
+    """Time zone id of restart window. See
+    https://docs.databricks.com/sql/language-manual/sql-ref-syntax-aux-conf-mgmt-set-timezone.html
+    for details. If not specified, UTC will be used."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RestartWindow into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.days_of_week: body['days_of_week'] = [v.value for v in self.days_of_week]
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestartWindow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.days_of_week: body['days_of_week'] = self.days_of_week
+        if self.start_hour is not None: body['start_hour'] = self.start_hour
+        if self.time_zone_id is not None: body['time_zone_id'] = self.time_zone_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RestartWindow:
+        """Deserializes the RestartWindow from a dictionary."""
+        return cls(days_of_week=_repeated_enum(d, 'days_of_week', DayOfWeek),
+                   start_hour=d.get('start_hour', None),
+                   time_zone_id=d.get('time_zone_id', None))
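
A hedged usage sketch for the new class, assuming `DayOfWeek` exposes weekday members such as `SATURDAY`; the time zone is illustrative:

```python
from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

# Allow restarts only in the 02:00-07:00 window on weekends, Pacific time.
window = RestartWindow(start_hour=2,
                       days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
                       time_zone_id='America/Los_Angeles')
print(window.as_dict())
# {'days_of_week': ['SATURDAY', 'SUNDAY'], 'start_hour': 2,
#  'time_zone_id': 'America/Los_Angeles'}
```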
+
+
+@dataclass
+class RunAs:
+    """Write-only setting, available only in Create/Update calls. Specifies the user or service
+    principal that the pipeline runs as. If not specified, the pipeline runs as the user who created
+    the pipeline.
+    
+    Only `user_name` or `service_principal_name` can be specified. If both are specified, an error
+    is thrown."""
+
+    service_principal_name: Optional[str] = None
+    """Application ID of an active service principal. Setting this field requires the
+    `servicePrincipal/user` role."""
+
+    user_name: Optional[str] = None
+    """The email of an active workspace user. Users can only set this field to their own email."""
+
+    def as_dict(self) -> dict:
+        """Serializes the RunAs into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RunAs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> RunAs:
+        """Deserializes the RunAs from a dictionary."""
+        return cls(service_principal_name=d.get('service_principal_name', None),
+                   user_name=d.get('user_name', None))
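
A minimal sketch of the mutually exclusive setting; the application ID is a placeholder:

```python
from databricks.sdk.service.pipelines import RunAs

# Set exactly one of service_principal_name or user_name.
run_as = RunAs(service_principal_name='00000000-0000-0000-0000-000000000000')
# run_as = RunAs(user_name='someone@example.com')  # the alternative form
print(run_as.as_dict())  # {'service_principal_name': '00000000-...'}
```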
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
@@ -1699,6 +2243,16 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SchemaSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SchemaSpec:
         """Deserializes the SchemaSpec from a dictionary."""
@@ -1724,6 +2278,13 @@ def as_dict(self) -> dict:
         if self.data_plane_id: body['data_plane_id'] = self.data_plane_id.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Sequencing into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.control_plane_seq_no is not None: body['control_plane_seq_no'] = self.control_plane_seq_no
+        if self.data_plane_id: body['data_plane_id'] = self.data_plane_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Sequencing:
         """Deserializes the Sequencing from a dictionary."""
@@ -1750,6 +2311,14 @@ def as_dict(self) -> dict:
         if self.stack: body['stack'] = [v.as_dict() for v in self.stack]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SerializedException into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.class_name is not None: body['class_name'] = self.class_name
+        if self.message is not None: body['message'] = self.message
+        if self.stack: body['stack'] = self.stack
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SerializedException:
         """Deserializes the SerializedException from a dictionary."""
@@ -1781,6 +2350,15 @@ def as_dict(self) -> dict:
         if self.method_name is not None: body['method_name'] = self.method_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StackFrame into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.declaring_class is not None: body['declaring_class'] = self.declaring_class
+        if self.file_name is not None: body['file_name'] = self.file_name
+        if self.line_number is not None: body['line_number'] = self.line_number
+        if self.method_name is not None: body['method_name'] = self.method_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StackFrame:
         """Deserializes the StackFrame from a dictionary."""
@@ -1825,6 +2403,17 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdate:
         """Deserializes the StartUpdate from a dictionary."""
@@ -1856,6 +2445,12 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartUpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartUpdateResponse:
         """Deserializes the StartUpdateResponse from a dictionary."""
@@ -1870,6 +2465,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StopPipelineResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StopPipelineResponse:
         """Deserializes the StopPipelineResponse from a dictionary."""
@@ -1913,6 +2513,18 @@ def as_dict(self) -> dict:
         if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_catalog is not None: body['source_catalog'] = self.source_catalog
+        if self.source_schema is not None: body['source_schema'] = self.source_schema
+        if self.source_table is not None: body['source_table'] = self.source_table
+        if self.table_configuration: body['table_configuration'] = self.table_configuration
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpec:
         """Deserializes the TableSpec from a dictionary."""
@@ -1951,6 +2563,16 @@ def as_dict(self) -> dict:
         if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TableSpecificConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        if self.salesforce_include_formula_fields is not None:
+            body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
+        if self.scd_type is not None: body['scd_type'] = self.scd_type
+        if self.sequence_by: body['sequence_by'] = self.sequence_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
@@ -2025,6 +2647,22 @@ def as_dict(self) -> dict:
         if self.validate_only is not None: body['validate_only'] = self.validate_only
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cause is not None: body['cause'] = self.cause
+        if self.cluster_id is not None: body['cluster_id'] = self.cluster_id
+        if self.config: body['config'] = self.config
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.full_refresh is not None: body['full_refresh'] = self.full_refresh
+        if self.full_refresh_selection: body['full_refresh_selection'] = self.full_refresh_selection
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.refresh_selection: body['refresh_selection'] = self.refresh_selection
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
+        if self.validate_only is not None: body['validate_only'] = self.validate_only
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateInfo:
         """Deserializes the UpdateInfo from a dictionary."""
@@ -2084,6 +2722,14 @@ def as_dict(self) -> dict:
         if self.update_id is not None: body['update_id'] = self.update_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateStateInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.state is not None: body['state'] = self.state
+        if self.update_id is not None: body['update_id'] = self.update_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateStateInfo:
         """Deserializes the UpdateStateInfo from a dictionary."""
@@ -2122,13 +2768,13 @@ class PipelinesAPI:
     def __init__(self, api_client):
         self._api = api_client
 
-    def wait_get_pipeline_idle(
+    def wait_get_pipeline_running(
             self,
             pipeline_id: str,
             timeout=timedelta(minutes=20),
             callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.IDLE, )
+        target_states = (PipelineState.RUNNING, )
         failure_states = (PipelineState.FAILED, )
         status_message = 'polling...'
         attempt = 1
@@ -2141,7 +2787,7 @@ def wait_get_pipeline_idle(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach IDLE, got {status}: {status_message}'
+                msg = f'failed to reach RUNNING, got {status}: {status_message}'
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
@@ -2153,13 +2799,13 @@ def wait_get_pipeline_idle(
             attempt += 1
         raise TimeoutError(f'timed out after {timeout}: {status_message}')
 
-    def wait_get_pipeline_running(
+    def wait_get_pipeline_idle(
             self,
             pipeline_id: str,
             timeout=timedelta(minutes=20),
             callback: Optional[Callable[[GetPipelineResponse], None]] = None) -> GetPipelineResponse:
         deadline = time.time() + timeout.total_seconds()
-        target_states = (PipelineState.RUNNING, )
+        target_states = (PipelineState.IDLE, )
         failure_states = (PipelineState.FAILED, )
         status_message = 'polling...'
         attempt = 1
@@ -2172,7 +2818,7 @@ def wait_get_pipeline_running(
             if callback:
                 callback(poll)
             if status in failure_states:
-                msg = f'failed to reach RUNNING, got {status}: {status_message}'
+                msg = f'failed to reach IDLE, got {status}: {status_message}'
                 raise OperationFailed(msg)
             prefix = f"pipeline_id={pipeline_id}"
             sleep = attempt
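
The hunks above only swap the positions of the two generated waiters; each name still polls for its own target state. A hedged usage sketch that starts an update and blocks until the pipeline reports RUNNING (pipeline ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
w.pipelines.start_update(pipeline_id='1234-567890-abcdef')  # placeholder ID
pipeline = w.pipelines.wait_get_pipeline_running(
    pipeline_id='1234-567890-abcdef',
    callback=lambda p: print(f'state={p.state}'))  # log each poll
```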
@@ -2205,6 +2851,8 @@ def create(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               restart_window: Optional[RestartWindow] = None,
+               run_as: Optional[RunAs] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -2241,7 +2889,7 @@ def create(self,
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -2255,6 +2903,14 @@ def create(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -2290,6 +2946,8 @@ def create(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
+        if run_as is not None: body['run_as'] = run_as.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
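
Putting the new parameters together, a hedged sketch of a `create` call; the notebook path and service principal ID are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (DayOfWeek, NotebookLibrary,
                                              PipelineLibrary, RestartWindow, RunAs)

w = WorkspaceClient()
created = w.pipelines.create(
    name='my-pipeline',
    libraries=[PipelineLibrary(notebook=NotebookLibrary(path='/Repos/me/pipeline'))],
    continuous=True,
    restart_window=RestartWindow(start_hour=1, days_of_week=[DayOfWeek.SUNDAY]),
    run_as=RunAs(service_principal_name='00000000-0000-0000-0000-000000000000'))
print(created.pipeline_id)
```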
@@ -2518,7 +3176,8 @@ def set_permissions(
             access_control_list: Optional[List[PipelineAccessControlRequest]] = None) -> PipelinePermissions:
         """Set pipeline permissions.
         
-        Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
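
Because `set_permissions` now documents full replacement semantics, a call like the hedged sketch below drops any direct grants not listed; all names and IDs are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.pipelines import (PipelineAccessControlRequest,
                                              PipelinePermissionLevel)

w = WorkspaceClient()
# After this call, the listed grant is the pipeline's only direct permission.
w.pipelines.set_permissions(
    pipeline_id='1234-567890-abcdef',  # placeholder
    access_control_list=[
        PipelineAccessControlRequest(group_name='data-engineers',
                                     permission_level=PipelinePermissionLevel.CAN_MANAGE)
    ])
```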
@@ -2622,6 +3281,8 @@ def update(self,
                name: Optional[str] = None,
                notifications: Optional[List[Notifications]] = None,
                photon: Optional[bool] = None,
+               restart_window: Optional[RestartWindow] = None,
+               run_as: Optional[RunAs] = None,
                schema: Optional[str] = None,
                serverless: Optional[bool] = None,
                storage: Optional[str] = None,
@@ -2661,7 +3322,7 @@ def update(self,
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -2675,6 +3336,14 @@ def update(self,
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -2710,6 +3379,8 @@ def update(self,
         if name is not None: body['name'] = name
         if notifications is not None: body['notifications'] = [v.as_dict() for v in notifications]
         if photon is not None: body['photon'] = photon
+        if restart_window is not None: body['restart_window'] = restart_window.as_dict()
+        if run_as is not None: body['run_as'] = run_as.as_dict()
         if schema is not None: body['schema'] = schema
         if serverless is not None: body['serverless'] = serverless
         if storage is not None: body['storage'] = storage
diff --git a/databricks/sdk/service/provisioning.py b/databricks/sdk/service/provisioning.py
index 1dc6f3b8d..c54120ad8 100755
--- a/databricks/sdk/service/provisioning.py
+++ b/databricks/sdk/service/provisioning.py
@@ -28,6 +28,12 @@ def as_dict(self) -> dict:
         if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sts_role: body['sts_role'] = self.sts_role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
         """Deserializes the AwsCredentials from a dictionary."""
@@ -60,6 +66,16 @@ def as_dict(self) -> dict:
             body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key_alias is not None: body['key_alias'] = self.key_alias
+        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.key_region is not None: body['key_region'] = self.key_region
+        if self.reuse_key_for_cluster_volumes is not None:
+            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AwsKeyInfo:
         """Deserializes the AwsKeyInfo from a dictionary."""
@@ -84,6 +100,13 @@ def as_dict(self) -> dict:
         if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureWorkspaceInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.resource_group is not None: body['resource_group'] = self.resource_group
+        if self.subscription_id is not None: body['subscription_id'] = self.subscription_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureWorkspaceInfo:
         """Deserializes the AzureWorkspaceInfo from a dictionary."""
@@ -104,6 +127,12 @@ def as_dict(self) -> dict:
         if self.gcp: body['gcp'] = self.gcp.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CloudResourceContainer into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp: body['gcp'] = self.gcp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CloudResourceContainer:
         """Deserializes the CloudResourceContainer from a dictionary."""
@@ -133,6 +162,15 @@ def as_dict(self) -> dict:
             body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAwsKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key_alias is not None: body['key_alias'] = self.key_alias
+        if self.key_arn is not None: body['key_arn'] = self.key_arn
+        if self.reuse_key_for_cluster_volumes is not None:
+            body['reuse_key_for_cluster_volumes'] = self.reuse_key_for_cluster_volumes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAwsKeyInfo:
         """Deserializes the CreateAwsKeyInfo from a dictionary."""
@@ -151,6 +189,12 @@ def as_dict(self) -> dict:
         if self.sts_role: body['sts_role'] = self.sts_role.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialAwsCredentials into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.sts_role: body['sts_role'] = self.sts_role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialAwsCredentials:
         """Deserializes the CreateCredentialAwsCredentials from a dictionary."""
@@ -171,6 +215,13 @@ def as_dict(self) -> dict:
         if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
+        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialRequest:
         """Deserializes the CreateCredentialRequest from a dictionary."""
@@ -189,6 +240,12 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialStsRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialStsRole:
         """Deserializes the CreateCredentialStsRole from a dictionary."""
@@ -212,6 +269,14 @@ def as_dict(self) -> dict:
         if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCustomerManagedKeyRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
+        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
+        if self.use_cases: body['use_cases'] = self.use_cases
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCustomerManagedKeyRequest:
         """Deserializes the CreateCustomerManagedKeyRequest from a dictionary."""
@@ -231,6 +296,12 @@ def as_dict(self) -> dict:
         if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateGcpKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateGcpKeyInfo:
         """Deserializes the CreateGcpKeyInfo from a dictionary."""
@@ -275,6 +346,17 @@ def as_dict(self) -> dict:
         if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNetworkRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
+        if self.network_name is not None: body['network_name'] = self.network_name
+        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
+        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
+        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkRequest:
         """Deserializes the CreateNetworkRequest from a dictionary."""
@@ -302,6 +384,14 @@ def as_dict(self) -> dict:
             body['storage_configuration_name'] = self.storage_configuration_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateStorageConfigurationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.storage_configuration_name is not None:
+            body['storage_configuration_name'] = self.storage_configuration_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateStorageConfigurationRequest:
         """Deserializes the CreateStorageConfigurationRequest from a dictionary."""
@@ -332,6 +422,15 @@ def as_dict(self) -> dict:
         if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVpcEndpointRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
+        if self.region is not None: body['region'] = self.region
+        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVpcEndpointRequest:
         """Deserializes the CreateVpcEndpointRequest from a dictionary."""
@@ -412,6 +511,9 @@ class CreateWorkspaceRequest:
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
 
+    is_no_public_ip_enabled: Optional[bool] = None
+    """Whether no public IP is enabled for the workspace."""
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account. For example,
     `us-east4`."""
@@ -460,6 +562,8 @@ def as_dict(self) -> dict:
         if self.gcp_managed_network_config:
             body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
         if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
         if self.location is not None: body['location'] = self.location
         if self.managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
@@ -474,6 +578,34 @@ def as_dict(self) -> dict:
         if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.gcp_managed_network_config:
+            body['gcp_managed_network_config'] = self.gcp_managed_network_config
+        if self.gke_config: body['gke_config'] = self.gke_config
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
+        if self.location is not None: body['location'] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest:
         """Deserializes the CreateWorkspaceRequest from a dictionary."""
@@ -486,6 +618,7 @@ def from_dict(cls, d: Dict[str, any]) -> CreateWorkspaceRequest:
                    gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
                                                          GcpManagedNetworkConfig),
                    gke_config=_from_dict(d, 'gke_config', GkeConfig),
+                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
                    location=d.get('location', None),
                    managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
                                                                   None),
@@ -523,6 +656,16 @@ def as_dict(self) -> dict:
         if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Credential into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_credentials: body['aws_credentials'] = self.aws_credentials
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.credentials_name is not None: body['credentials_name'] = self.credentials_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Credential:
         """Deserializes the Credential from a dictionary."""
@@ -550,6 +693,12 @@ def as_dict(self) -> dict:
         if self.project_id is not None: body['project_id'] = self.project_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomerFacingGcpCloudResourceContainer into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.project_id is not None: body['project_id'] = self.project_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerFacingGcpCloudResourceContainer:
         """Deserializes the CustomerFacingGcpCloudResourceContainer from a dictionary."""
@@ -586,6 +735,18 @@ def as_dict(self) -> dict:
         if self.use_cases: body['use_cases'] = [v.value for v in self.use_cases]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CustomerManagedKey into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_key_info: body['aws_key_info'] = self.aws_key_info
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.customer_managed_key_id is not None:
+            body['customer_managed_key_id'] = self.customer_managed_key_id
+        if self.gcp_key_info: body['gcp_key_info'] = self.gcp_key_info
+        if self.use_cases: body['use_cases'] = self.use_cases
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CustomerManagedKey:
         """Deserializes the CustomerManagedKey from a dictionary."""
@@ -605,6 +766,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -632,6 +798,45 @@ class ErrorType(Enum):
     VPC = 'vpc'
 
 
+@dataclass
+class ExternalCustomerInfo:
+    authoritative_user_email: Optional[str] = None
+    """Email of the authoritative user."""
+
+    authoritative_user_full_name: Optional[str] = None
+    """The authoritative user full name."""
+
+    customer_name: Optional[str] = None
+    """The legal entity name for the external workspace"""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalCustomerInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.authoritative_user_email is not None:
+            body['authoritative_user_email'] = self.authoritative_user_email
+        if self.authoritative_user_full_name is not None:
+            body['authoritative_user_full_name'] = self.authoritative_user_full_name
+        if self.customer_name is not None: body['customer_name'] = self.customer_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalCustomerInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authoritative_user_email is not None:
+            body['authoritative_user_email'] = self.authoritative_user_email
+        if self.authoritative_user_full_name is not None:
+            body['authoritative_user_full_name'] = self.authoritative_user_full_name
+        if self.customer_name is not None: body['customer_name'] = self.customer_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalCustomerInfo:
+        """Deserializes the ExternalCustomerInfo from a dictionary."""
+        return cls(authoritative_user_email=d.get('authoritative_user_email', None),
+                   authoritative_user_full_name=d.get('authoritative_user_full_name', None),
+                   customer_name=d.get('customer_name', None))
+
+
 @dataclass
 class GcpKeyInfo:
     kms_key_id: str
@@ -643,6 +848,12 @@ def as_dict(self) -> dict:
         if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpKeyInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.kms_key_id is not None: body['kms_key_id'] = self.kms_key_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpKeyInfo:
         """Deserializes the GcpKeyInfo from a dictionary."""
@@ -692,6 +903,16 @@ def as_dict(self) -> dict:
         if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpManagedNetworkConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.gke_cluster_pod_ip_range is not None:
+            body['gke_cluster_pod_ip_range'] = self.gke_cluster_pod_ip_range
+        if self.gke_cluster_service_ip_range is not None:
+            body['gke_cluster_service_ip_range'] = self.gke_cluster_service_ip_range
+        if self.subnet_cidr is not None: body['subnet_cidr'] = self.subnet_cidr
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpManagedNetworkConfig:
         """Deserializes the GcpManagedNetworkConfig from a dictionary."""
@@ -737,6 +958,17 @@ def as_dict(self) -> dict:
         if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpNetworkInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.network_project_id is not None: body['network_project_id'] = self.network_project_id
+        if self.pod_ip_range_name is not None: body['pod_ip_range_name'] = self.pod_ip_range_name
+        if self.service_ip_range_name is not None: body['service_ip_range_name'] = self.service_ip_range_name
+        if self.subnet_id is not None: body['subnet_id'] = self.subnet_id
+        if self.subnet_region is not None: body['subnet_region'] = self.subnet_region
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpNetworkInfo:
         """Deserializes the GcpNetworkInfo from a dictionary."""
@@ -777,6 +1009,16 @@ def as_dict(self) -> dict:
         if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpVpcEndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_region is not None: body['endpoint_region'] = self.endpoint_region
+        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.psc_connection_id is not None: body['psc_connection_id'] = self.psc_connection_id
+        if self.psc_endpoint_name is not None: body['psc_endpoint_name'] = self.psc_endpoint_name
+        if self.service_attachment_id is not None: body['service_attachment_id'] = self.service_attachment_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GcpVpcEndpointInfo:
         """Deserializes the GcpVpcEndpointInfo from a dictionary."""
@@ -813,6 +1055,13 @@ def as_dict(self) -> dict:
         if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GkeConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connectivity_type is not None: body['connectivity_type'] = self.connectivity_type
+        if self.master_ip_range is not None: body['master_ip_range'] = self.master_ip_range
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GkeConfig:
         """Deserializes the GkeConfig from a dictionary."""
@@ -905,6 +1154,24 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Network into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.error_messages: body['error_messages'] = self.error_messages
+        if self.gcp_network_info: body['gcp_network_info'] = self.gcp_network_info
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.network_name is not None: body['network_name'] = self.network_name
+        if self.security_group_ids: body['security_group_ids'] = self.security_group_ids
+        if self.subnet_ids: body['subnet_ids'] = self.subnet_ids
+        if self.vpc_endpoints: body['vpc_endpoints'] = self.vpc_endpoints
+        if self.vpc_id is not None: body['vpc_id'] = self.vpc_id
+        if self.vpc_status is not None: body['vpc_status'] = self.vpc_status
+        if self.warning_messages: body['warning_messages'] = self.warning_messages
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Network:
         """Deserializes the Network from a dictionary."""
@@ -939,6 +1206,13 @@ def as_dict(self) -> dict:
         if self.error_type is not None: body['error_type'] = self.error_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkHealth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.error_type is not None: body['error_type'] = self.error_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkHealth:
         """Deserializes the NetworkHealth from a dictionary."""
@@ -966,6 +1240,13 @@ def as_dict(self) -> dict:
         if self.rest_api: body['rest_api'] = [v for v in self.rest_api]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkVpcEndpoints into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataplane_relay: body['dataplane_relay'] = self.dataplane_relay
+        if self.rest_api: body['rest_api'] = self.rest_api
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkVpcEndpoints:
         """Deserializes the NetworkVpcEndpoints from a dictionary."""
@@ -987,6 +1268,13 @@ def as_dict(self) -> dict:
         if self.warning_type is not None: body['warning_type'] = self.warning_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkWarning into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warning_message is not None: body['warning_message'] = self.warning_message
+        if self.warning_type is not None: body['warning_type'] = self.warning_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkWarning:
         """Deserializes the NetworkWarning from a dictionary."""
@@ -1064,6 +1352,20 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivateAccessSettings into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body['private_access_settings_name'] = self.private_access_settings_name
+        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivateAccessSettings:
         """Deserializes the PrivateAccessSettings from a dictionary."""
@@ -1084,6 +1386,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
@@ -1103,6 +1410,12 @@ def as_dict(self) -> dict:
         if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RootBucketInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RootBucketInfo:
         """Deserializes the RootBucketInfo from a dictionary."""
@@ -1138,6 +1451,18 @@ def as_dict(self) -> dict:
             body['storage_configuration_name'] = self.storage_configuration_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StorageConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.root_bucket_info: body['root_bucket_info'] = self.root_bucket_info
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_configuration_name is not None:
+            body['storage_configuration_name'] = self.storage_configuration_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StorageConfiguration:
         """Deserializes the StorageConfiguration from a dictionary."""
@@ -1164,6 +1489,13 @@ def as_dict(self) -> dict:
         if self.role_arn is not None: body['role_arn'] = self.role_arn
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StsRole into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.external_id is not None: body['external_id'] = self.external_id
+        if self.role_arn is not None: body['role_arn'] = self.role_arn
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StsRole:
         """Deserializes the StsRole from a dictionary."""
@@ -1178,6 +1510,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -1210,6 +1547,10 @@ class UpdateWorkspaceRequest:
     customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC
     to a customer-managed VPC by updating the workspace to add a network configuration ID."""
 
+    private_access_settings_id: Optional[str] = None
+    """The ID of the workspace's private access settings configuration object. This parameter is
+    available only for updating failed workspaces."""
+
     storage_configuration_id: Optional[str] = None
     """The ID of the workspace's storage configuration object. This parameter is available only for
     updating failed workspaces."""
@@ -1232,6 +1573,28 @@ def as_dict(self) -> dict:
         if self.network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = self.network_connectivity_config_id
         if self.network_id is not None: body['network_id'] = self.network_id
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
         if self.storage_configuration_id is not None:
             body['storage_configuration_id'] = self.storage_configuration_id
         if self.storage_customer_managed_key_id is not None:
@@ -1249,6 +1612,7 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateWorkspaceRequest:
                                                                   None),
                    network_connectivity_config_id=d.get('network_connectivity_config_id', None),
                    network_id=d.get('network_id', None),
+                   private_access_settings_id=d.get('private_access_settings_id', None),
                    storage_configuration_id=d.get('storage_configuration_id', None),
                    storage_customer_managed_key_id=d.get('storage_customer_managed_key_id', None),
                    workspace_id=d.get('workspace_id', None))
@@ -1307,6 +1671,19 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertPrivateAccessSettingsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_vpc_endpoint_ids: body['allowed_vpc_endpoint_ids'] = self.allowed_vpc_endpoint_ids
+        if self.private_access_level is not None: body['private_access_level'] = self.private_access_level
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.private_access_settings_name is not None:
+            body['private_access_settings_name'] = self.private_access_settings_name
+        if self.public_access_enabled is not None: body['public_access_enabled'] = self.public_access_enabled
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertPrivateAccessSettingsRequest:
         """Deserializes the UpsertPrivateAccessSettingsRequest from a dictionary."""
@@ -1378,6 +1755,22 @@ def as_dict(self) -> dict:
         if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VpcEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_account_id is not None: body['aws_account_id'] = self.aws_account_id
+        if self.aws_endpoint_service_id is not None:
+            body['aws_endpoint_service_id'] = self.aws_endpoint_service_id
+        if self.aws_vpc_endpoint_id is not None: body['aws_vpc_endpoint_id'] = self.aws_vpc_endpoint_id
+        if self.gcp_vpc_endpoint_info: body['gcp_vpc_endpoint_info'] = self.gcp_vpc_endpoint_info
+        if self.region is not None: body['region'] = self.region
+        if self.state is not None: body['state'] = self.state
+        if self.use_case is not None: body['use_case'] = self.use_case
+        if self.vpc_endpoint_id is not None: body['vpc_endpoint_id'] = self.vpc_endpoint_id
+        if self.vpc_endpoint_name is not None: body['vpc_endpoint_name'] = self.vpc_endpoint_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VpcEndpoint:
         """Deserializes the VpcEndpoint from a dictionary."""
@@ -1443,6 +1836,10 @@ class Workspace:
     
     This value must be unique across all non-deleted deployments across all AWS regions."""
 
+    external_customer_info: Optional[ExternalCustomerInfo] = None
+    """If this workspace is for a external customer, then external_customer_info is populated. If this
+    workspace is not for a external customer, then external_customer_info is empty."""
+
     gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None
     """The network settings for the workspace. The configurations are only for Databricks-managed VPCs.
     It is ignored if you specify a customer-managed VPC in the `network_id` field. All the IP
@@ -1466,6 +1863,9 @@ class Workspace:
     gke_config: Optional[GkeConfig] = None
     """The configurations for the GKE cluster of a Databricks workspace."""
 
+    is_no_public_ip_enabled: Optional[bool] = None
+    """Whether no public IP is enabled for the workspace."""
+
     location: Optional[str] = None
     """The Google Cloud region of the workspace data plane in your Google account (for example,
     `us-east4`)."""
@@ -1524,9 +1924,12 @@ def as_dict(self) -> dict:
         if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
         if self.custom_tags: body['custom_tags'] = self.custom_tags
         if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info.as_dict()
         if self.gcp_managed_network_config:
             body['gcp_managed_network_config'] = self.gcp_managed_network_config.as_dict()
         if self.gke_config: body['gke_config'] = self.gke_config.as_dict()
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
         if self.location is not None: body['location'] = self.location
         if self.managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
@@ -1545,6 +1948,42 @@ def as_dict(self) -> dict:
             body['workspace_status_message'] = self.workspace_status_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Workspace into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.azure_workspace_info: body['azure_workspace_info'] = self.azure_workspace_info
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.cloud_resource_container: body['cloud_resource_container'] = self.cloud_resource_container
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.credentials_id is not None: body['credentials_id'] = self.credentials_id
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        if self.deployment_name is not None: body['deployment_name'] = self.deployment_name
+        if self.external_customer_info: body['external_customer_info'] = self.external_customer_info
+        if self.gcp_managed_network_config:
+            body['gcp_managed_network_config'] = self.gcp_managed_network_config
+        if self.gke_config: body['gke_config'] = self.gke_config
+        if self.is_no_public_ip_enabled is not None:
+            body['is_no_public_ip_enabled'] = self.is_no_public_ip_enabled
+        if self.location is not None: body['location'] = self.location
+        if self.managed_services_customer_managed_key_id is not None:
+            body['managed_services_customer_managed_key_id'] = self.managed_services_customer_managed_key_id
+        if self.network_id is not None: body['network_id'] = self.network_id
+        if self.pricing_tier is not None: body['pricing_tier'] = self.pricing_tier
+        if self.private_access_settings_id is not None:
+            body['private_access_settings_id'] = self.private_access_settings_id
+        if self.storage_configuration_id is not None:
+            body['storage_configuration_id'] = self.storage_configuration_id
+        if self.storage_customer_managed_key_id is not None:
+            body['storage_customer_managed_key_id'] = self.storage_customer_managed_key_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        if self.workspace_name is not None: body['workspace_name'] = self.workspace_name
+        if self.workspace_status is not None: body['workspace_status'] = self.workspace_status
+        if self.workspace_status_message is not None:
+            body['workspace_status_message'] = self.workspace_status_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Workspace:
         """Deserializes the Workspace from a dictionary."""
@@ -1557,9 +1996,11 @@ def from_dict(cls, d: Dict[str, any]) -> Workspace:
                    credentials_id=d.get('credentials_id', None),
                    custom_tags=d.get('custom_tags', None),
                    deployment_name=d.get('deployment_name', None),
+                   external_customer_info=_from_dict(d, 'external_customer_info', ExternalCustomerInfo),
                    gcp_managed_network_config=_from_dict(d, 'gcp_managed_network_config',
                                                          GcpManagedNetworkConfig),
                    gke_config=_from_dict(d, 'gke_config', GkeConfig),
+                   is_no_public_ip_enabled=d.get('is_no_public_ip_enabled', None),
                    location=d.get('location', None),
                    managed_services_customer_managed_key_id=d.get('managed_services_customer_managed_key_id',
                                                                   None),
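A short sketch of how the new nested field round-trips through `from_dict`; the field values below are hypothetical:

```python
# Nested deserialization: external_customer_info is rebuilt as a typed object.
from databricks.sdk.service.provisioning import Workspace

ws = Workspace.from_dict({
    'workspace_name': 'acme-prod',  # hypothetical values
    'is_no_public_ip_enabled': True,
    'external_customer_info': {'customer_name': 'Acme Corp'},
})
assert ws.external_customer_info.customer_name == 'Acme Corp'
```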
@@ -2399,6 +2840,7 @@ def create(self,
                deployment_name: Optional[str] = None,
                gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
                gke_config: Optional[GkeConfig] = None,
+               is_no_public_ip_enabled: Optional[bool] = None,
                location: Optional[str] = None,
                managed_services_customer_managed_key_id: Optional[str] = None,
                network_id: Optional[str] = None,
@@ -2477,6 +2919,8 @@ def create(self,
           [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
         :param gke_config: :class:`GkeConfig` (optional)
           The configurations for the GKE cluster of a Databricks workspace.
+        :param is_no_public_ip_enabled: bool (optional)
+          Whether no public IP is enabled for the workspace.
         :param location: str (optional)
           The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
         :param managed_services_customer_managed_key_id: str (optional)
@@ -2519,6 +2963,7 @@ def create(self,
         if gcp_managed_network_config is not None:
             body['gcp_managed_network_config'] = gcp_managed_network_config.as_dict()
         if gke_config is not None: body['gke_config'] = gke_config.as_dict()
+        if is_no_public_ip_enabled is not None: body['is_no_public_ip_enabled'] = is_no_public_ip_enabled
         if location is not None: body['location'] = location
         if managed_services_customer_managed_key_id is not None:
             body['managed_services_customer_managed_key_id'] = managed_services_customer_managed_key_id
@@ -2552,6 +2997,7 @@ def create_and_wait(
         deployment_name: Optional[str] = None,
         gcp_managed_network_config: Optional[GcpManagedNetworkConfig] = None,
         gke_config: Optional[GkeConfig] = None,
+        is_no_public_ip_enabled: Optional[bool] = None,
         location: Optional[str] = None,
         managed_services_customer_managed_key_id: Optional[str] = None,
         network_id: Optional[str] = None,
@@ -2568,6 +3014,7 @@ def create_and_wait(
                            deployment_name=deployment_name,
                            gcp_managed_network_config=gcp_managed_network_config,
                            gke_config=gke_config,
+                           is_no_public_ip_enabled=is_no_public_ip_enabled,
                            location=location,
                            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
                            network_id=network_id,
@@ -2653,6 +3100,7 @@ def update(self,
                managed_services_customer_managed_key_id: Optional[str] = None,
                network_connectivity_config_id: Optional[str] = None,
                network_id: Optional[str] = None,
+               private_access_settings_id: Optional[str] = None,
                storage_configuration_id: Optional[str] = None,
                storage_customer_managed_key_id: Optional[str] = None) -> Wait[Workspace]:
         """Update workspace configuration.
@@ -2771,6 +3219,9 @@ def update(self,
           The ID of the workspace's network configuration object. Used only if you already use a
           customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
           customer-managed VPC by updating the workspace to add a network configuration ID.
+        :param private_access_settings_id: str (optional)
+          The ID of the workspace's private access settings configuration object. This parameter is available
+          only for updating failed workspaces.
         :param storage_configuration_id: str (optional)
           The ID of the workspace's storage configuration object. This parameter is available only for
           updating failed workspaces.
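A sketch of the corresponding call; per the docstring above, this parameter applies only to failed workspaces, and the IDs are placeholders (reusing the `AccountClient` `a` from the earlier sketch):

```python
# Hypothetical sketch: attaching private access settings while repairing a
# failed workspace.
workspace = a.workspaces.update_and_wait(
    workspace_id=123456789,
    private_access_settings_id='<private-access-settings-id>')
```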
@@ -2791,6 +3242,8 @@ def update(self,
         if network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = network_connectivity_config_id
         if network_id is not None: body['network_id'] = network_id
+        if private_access_settings_id is not None:
+            body['private_access_settings_id'] = private_access_settings_id
         if storage_configuration_id is not None: body['storage_configuration_id'] = storage_configuration_id
         if storage_customer_managed_key_id is not None:
             body['storage_customer_managed_key_id'] = storage_customer_managed_key_id
@@ -2814,6 +3267,7 @@ def update_and_wait(
         managed_services_customer_managed_key_id: Optional[str] = None,
         network_connectivity_config_id: Optional[str] = None,
         network_id: Optional[str] = None,
+        private_access_settings_id: Optional[str] = None,
         storage_configuration_id: Optional[str] = None,
         storage_customer_managed_key_id: Optional[str] = None,
         timeout=timedelta(minutes=20)) -> Workspace:
@@ -2823,6 +3277,7 @@ def update_and_wait(
                            managed_services_customer_managed_key_id=managed_services_customer_managed_key_id,
                            network_connectivity_config_id=network_connectivity_config_id,
                            network_id=network_id,
+                           private_access_settings_id=private_access_settings_id,
                            storage_configuration_id=storage_configuration_id,
                            storage_customer_managed_key_id=storage_customer_managed_key_id,
                            workspace_id=workspace_id).result(timeout=timeout)
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 7639d96fb..c10e43572 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -12,14 +12,11 @@
 
 import requests
 
-from ..data_plane import DataPlaneService
 from ..errors import OperationFailed
 from ._internal import Wait, _enum, _from_dict, _repeated_dict
 
 _LOG = logging.getLogger('databricks.sdk')
 
-from databricks.sdk.service import oauth2
-
 # all definitions in this file are in alphabetical order
 
 
@@ -43,6 +40,14 @@ def as_dict(self) -> dict:
             body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Ai21LabsConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai21labs_api_key is not None: body['ai21labs_api_key'] = self.ai21labs_api_key
+        if self.ai21labs_api_key_plaintext is not None:
+            body['ai21labs_api_key_plaintext'] = self.ai21labs_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Ai21LabsConfig:
         """Deserializes the Ai21LabsConfig from a dictionary."""
@@ -76,6 +81,15 @@ def as_dict(self) -> dict:
         if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayConfig:
         """Deserializes the AiGatewayConfig from a dictionary."""
@@ -111,6 +125,15 @@ def as_dict(self) -> dict:
         if self.valid_topics: body['valid_topics'] = [v for v in self.valid_topics]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailParameters into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.invalid_keywords: body['invalid_keywords'] = self.invalid_keywords
+        if self.pii: body['pii'] = self.pii
+        if self.safety is not None: body['safety'] = self.safety
+        if self.valid_topics: body['valid_topics'] = self.valid_topics
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
         """Deserializes the AiGatewayGuardrailParameters from a dictionary."""
@@ -122,11 +145,8 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailParameters:
 
 @dataclass
 class AiGatewayGuardrailPiiBehavior:
-    behavior: AiGatewayGuardrailPiiBehaviorBehavior
-    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
-    guardrail and the request contains PII, the request is not sent to the model server and 400
-    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
-    contains PII, the PII info in the response is redacted and 400 status code is returned."""
+    behavior: Optional[AiGatewayGuardrailPiiBehaviorBehavior] = None
+    """Configuration for input guardrail filters."""
 
     def as_dict(self) -> dict:
         """Serializes the AiGatewayGuardrailPiiBehavior into a dictionary suitable for use as a JSON request body."""
@@ -134,6 +154,12 @@ def as_dict(self) -> dict:
         if self.behavior is not None: body['behavior'] = self.behavior.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrailPiiBehavior into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.behavior is not None: body['behavior'] = self.behavior
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
         """Deserializes the AiGatewayGuardrailPiiBehavior from a dictionary."""
@@ -141,10 +167,6 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrailPiiBehavior:
 
 
 class AiGatewayGuardrailPiiBehaviorBehavior(Enum):
-    """Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input
-    guardrail and the request contains PII, the request is not sent to the model server and 400
-    status code is returned; if 'BLOCK' is set for the output guardrail and the model response
-    contains PII, the PII info in the response is redacted and 400 status code is returned."""
 
     BLOCK = 'BLOCK'
     NONE = 'NONE'
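Since `behavior` is now optional, a bare `AiGatewayGuardrailPiiBehavior()` is valid; a sketch of the common case of blocking PII on request input, using only types defined in this module:

```python
# Sketch: block requests whose input contains PII.
from databricks.sdk.service.serving import (AiGatewayGuardrailParameters,
                                            AiGatewayGuardrailPiiBehavior,
                                            AiGatewayGuardrailPiiBehaviorBehavior,
                                            AiGatewayGuardrails)

guardrails = AiGatewayGuardrails(input=AiGatewayGuardrailParameters(
    pii=AiGatewayGuardrailPiiBehavior(behavior=AiGatewayGuardrailPiiBehaviorBehavior.BLOCK)))
```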
@@ -165,6 +187,13 @@ def as_dict(self) -> dict:
         if self.output: body['output'] = self.output.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayGuardrails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.input: body['input'] = self.input
+        if self.output: body['output'] = self.output
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayGuardrails:
         """Deserializes the AiGatewayGuardrails from a dictionary."""
@@ -198,6 +227,15 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayInferenceTableConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayInferenceTableConfig:
         """Deserializes the AiGatewayInferenceTableConfig from a dictionary."""
@@ -227,6 +265,14 @@ def as_dict(self) -> dict:
         if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayRateLimit into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
         """Deserializes the AiGatewayRateLimit from a dictionary."""
@@ -236,15 +282,12 @@ def from_dict(cls, d: Dict[str, any]) -> AiGatewayRateLimit:
 
 
 class AiGatewayRateLimitKey(Enum):
-    """Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint'
-    being the default if not specified."""
 
     ENDPOINT = 'endpoint'
     USER = 'user'
 
 
 class AiGatewayRateLimitRenewalPeriod(Enum):
-    """Renewal period field for a rate limit. Currently, only 'minute' is supported."""
 
     MINUTE = 'minute'
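A sketch of composing these enums into a limit of 100 calls per minute per user:

```python
# Per-user rate limit: 100 calls per minute.
from databricks.sdk.service.serving import (AiGatewayRateLimit, AiGatewayRateLimitKey,
                                            AiGatewayRateLimitRenewalPeriod)

limit = AiGatewayRateLimit(calls=100,
                           key=AiGatewayRateLimitKey.USER,
                           renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE)
```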
 
@@ -260,6 +303,12 @@ def as_dict(self) -> dict:
         if self.enabled is not None: body['enabled'] = self.enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AiGatewayUsageTrackingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AiGatewayUsageTrackingConfig:
         """Deserializes the AiGatewayUsageTrackingConfig from a dictionary."""
@@ -277,9 +326,9 @@ class AmazonBedrockConfig:
 
     aws_access_key_id: Optional[str] = None
     """The Databricks secret key reference for an AWS access key ID with permissions to interact with
-    Bedrock services. If you prefer to paste your API key directly, see `aws_access_key_id`. You
-    must provide an API key using one of the following fields: `aws_access_key_id` or
-    `aws_access_key_id_plaintext`."""
+    Bedrock services. If you prefer to paste your API key directly, see
+    `aws_access_key_id_plaintext`. You must provide an API key using one of the following fields:
+    `aws_access_key_id` or `aws_access_key_id_plaintext`."""
 
     aws_access_key_id_plaintext: Optional[str] = None
     """An AWS access key ID with permissions to interact with Bedrock services provided as a plaintext
@@ -312,6 +361,19 @@ def as_dict(self) -> dict:
         if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AmazonBedrockConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
+        if self.aws_access_key_id_plaintext is not None:
+            body['aws_access_key_id_plaintext'] = self.aws_access_key_id_plaintext
+        if self.aws_region is not None: body['aws_region'] = self.aws_region
+        if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
+        if self.aws_secret_access_key_plaintext is not None:
+            body['aws_secret_access_key_plaintext'] = self.aws_secret_access_key_plaintext
+        if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
         """Deserializes the AmazonBedrockConfig from a dictionary."""
@@ -324,8 +386,6 @@ def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
 
 
 class AmazonBedrockConfigBedrockProvider(Enum):
-    """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
-    Anthropic, Cohere, AI21Labs, Amazon."""
 
     AI21LABS = 'ai21labs'
     AMAZON = 'amazon'
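A sketch of a Bedrock config using secret key references rather than the `*_plaintext` fields; the `{{secrets/...}}` scope and key names are hypothetical:

```python
# Sketch: Amazon Bedrock config with Databricks secret references.
from databricks.sdk.service.serving import (AmazonBedrockConfig,
                                            AmazonBedrockConfigBedrockProvider)

config = AmazonBedrockConfig(
    aws_region='us-east-1',
    aws_access_key_id='{{secrets/my-scope/bedrock-access-key-id}}',
    aws_secret_access_key='{{secrets/my-scope/bedrock-secret-access-key}}',
    bedrock_provider=AmazonBedrockConfigBedrockProvider.AMAZON)
```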
@@ -353,6 +413,14 @@ def as_dict(self) -> dict:
             body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AnthropicConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.anthropic_api_key is not None: body['anthropic_api_key'] = self.anthropic_api_key
+        if self.anthropic_api_key_plaintext is not None:
+            body['anthropic_api_key_plaintext'] = self.anthropic_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AnthropicConfig:
         """Deserializes the AnthropicConfig from a dictionary."""
@@ -386,6 +454,15 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureConfigInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput:
         """Deserializes the AutoCaptureConfigInput from a dictionary."""
@@ -398,18 +475,21 @@ def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigInput:
 @dataclass
 class AutoCaptureConfigOutput:
     catalog_name: Optional[str] = None
-    """The name of the catalog in Unity Catalog."""
+    """The name of the catalog in Unity Catalog. NOTE: On update, you cannot change the catalog name if
+    the inference table is already enabled."""
 
     enabled: Optional[bool] = None
     """Indicates whether the inference table is enabled."""
 
     schema_name: Optional[str] = None
-    """The name of the schema in Unity Catalog."""
+    """The name of the schema in Unity Catalog. NOTE: On update, you cannot change the schema name if
+    the inference table is already enabled."""
 
     state: Optional[AutoCaptureState] = None
 
     table_name_prefix: Optional[str] = None
-    """The prefix of the table in Unity Catalog."""
+    """The prefix of the table in Unity Catalog. NOTE: On update, you cannot change the prefix name if
+    the inference table is already enabled."""
 
     def as_dict(self) -> dict:
         """Serializes the AutoCaptureConfigOutput into a dictionary suitable for use as a JSON request body."""
@@ -421,6 +501,16 @@ def as_dict(self) -> dict:
         if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureConfigOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.schema_name is not None: body['schema_name'] = self.schema_name
+        if self.state: body['state'] = self.state
+        if self.table_name_prefix is not None: body['table_name_prefix'] = self.table_name_prefix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureConfigOutput:
         """Deserializes the AutoCaptureConfigOutput from a dictionary."""
@@ -441,6 +531,12 @@ def as_dict(self) -> dict:
         if self.payload_table: body['payload_table'] = self.payload_table.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutoCaptureState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.payload_table: body['payload_table'] = self.payload_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutoCaptureState:
         """Deserializes the AutoCaptureState from a dictionary."""
@@ -458,6 +554,12 @@ def as_dict(self) -> dict:
         if self.logs is not None: body['logs'] = self.logs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BuildLogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.logs is not None: body['logs'] = self.logs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BuildLogsResponse:
         """Deserializes the BuildLogsResponse from a dictionary."""
@@ -479,6 +581,13 @@ def as_dict(self) -> dict:
         if self.role is not None: body['role'] = self.role.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChatMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.role is not None: body['role'] = self.role
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChatMessage:
         """Deserializes the ChatMessage from a dictionary."""
@@ -518,6 +627,15 @@ def as_dict(self) -> dict:
             body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CohereConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cohere_api_base is not None: body['cohere_api_base'] = self.cohere_api_base
+        if self.cohere_api_key is not None: body['cohere_api_key'] = self.cohere_api_key
+        if self.cohere_api_key_plaintext is not None:
+            body['cohere_api_key_plaintext'] = self.cohere_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CohereConfig:
         """Deserializes the CohereConfig from a dictionary."""
@@ -532,7 +650,11 @@ class CreateServingEndpoint:
     """The name of the serving endpoint. This field is required and must be unique across a Databricks
     workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores."""
 
-    config: EndpointCoreConfigInput
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
+
+    config: Optional[EndpointCoreConfigInput] = None
     """The core config of the serving endpoint."""
 
     ai_gateway: Optional[AiGatewayConfig] = None
@@ -560,6 +682,17 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
         """Deserializes the CreateServingEndpoint from a dictionary."""
@@ -571,6 +704,37 @@ def from_dict(cls, d: Dict[str, any]) -> CreateServingEndpoint:
                    tags=_repeated_dict(d, 'tags', EndpointTag))
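With `config` now optional and `ai_gateway` available on `CreateServingEndpoint`, a hypothetical request construction; the endpoint name, entity name, and workload size are placeholders:

    from databricks.sdk.service.serving import (CreateServingEndpoint,
                                                EndpointCoreConfigInput,
                                                ServedEntityInput)

    req = CreateServingEndpoint(
        name='my-endpoint',
        config=EndpointCoreConfigInput(served_entities=[
            ServedEntityInput(entity_name='main.default.my_model',
                              entity_version='1',
                              workload_size='Small',
                              scale_to_zero_enabled=True)
        ]))
    body = req.as_dict()  # nested dataclasses serialized recursively for the REST body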
 
 
+@dataclass
+class DataPlaneInfo:
+    """Details necessary to query this object's API through the DataPlane APIs."""
+
+    authorization_details: Optional[str] = None
+    """Authorization details as a string."""
+
+    endpoint_url: Optional[str] = None
+    """The URL of the endpoint for this operation in the dataplane."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DataPlaneInfo into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataPlaneInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authorization_details is not None: body['authorization_details'] = self.authorization_details
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DataPlaneInfo:
+        """Deserializes the DataPlaneInfo from a dictionary."""
+        return cls(authorization_details=d.get('authorization_details', None),
+                   endpoint_url=d.get('endpoint_url', None))
+
+
 @dataclass
 class DatabricksModelServingConfig:
     databricks_workspace_url: str
@@ -601,6 +765,16 @@ def as_dict(self) -> dict:
             body['databricks_workspace_url'] = self.databricks_workspace_url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabricksModelServingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.databricks_api_token is not None: body['databricks_api_token'] = self.databricks_api_token
+        if self.databricks_api_token_plaintext is not None:
+            body['databricks_api_token_plaintext'] = self.databricks_api_token_plaintext
+        if self.databricks_workspace_url is not None:
+            body['databricks_workspace_url'] = self.databricks_workspace_url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DatabricksModelServingConfig:
         """Deserializes the DatabricksModelServingConfig from a dictionary."""
@@ -625,6 +799,14 @@ def as_dict(self) -> dict:
         if self.index: body['index'] = [v for v in self.index]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataframeSplitInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.data: body['data'] = self.data
+        if self.index: body['index'] = self.index
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataframeSplitInput:
         """Deserializes the DataframeSplitInput from a dictionary."""
@@ -639,6 +821,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -663,6 +850,14 @@ def as_dict(self) -> dict:
         if self.object is not None: body['object'] = self.object.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingsV1ResponseEmbeddingElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding: body['embedding'] = self.embedding
+        if self.index is not None: body['index'] = self.index
+        if self.object is not None: body['object'] = self.object
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingsV1ResponseEmbeddingElement:
         """Deserializes the EmbeddingsV1ResponseEmbeddingElement from a dictionary."""
@@ -681,21 +876,22 @@ class EmbeddingsV1ResponseEmbeddingElementObject(Enum):
 class EndpointCoreConfigInput:
     auto_capture_config: Optional[AutoCaptureConfigInput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated when creating new provisioned throughput endpoints,
+    or when updating existing provisioned throughput endpoints that have never had inference
+    tables configured; in these cases, please use AI Gateway to manage inference tables."""
 
     name: Optional[str] = None
     """The name of the serving endpoint to update. This field is required."""
 
     served_entities: Optional[List[ServedEntityInput]] = None
-    """A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-    entities."""
+    """The list of served entities under the serving endpoint config."""
 
     served_models: Optional[List[ServedModelInput]] = None
-    """(Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-    serving endpoint can have up to 15 served models."""
+    """(Deprecated, use served_entities instead) The list of served models under the serving endpoint
+    config."""
 
     traffic_config: Optional[TrafficConfig] = None
-    """The traffic config defining how invocations to the serving endpoint should be routed."""
+    """The traffic configuration associated with the serving endpoint config."""
 
     def as_dict(self) -> dict:
         """Serializes the EndpointCoreConfigInput into a dictionary suitable for use as a JSON request body."""
@@ -707,6 +903,16 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.name is not None: body['name'] = self.name
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput:
         """Deserializes the EndpointCoreConfigInput from a dictionary."""
@@ -721,7 +927,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigInput:
 class EndpointCoreConfigOutput:
     auto_capture_config: Optional[AutoCaptureConfigOutput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated when creating new provisioned throughput endpoints,
+    or when updating existing provisioned throughput endpoints that have never had inference
+    tables configured; in these cases, please use AI Gateway to manage inference tables."""
 
     config_version: Optional[int] = None
     """The config version that the serving endpoint is currently serving."""
@@ -746,6 +954,16 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.config_version is not None: body['config_version'] = self.config_version
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigOutput:
         """Deserializes the EndpointCoreConfigOutput from a dictionary."""
@@ -772,6 +990,13 @@ def as_dict(self) -> dict:
         if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointCoreConfigSummary into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary:
         """Deserializes the EndpointCoreConfigSummary from a dictionary."""
@@ -783,7 +1008,9 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointCoreConfigSummary:
 class EndpointPendingConfig:
     auto_capture_config: Optional[AutoCaptureConfigOutput] = None
     """Configuration for Inference Tables which automatically logs requests and responses to Unity
-    Catalog."""
+    Catalog. Note: this field is deprecated when creating new provisioned throughput endpoints,
+    or when updating existing provisioned throughput endpoints that have never had inference
+    tables configured; in these cases, please use AI Gateway to manage inference tables."""
 
     config_version: Optional[int] = None
     """The config version that the serving endpoint is currently serving."""
@@ -812,6 +1039,17 @@ def as_dict(self) -> dict:
         if self.traffic_config: body['traffic_config'] = self.traffic_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointPendingConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config
+        if self.config_version is not None: body['config_version'] = self.config_version
+        if self.served_entities: body['served_entities'] = self.served_entities
+        if self.served_models: body['served_models'] = self.served_models
+        if self.start_time is not None: body['start_time'] = self.start_time
+        if self.traffic_config: body['traffic_config'] = self.traffic_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig:
         """Deserializes the EndpointPendingConfig from a dictionary."""
@@ -843,6 +1081,13 @@ def as_dict(self) -> dict:
         if self.ready is not None: body['ready'] = self.ready.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_update is not None: body['config_update'] = self.config_update
+        if self.ready is not None: body['ready'] = self.ready
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointState:
         """Deserializes the EndpointState from a dictionary."""
@@ -851,10 +1096,6 @@ def from_dict(cls, d: Dict[str, any]) -> EndpointState:
 
 
 class EndpointStateConfigUpdate(Enum):
-    """The state of an endpoint's config update. This informs the user if the pending_config is in
-    progress, if the update failed, or if there is no update in progress. Note that if the
-    endpoint's config_update state value is IN_PROGRESS, another update can not be made until the
-    update completes or fails."""
 
     IN_PROGRESS = 'IN_PROGRESS'
     NOT_UPDATING = 'NOT_UPDATING'
@@ -863,9 +1104,6 @@ class EndpointStateConfigUpdate(Enum):
 
 
 class EndpointStateReady(Enum):
-    """The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is
-    READY if all of the served entities in its active configuration are ready. If any of the
-    actively served entities are in a non-ready state, the endpoint state will be NOT_READY."""
 
     NOT_READY = 'NOT_READY'
     READY = 'READY'
@@ -886,12 +1124,41 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTag into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTag:
         """Deserializes the EndpointTag from a dictionary."""
         return cls(key=d.get('key', None), value=d.get('value', None))
 
 
+@dataclass
+class EndpointTags:
+    tags: Optional[List[EndpointTag]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EndpointTags into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.tags: body['tags'] = [v.as_dict() for v in self.tags]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.tags: body['tags'] = self.tags
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
+        """Deserializes the EndpointTags from a dictionary."""
+        return cls(tags=_repeated_dict(d, 'tags', EndpointTag))
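A quick round-trip sketch for the new `EndpointTags` wrapper (the tag key and value are placeholders):

    from databricks.sdk.service.serving import EndpointTag, EndpointTags

    tags = EndpointTags(tags=[EndpointTag(key='team', value='ml-platform')])
    assert EndpointTags.from_dict(tags.as_dict()) == tags  # as_dict/from_dict round-trip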
+
+
 @dataclass
 class ExportMetricsResponse:
     contents: Optional[BinaryIO] = None
@@ -902,18 +1169,89 @@ def as_dict(self) -> dict:
         if self.contents: body['contents'] = self.contents
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportMetricsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportMetricsResponse:
         """Deserializes the ExportMetricsResponse from a dictionary."""
         return cls(contents=d.get('contents', None))
 
 
+@dataclass
+class ExternalFunctionRequest:
+    """Simple Proto message for testing"""
+
+    connection_name: str
+    """The connection name to use. This is required to identify the external connection."""
+
+    method: ExternalFunctionRequestHttpMethod
+    """The HTTP method to use (e.g., 'GET', 'POST')."""
+
+    path: str
+    """The relative path for the API endpoint. This is required."""
+
+    headers: Optional[str] = None
+    """Additional headers for the request. If not provided, only auth headers from connections would be
+    passed."""
+
+    json: Optional[str] = None
+    """The JSON payload to send in the request body."""
+
+    params: Optional[str] = None
+    """Query parameters for the request."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ExternalFunctionRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.headers is not None: body['headers'] = self.headers
+        if self.json is not None: body['json'] = self.json
+        if self.method is not None: body['method'] = self.method.value
+        if self.params is not None: body['params'] = self.params
+        if self.path is not None: body['path'] = self.path
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalFunctionRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.connection_name is not None: body['connection_name'] = self.connection_name
+        if self.headers is not None: body['headers'] = self.headers
+        if self.json is not None: body['json'] = self.json
+        if self.method is not None: body['method'] = self.method
+        if self.params is not None: body['params'] = self.params
+        if self.path is not None: body['path'] = self.path
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ExternalFunctionRequest:
+        """Deserializes the ExternalFunctionRequest from a dictionary."""
+        return cls(connection_name=d.get('connection_name', None),
+                   headers=d.get('headers', None),
+                   json=d.get('json', None),
+                   method=_enum(d, 'method', ExternalFunctionRequestHttpMethod),
+                   params=d.get('params', None),
+                   path=d.get('path', None))
+
+
+class ExternalFunctionRequestHttpMethod(Enum):
+
+    DELETE = 'DELETE'
+    GET = 'GET'
+    PATCH = 'PATCH'
+    POST = 'POST'
+    PUT = 'PUT'
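A hypothetical construction of the new request type (the connection name and path are placeholders); note that `as_dict` serializes the method enum to its string value while `as_shallow_dict` keeps the enum member:

    from databricks.sdk.service.serving import (ExternalFunctionRequest,
                                                ExternalFunctionRequestHttpMethod)

    req = ExternalFunctionRequest(connection_name='my_connection',
                                  method=ExternalFunctionRequestHttpMethod.GET,
                                  path='/api/v1/status')
    req.as_dict()['method']          # 'GET'
    req.as_shallow_dict()['method']  # ExternalFunctionRequestHttpMethod.GET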
+
+
 @dataclass
 class ExternalModel:
     provider: ExternalModelProvider
     """The name of the provider for the external model. Currently, the supported providers are
     'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and 'palm'.","""
+    'google-cloud-vertex-ai', 'openai', 'palm', and 'custom'."""
 
     name: str
     """The name of the external model."""
@@ -963,6 +1301,24 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config
+        if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config
+        if self.anthropic_config: body['anthropic_config'] = self.anthropic_config
+        if self.cohere_config: body['cohere_config'] = self.cohere_config
+        if self.databricks_model_serving_config:
+            body['databricks_model_serving_config'] = self.databricks_model_serving_config
+        if self.google_cloud_vertex_ai_config:
+            body['google_cloud_vertex_ai_config'] = self.google_cloud_vertex_ai_config
+        if self.name is not None: body['name'] = self.name
+        if self.openai_config: body['openai_config'] = self.openai_config
+        if self.palm_config: body['palm_config'] = self.palm_config
+        if self.provider is not None: body['provider'] = self.provider
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
         """Deserializes the ExternalModel from a dictionary."""
@@ -982,9 +1338,6 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
 
 
 class ExternalModelProvider(Enum):
-    """The name of the provider for the external model. Currently, the supported providers are
-    'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving',
-    'google-cloud-vertex-ai', 'openai', and 'palm'.","""
 
     AI21LABS = 'ai21labs'
     AMAZON_BEDROCK = 'amazon-bedrock'
@@ -1015,6 +1368,14 @@ def as_dict(self) -> dict:
         if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalModelUsageElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.completion_tokens is not None: body['completion_tokens'] = self.completion_tokens
+        if self.prompt_tokens is not None: body['prompt_tokens'] = self.prompt_tokens
+        if self.total_tokens is not None: body['total_tokens'] = self.total_tokens
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
         """Deserializes the ExternalModelUsageElement from a dictionary."""
@@ -1025,17 +1386,16 @@ def from_dict(cls, d: Dict[str, any]) -> ExternalModelUsageElement:
 
 @dataclass
 class FoundationModel:
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
+
     description: Optional[str] = None
-    """The description of the foundation model."""
 
     display_name: Optional[str] = None
-    """The display name of the foundation model."""
 
     docs: Optional[str] = None
-    """The URL to the documentation of the foundation model."""
 
     name: Optional[str] = None
-    """The name of the foundation model."""
 
     def as_dict(self) -> dict:
         """Serializes the FoundationModel into a dictionary suitable for use as a JSON request body."""
@@ -1046,6 +1406,15 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FoundationModel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.docs is not None: body['docs'] = self.docs
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
         """Deserializes the FoundationModel from a dictionary."""
@@ -1057,18 +1426,24 @@ def from_dict(cls, d: Dict[str, any]) -> FoundationModel:
 
 @dataclass
 class GetOpenApiResponse:
-    """The response is an OpenAPI spec in JSON format that typically includes fields like openapi,
-    info, servers and paths, etc."""
+    contents: Optional[BinaryIO] = None
 
     def as_dict(self) -> dict:
         """Serializes the GetOpenApiResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetOpenApiResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetOpenApiResponse:
         """Deserializes the GetOpenApiResponse from a dictionary."""
-        return cls()
+        return cls(contents=d.get('contents', None))
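Since `GetOpenApiResponse` now carries the raw spec as a binary stream instead of an empty placeholder, a hedged usage sketch, assuming a `WorkspaceClient` `w` and that `serving_endpoints.get_open_api` returns this type (the endpoint name is a placeholder):

    resp = w.serving_endpoints.get_open_api('my-endpoint')
    spec = resp.contents.read()  # raw OpenAPI spec bytes (JSON)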
 
 
 @dataclass
@@ -1082,6 +1457,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetServingEndpointPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsResponse:
         """Deserializes the GetServingEndpointPermissionLevelsResponse from a dictionary."""
@@ -1091,13 +1472,23 @@ def from_dict(cls, d: Dict[str, any]) -> GetServingEndpointPermissionLevelsRespo
 
 @dataclass
 class GoogleCloudVertexAiConfig:
+    project_id: str
+    """This is the Google Cloud project id that the service account is associated with."""
+
+    region: str
+    """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more
+    details. Some models are only available in specific regions.
+    
+    [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations"""
+
     private_key: Optional[str] = None
     """The Databricks secret key reference for a private key for the service account which has access
     to the Google Cloud Vertex AI Service. See [Best practices for managing service account keys].
     If you prefer to paste your API key directly, see `private_key_plaintext`. You must provide an
     API key using one of the following fields: `private_key` or `private_key_plaintext`
     
-    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
+    [Best practices for managing service account keys]:
+    https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
     private_key_plaintext: Optional[str] = None
     """The private key for the service account which has access to the Google Cloud Vertex AI Service
@@ -1105,16 +1496,8 @@ class GoogleCloudVertexAiConfig:
     prefer to reference your key using Databricks Secrets, see `private_key`. You must provide an
     API key using one of the following fields: `private_key` or `private_key_plaintext`.
     
-    [Best practices for managing service account keys]: https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
-
-    project_id: Optional[str] = None
-    """This is the Google Cloud project id that the service account is associated with."""
-
-    region: Optional[str] = None
-    """This is the region for the Google Cloud Vertex AI Service. See [supported regions] for more
-    details. Some models are only available in specific regions.
-    
-    [supported regions]: https://cloud.google.com/vertex-ai/docs/general/locations"""
+    [Best practices for managing service account keys]:
+    https://cloud.google.com/iam/docs/best-practices-for-managing-service-account-keys"""
 
     def as_dict(self) -> dict:
         """Serializes the GoogleCloudVertexAiConfig into a dictionary suitable for use as a JSON request body."""
@@ -1125,6 +1508,15 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GoogleCloudVertexAiConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.private_key is not None: body['private_key'] = self.private_key
+        if self.private_key_plaintext is not None: body['private_key_plaintext'] = self.private_key_plaintext
+        if self.project_id is not None: body['project_id'] = self.project_id
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
         """Deserializes the GoogleCloudVertexAiConfig from a dictionary."""
@@ -1134,6 +1526,28 @@ def from_dict(cls, d: Dict[str, any]) -> GoogleCloudVertexAiConfig:
                    region=d.get('region', None))
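With `project_id` and `region` promoted to required fields, a construction sketch; the values are placeholders, and the secret reference follows the `{{secrets/scope/key}}` convention used in the docstrings above:

    from databricks.sdk.service.serving import GoogleCloudVertexAiConfig

    cfg = GoogleCloudVertexAiConfig(project_id='my-gcp-project',
                                    region='us-central1',
                                    private_key='{{secrets/my_scope/vertex_key}}')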
 
 
+@dataclass
+class HttpRequestResponse:
+    contents: Optional[BinaryIO] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the HttpRequestResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the HttpRequestResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.contents: body['contents'] = self.contents
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> HttpRequestResponse:
+        """Deserializes the HttpRequestResponse from a dictionary."""
+        return cls(contents=d.get('contents', None))
+
+
 @dataclass
 class ListEndpointsResponse:
     endpoints: Optional[List[ServingEndpoint]] = None
@@ -1145,6 +1559,12 @@ def as_dict(self) -> dict:
         if self.endpoints: body['endpoints'] = [v.as_dict() for v in self.endpoints]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListEndpointsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoints: body['endpoints'] = self.endpoints
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
         """Deserializes the ListEndpointsResponse from a dictionary."""
@@ -1153,7 +1573,10 @@ def from_dict(cls, d: Dict[str, any]) -> ListEndpointsResponse:
 
 @dataclass
 class ModelDataPlaneInfo:
-    query_info: Optional[oauth2.DataPlaneInfo] = None
+    """A representation of all DataPlaneInfo for operations that can be done on a model through Data
+    Plane APIs."""
+
+    query_info: Optional[DataPlaneInfo] = None
     """Information required to query DataPlane API 'query' endpoint."""
 
     def as_dict(self) -> dict:
@@ -1162,14 +1585,22 @@ def as_dict(self) -> dict:
         if self.query_info: body['query_info'] = self.query_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ModelDataPlaneInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_info: body['query_info'] = self.query_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ModelDataPlaneInfo:
         """Deserializes the ModelDataPlaneInfo from a dictionary."""
-        return cls(query_info=_from_dict(d, 'query_info', oauth2.DataPlaneInfo))
+        return cls(query_info=_from_dict(d, 'query_info', DataPlaneInfo))
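The `query_info` field now deserializes into the serving module's own `DataPlaneInfo` rather than `oauth2.DataPlaneInfo`; a minimal sketch with placeholder values:

    from databricks.sdk.service.serving import ModelDataPlaneInfo

    info = ModelDataPlaneInfo.from_dict(
        {'query_info': {'authorization_details': 'details',
                        'endpoint_url': 'https://example.cloud.databricks.com/query'}})
    type(info.query_info)  # serving.DataPlaneInfo, no longer oauth2.DataPlaneInfo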
 
 
 @dataclass
 class OpenAiConfig:
+    """Configs needed to create an OpenAI model route."""
+
     microsoft_entra_client_id: Optional[str] = None
     """This field is only required for Azure AD OpenAI and is the Microsoft Entra Client ID."""
 
@@ -1243,6 +1674,28 @@ def as_dict(self) -> dict:
         if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OpenAiConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.microsoft_entra_client_id is not None:
+            body['microsoft_entra_client_id'] = self.microsoft_entra_client_id
+        if self.microsoft_entra_client_secret is not None:
+            body['microsoft_entra_client_secret'] = self.microsoft_entra_client_secret
+        if self.microsoft_entra_client_secret_plaintext is not None:
+            body['microsoft_entra_client_secret_plaintext'] = self.microsoft_entra_client_secret_plaintext
+        if self.microsoft_entra_tenant_id is not None:
+            body['microsoft_entra_tenant_id'] = self.microsoft_entra_tenant_id
+        if self.openai_api_base is not None: body['openai_api_base'] = self.openai_api_base
+        if self.openai_api_key is not None: body['openai_api_key'] = self.openai_api_key
+        if self.openai_api_key_plaintext is not None:
+            body['openai_api_key_plaintext'] = self.openai_api_key_plaintext
+        if self.openai_api_type is not None: body['openai_api_type'] = self.openai_api_type
+        if self.openai_api_version is not None: body['openai_api_version'] = self.openai_api_version
+        if self.openai_deployment_name is not None:
+            body['openai_deployment_name'] = self.openai_deployment_name
+        if self.openai_organization is not None: body['openai_organization'] = self.openai_organization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OpenAiConfig:
         """Deserializes the OpenAiConfig from a dictionary."""
@@ -1280,6 +1733,14 @@ def as_dict(self) -> dict:
             body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PaLmConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.palm_api_key is not None: body['palm_api_key'] = self.palm_api_key
+        if self.palm_api_key_plaintext is not None:
+            body['palm_api_key_plaintext'] = self.palm_api_key_plaintext
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PaLmConfig:
         """Deserializes the PaLmConfig from a dictionary."""
@@ -1306,6 +1767,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PatchServingEndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.add_tags: body['add_tags'] = self.add_tags
+        if self.delete_tags: body['delete_tags'] = self.delete_tags
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
         """Deserializes the PatchServingEndpointTags from a dictionary."""
@@ -1317,13 +1786,10 @@ def from_dict(cls, d: Dict[str, any]) -> PatchServingEndpointTags:
 @dataclass
 class PayloadTable:
     name: Optional[str] = None
-    """The name of the payload table."""
 
     status: Optional[str] = None
-    """The status of the payload table."""
 
     status_message: Optional[str] = None
-    """The status message of the payload table."""
 
     def as_dict(self) -> dict:
         """Serializes the PayloadTable into a dictionary suitable for use as a JSON request body."""
@@ -1333,6 +1799,14 @@ def as_dict(self) -> dict:
         if self.status_message is not None: body['status_message'] = self.status_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PayloadTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.status is not None: body['status'] = self.status
+        if self.status_message is not None: body['status_message'] = self.status_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
         """Deserializes the PayloadTable from a dictionary."""
@@ -1341,6 +1815,57 @@ def from_dict(cls, d: Dict[str, any]) -> PayloadTable:
                    status_message=d.get('status_message', None))
 
 
+@dataclass
+class PutAiGatewayRequest:
+    guardrails: Optional[AiGatewayGuardrails] = None
+    """Configuration for AI Guardrails to prevent unwanted data and unsafe data in requests and
+    responses."""
+
+    inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
+    """Configuration for payload logging using inference tables. Use these tables to monitor and audit
+    data being sent to and received from model APIs and to improve model quality."""
+
+    name: Optional[str] = None
+    """The name of the serving endpoint whose AI Gateway is being updated. This field is required."""
+
+    rate_limits: Optional[List[AiGatewayRateLimit]] = None
+    """Configuration for rate limits which can be set to limit endpoint traffic."""
+
+    usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None
+    """Configuration to enable usage tracking using system tables. These tables allow you to monitor
+    operational usage on endpoints and their associated costs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutAiGatewayRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails.as_dict()
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config.as_dict()
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAiGatewayRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayRequest:
+        """Deserializes the PutAiGatewayRequest from a dictionary."""
+        return cls(guardrails=_from_dict(d, 'guardrails', AiGatewayGuardrails),
+                   inference_table_config=_from_dict(d, 'inference_table_config',
+                                                     AiGatewayInferenceTableConfig),
+                   name=d.get('name', None),
+                   rate_limits=_repeated_dict(d, 'rate_limits', AiGatewayRateLimit),
+                   usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
+
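A hypothetical `PutAiGatewayRequest`, assuming the `AiGatewayRateLimit` types defined earlier in this module and their `USER`/`MINUTE` enum members (the endpoint name and limits are placeholders):

    from databricks.sdk.service.serving import (AiGatewayRateLimit,
                                                AiGatewayRateLimitKey,
                                                AiGatewayRateLimitRenewalPeriod,
                                                AiGatewayUsageTrackingConfig,
                                                PutAiGatewayRequest)

    req = PutAiGatewayRequest(
        name='my-endpoint',
        rate_limits=[
            AiGatewayRateLimit(calls=100,
                               key=AiGatewayRateLimitKey.USER,
                               renewal_period=AiGatewayRateLimitRenewalPeriod.MINUTE)
        ],
        usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True))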
+
 @dataclass
 class PutAiGatewayResponse:
     guardrails: Optional[AiGatewayGuardrails] = None
@@ -1349,7 +1874,7 @@ class PutAiGatewayResponse:
 
     inference_table_config: Optional[AiGatewayInferenceTableConfig] = None
     """Configuration for payload logging using inference tables. Use these tables to monitor and audit
-    data being sent to and received from model APIs and to improve model quality ."""
+    data being sent to and received from model APIs and to improve model quality."""
 
     rate_limits: Optional[List[AiGatewayRateLimit]] = None
     """Configuration for rate limits which can be set to limit endpoint traffic."""
@@ -1367,6 +1892,15 @@ def as_dict(self) -> dict:
         if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAiGatewayResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.guardrails: body['guardrails'] = self.guardrails
+        if self.inference_table_config: body['inference_table_config'] = self.inference_table_config
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        if self.usage_tracking_config: body['usage_tracking_config'] = self.usage_tracking_config
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
         """Deserializes the PutAiGatewayResponse from a dictionary."""
@@ -1377,6 +1911,34 @@ def from_dict(cls, d: Dict[str, any]) -> PutAiGatewayResponse:
                    usage_tracking_config=_from_dict(d, 'usage_tracking_config', AiGatewayUsageTrackingConfig))
 
 
+@dataclass
+class PutRequest:
+    name: Optional[str] = None
+    """The name of the serving endpoint whose rate limits are being updated. This field is required."""
+
+    rate_limits: Optional[List[RateLimit]] = None
+    """The list of endpoint rate limits."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PutRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PutRequest:
+        """Deserializes the PutRequest from a dictionary."""
+        return cls(name=d.get('name', None), rate_limits=_repeated_dict(d, 'rate_limits', RateLimit))
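Similarly, a hypothetical `PutRequest` using the `RateLimit` type and the enums defined below (the endpoint name and call count are placeholders):

    from databricks.sdk.service.serving import (PutRequest, RateLimit,
                                                RateLimitKey,
                                                RateLimitRenewalPeriod)

    req = PutRequest(name='my-endpoint',
                     rate_limits=[
                         RateLimit(calls=50,
                                   key=RateLimitKey.ENDPOINT,
                                   renewal_period=RateLimitRenewalPeriod.MINUTE)
                     ])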
+
+
 @dataclass
 class PutResponse:
     rate_limits: Optional[List[RateLimit]] = None
@@ -1388,6 +1950,12 @@ def as_dict(self) -> dict:
         if self.rate_limits: body['rate_limits'] = [v.as_dict() for v in self.rate_limits]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.rate_limits: body['rate_limits'] = self.rate_limits
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutResponse:
         """Deserializes the PutResponse from a dictionary."""
@@ -1473,6 +2041,25 @@ def as_dict(self) -> dict:
         if self.temperature is not None: body['temperature'] = self.temperature
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEndpointInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dataframe_records: body['dataframe_records'] = self.dataframe_records
+        if self.dataframe_split: body['dataframe_split'] = self.dataframe_split
+        if self.extra_params: body['extra_params'] = self.extra_params
+        if self.input: body['input'] = self.input
+        if self.inputs: body['inputs'] = self.inputs
+        if self.instances: body['instances'] = self.instances
+        if self.max_tokens is not None: body['max_tokens'] = self.max_tokens
+        if self.messages: body['messages'] = self.messages
+        if self.n is not None: body['n'] = self.n
+        if self.name is not None: body['name'] = self.name
+        if self.prompt: body['prompt'] = self.prompt
+        if self.stop: body['stop'] = self.stop
+        if self.stream is not None: body['stream'] = self.stream
+        if self.temperature is not None: body['temperature'] = self.temperature
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEndpointInput:
         """Deserializes the QueryEndpointInput from a dictionary."""
@@ -1532,15 +2119,29 @@ class QueryEndpointResponse:
     def as_dict(self) -> dict:
         """Serializes the QueryEndpointResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.choices: body['choices'] = [v.as_dict() for v in self.choices]
+        if self.choices: body['choices'] = [v.as_dict() for v in self.choices]
+        if self.created is not None: body['created'] = self.created
+        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        if self.id is not None: body['id'] = self.id
+        if self.model is not None: body['model'] = self.model
+        if self.object is not None: body['object'] = self.object.value
+        if self.predictions: body['predictions'] = [v for v in self.predictions]
+        if self.served_model_name is not None: body['served-model-name'] = self.served_model_name
+        if self.usage: body['usage'] = self.usage.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.choices: body['choices'] = self.choices
         if self.created is not None: body['created'] = self.created
-        if self.data: body['data'] = [v.as_dict() for v in self.data]
+        if self.data: body['data'] = self.data
         if self.id is not None: body['id'] = self.id
         if self.model is not None: body['model'] = self.model
-        if self.object is not None: body['object'] = self.object.value
-        if self.predictions: body['predictions'] = [v for v in self.predictions]
+        if self.object is not None: body['object'] = self.object
+        if self.predictions: body['predictions'] = self.predictions
         if self.served_model_name is not None: body['served-model-name'] = self.served_model_name
-        if self.usage: body['usage'] = self.usage.as_dict()
+        if self.usage: body['usage'] = self.usage
         return body
 
     @classmethod
@@ -1586,6 +2187,14 @@ def as_dict(self) -> dict:
         if self.renewal_period is not None: body['renewal_period'] = self.renewal_period.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RateLimit into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.calls is not None: body['calls'] = self.calls
+        if self.key is not None: body['key'] = self.key
+        if self.renewal_period is not None: body['renewal_period'] = self.renewal_period
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RateLimit:
         """Deserializes the RateLimit from a dictionary."""
@@ -1595,15 +2204,12 @@ def from_dict(cls, d: Dict[str, any]) -> RateLimit:
 
 
 class RateLimitKey(Enum):
-    """Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are
-    supported, with 'endpoint' being the default if not specified."""
 
     ENDPOINT = 'endpoint'
     USER = 'user'
 
 
 class RateLimitRenewalPeriod(Enum):
-    """Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported."""
 
     MINUTE = 'minute'
 
@@ -1624,6 +2230,13 @@ def as_dict(self) -> dict:
         if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Route into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.served_model_name is not None: body['served_model_name'] = self.served_model_name
+        if self.traffic_percentage is not None: body['traffic_percentage'] = self.traffic_percentage
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Route:
         """Deserializes the Route from a dictionary."""
@@ -1637,11 +2250,9 @@ class ServedEntityInput:
     """The name of the entity to be served. The entity may be a model in the Databricks Model Registry,
     a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
     object, the full name of the object should be given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
+    **catalog_name.schema_name.model_name**."""
 
     entity_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry to be served or empty if the entity is a
-    FEATURE_SPEC."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
@@ -1670,7 +2281,7 @@ class ServedEntityInput:
     """The name of a served entity. It must be unique across an endpoint. A served entity name can
     consist of alphanumeric characters, dashes, and underscores. If not specified for an external
     model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
-    not specified for other entities, it defaults to -."""
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
     """Whether the compute resources for the served entity should scale down to zero."""
@@ -1683,13 +2294,13 @@ class ServedEntityInput:
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
     is 0."""
 
-    workload_type: Optional[str] = None
+    workload_type: Optional[ServingModelWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntityInput into a dictionary suitable for use as a JSON request body."""
@@ -1706,6 +2317,24 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntityInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.external_model: body['external_model'] = self.external_model
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
@@ -1722,26 +2351,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityInput:
                    name=d.get('name', None),
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedEntityOutput:
     creation_timestamp: Optional[int] = None
-    """The creation timestamp of the served entity in Unix time."""
 
     creator: Optional[str] = None
-    """The email of the user who created the served entity."""
 
     entity_name: Optional[str] = None
-    """The name of the entity served. The entity may be a model in the Databricks Model Registry, a
-    model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
-    object, the full name of the object is given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
+    """The name of the entity to be served. The entity may be a model in the Databricks Model Registry,
+    a model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
+    object, the full name of the object should be given in the form of
+    **catalog_name.schema_name.model_name**."""
 
     entity_version: Optional[str] = None
-    """The version of the served entity in Databricks Model Registry or empty if the entity is a
-    FEATURE_SPEC."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
@@ -1750,14 +2375,16 @@ class ServedEntityOutput:
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     external_model: Optional[ExternalModel] = None
-    """The external model that is served. NOTE: Only one of external_model, foundation_model, and
-    (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
-    returned based on the endpoint type."""
+    """The external model to be served. NOTE: Only one of external_model and (entity_name,
+    entity_version, workload_size, workload_type, and scale_to_zero_enabled) can be specified with
+    the latter set being used for custom model serving for a Databricks registered model. For an
+    existing endpoint with external_model, it cannot be updated to an endpoint without
+    external_model. If the endpoint is created without external_model, users cannot update it to add
+    external_model later. The task type of all external models within an endpoint must be the same."""
 
     foundation_model: Optional[FoundationModel] = None
-    """The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
-    (entity_name, entity_version, workload_size, workload_type, and scale_to_zero_enabled) is
-    returned based on the endpoint type."""
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
 
     instance_profile_arn: Optional[str] = None
     """ARN of the instance profile that the served entity uses to access AWS resources."""
@@ -1769,13 +2396,15 @@ class ServedEntityOutput:
     """The minimum tokens per second that the endpoint can scale down to."""
 
     name: Optional[str] = None
-    """The name of the served entity."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
     """Whether the compute resources for the served entity should scale down to zero."""
 
     state: Optional[ServedModelState] = None
-    """Information corresponding to the state of the served entity."""
 
     workload_size: Optional[str] = None
     """The workload size of the served entity. The workload size corresponds to a range of provisioned
@@ -1783,15 +2412,15 @@ class ServedEntityOutput:
     process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
     "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
-    will be 0."""
+    is 0."""
 
-    workload_type: Optional[str] = None
+    workload_type: Optional[ServingModelWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntityOutput into a dictionary suitable for use as a JSON request body."""
@@ -1812,6 +2441,28 @@ def as_dict(self) -> dict:
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.state: body['state'] = self.state.as_dict()
         if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntityOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.external_model: body['external_model'] = self.external_model
+        if self.foundation_model: body['foundation_model'] = self.foundation_model
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.state: body['state'] = self.state
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
@@ -1832,31 +2483,22 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntityOutput:
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    state=_from_dict(d, 'state', ServedModelState),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedEntitySpec:
     entity_name: Optional[str] = None
-    """The name of the entity served. The entity may be a model in the Databricks Model Registry, a
-    model in the Unity Catalog (UC), or a function of type FEATURE_SPEC in the UC. If it is a UC
-    object, the full name of the object is given in the form of
-    __catalog_name__.__schema_name__.__model_name__."""
 
     entity_version: Optional[str] = None
-    """The version of the served entity in Databricks Model Registry or empty if the entity is a
-    FEATURE_SPEC."""
 
     external_model: Optional[ExternalModel] = None
-    """The external model that is served. NOTE: Only one of external_model, foundation_model, and
-    (entity_name, entity_version) is returned based on the endpoint type."""
 
     foundation_model: Optional[FoundationModel] = None
-    """The foundation model that is served. NOTE: Only one of foundation_model, external_model, and
-    (entity_name, entity_version) is returned based on the endpoint type."""
+    """All fields are not sensitive as they are hard-coded in the system and made available to
+    customers."""
 
     name: Optional[str] = None
-    """The name of the served entity."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedEntitySpec into a dictionary suitable for use as a JSON request body."""
@@ -1868,6 +2510,16 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedEntitySpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.entity_name is not None: body['entity_name'] = self.entity_name
+        if self.entity_version is not None: body['entity_version'] = self.entity_version
+        if self.external_model: body['external_model'] = self.external_model
+        if self.foundation_model: body['foundation_model'] = self.foundation_model
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
         """Deserializes the ServedEntitySpec from a dictionary."""
@@ -1880,24 +2532,27 @@ def from_dict(cls, d: Dict[str, any]) -> ServedEntitySpec:
 
 @dataclass
 class ServedModelInput:
+    scale_to_zero_enabled: bool
+    """Whether the compute resources for the served entity should scale down to zero."""
+
     model_name: str
-    """The name of the model in Databricks Model Registry to be served or if the model resides in Unity
-    Catalog, the full name of model, in the form of __catalog_name__.__schema_name__.__model_name__."""
 
     model_version: str
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
-
-    scale_to_zero_enabled: bool
-    """Whether the compute resources for the served model should scale down to zero."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
-    for serving this model. Note: this is an experimental feature and subject to change. Example
-    model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+    for serving this entity. Note: this is an experimental feature and subject to change. Example
+    entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     instance_profile_arn: Optional[str] = None
-    """ARN of the instance profile that the served model will use to access AWS resources."""
+    """ARN of the instance profile that the served entity uses to access AWS resources."""
+
+    max_provisioned_throughput: Optional[int] = None
+    """The maximum tokens per second that the endpoint can scale up to."""
+
+    min_provisioned_throughput: Optional[int] = None
+    """The minimum tokens per second that the endpoint can scale down to."""
 
     max_provisioned_throughput: Optional[int] = None
     """The maximum tokens per second that the endpoint can scale up to."""
@@ -1906,9 +2561,18 @@ class ServedModelInput:
     """The minimum tokens per second that the endpoint can scale down to."""
 
     name: Optional[str] = None
-    """The name of a served model. It must be unique across an endpoint. If not specified, this field
-    will default to -. A served model name can consist of alphanumeric
-    characters, dashes, and underscores."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
+
+    workload_size: Optional[ServedModelInputWorkloadSize] = None
+    """The workload size of the served entity. The workload size corresponds to a range of provisioned
+    concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+    process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
+    "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
+    scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
+    is 0."""
 
     workload_size: Optional[ServedModelInputWorkloadSize] = None
     """The workload size of the served model. The workload size corresponds to a range of provisioned
@@ -1919,12 +2583,12 @@ class ServedModelInput:
     each workload size will be 0."""
 
     workload_type: Optional[ServedModelInputWorkloadType] = None
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
+    """The workload type of the served entity. The workload type selects which type of compute to use
+    in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelInput into a dictionary suitable for use as a JSON request body."""
@@ -1943,6 +2607,23 @@ def as_dict(self) -> dict:
         if self.workload_type is not None: body['workload_type'] = self.workload_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelInput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_provisioned_throughput is not None:
+            body['max_provisioned_throughput'] = self.max_provisioned_throughput
+        if self.min_provisioned_throughput is not None:
+            body['min_provisioned_throughput'] = self.min_provisioned_throughput
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
         """Deserializes the ServedModelInput from a dictionary."""
@@ -1959,12 +2640,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelInput:
 
 
 class ServedModelInputWorkloadSize(Enum):
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
 
     LARGE = 'Large'
     MEDIUM = 'Medium'
@@ -1972,12 +2647,6 @@ class ServedModelInputWorkloadSize(Enum):
 
 
 class ServedModelInputWorkloadType(Enum):
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
-    acceleration is available by selecting workload types like GPU_SMALL and others. See the
-    available [GPU types].
-    
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     CPU = 'CPU'
     GPU_LARGE = 'GPU_LARGE'
@@ -1989,51 +2658,48 @@ class ServedModelInputWorkloadType(Enum):
 @dataclass
 class ServedModelOutput:
     creation_timestamp: Optional[int] = None
-    """The creation timestamp of the served model in Unix time."""
 
     creator: Optional[str] = None
-    """The email of the user who created the served model."""
 
     environment_vars: Optional[Dict[str, str]] = None
     """An object containing a set of optional, user-specified environment variable key-value pairs used
-    for serving this model. Note: this is an experimental feature and subject to change. Example
-    model environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
+    for serving this entity. Note: this is an experimental feature and subject to change. Example
+    entity environment variables that refer to Databricks secrets: `{"OPENAI_API_KEY":
     "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}`"""
 
     instance_profile_arn: Optional[str] = None
-    """ARN of the instance profile that the served model will use to access AWS resources."""
+    """ARN of the instance profile that the served entity uses to access AWS resources."""
 
     model_name: Optional[str] = None
-    """The name of the model in Databricks Model Registry or the full name of the model in Unity
-    Catalog."""
 
     model_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
 
     name: Optional[str] = None
-    """The name of the served model."""
+    """The name of a served entity. It must be unique across an endpoint. A served entity name can
+    consist of alphanumeric characters, dashes, and underscores. If not specified for an external
+    model, this field defaults to external_model.name, with '.' and ':' replaced with '-', and if
+    not specified for other entities, it defaults to entity_name-entity_version."""
 
     scale_to_zero_enabled: Optional[bool] = None
-    """Whether the compute resources for the Served Model should scale down to zero."""
+    """Whether the compute resources for the served entity should scale down to zero."""
 
     state: Optional[ServedModelState] = None
-    """Information corresponding to the state of the Served Model."""
 
     workload_size: Optional[str] = None
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
+    """The workload size of the served entity. The workload size corresponds to a range of provisioned
+    concurrency that the compute autoscales between. A single unit of provisioned concurrency can
+    process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency),
+    "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If
+    scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
+    is 0."""
 
-    workload_type: Optional[str] = None
-    """The workload type of the served model. The workload type selects which type of compute to use in
-    the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
+    workload_type: Optional[ServingModelWorkloadType] = None
+    """The workload type of the served entity. The workload type selects which type of compute to use
+    in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
     acceleration is available by selecting workload types like GPU_SMALL and others. See the
     available [GPU types].
     
-    [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
+    [GPU types]: https://docs.databricks.com/en/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types"""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelOutput into a dictionary suitable for use as a JSON request body."""
@@ -2048,6 +2714,22 @@ def as_dict(self) -> dict:
         if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
         if self.state: body['state'] = self.state.as_dict()
         if self.workload_size is not None: body['workload_size'] = self.workload_size
+        if self.workload_type is not None: body['workload_type'] = self.workload_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelOutput into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.environment_vars: body['environment_vars'] = self.environment_vars
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        if self.scale_to_zero_enabled is not None: body['scale_to_zero_enabled'] = self.scale_to_zero_enabled
+        if self.state: body['state'] = self.state
+        if self.workload_size is not None: body['workload_size'] = self.workload_size
         if self.workload_type is not None: body['workload_type'] = self.workload_type
         return body
 
@@ -2064,20 +2746,18 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelOutput:
                    scale_to_zero_enabled=d.get('scale_to_zero_enabled', None),
                    state=_from_dict(d, 'state', ServedModelState),
                    workload_size=d.get('workload_size', None),
-                   workload_type=d.get('workload_type', None))
+                   workload_type=_enum(d, 'workload_type', ServingModelWorkloadType))
 
 
 @dataclass
 class ServedModelSpec:
     model_name: Optional[str] = None
-    """The name of the model in Databricks Model Registry or the full name of the model in Unity
-    Catalog."""
+    """Only one of model_name and entity_name should be populated"""
 
     model_version: Optional[str] = None
-    """The version of the model in Databricks Model Registry or Unity Catalog to be served."""
+    """Only one of model_version and entity_version should be populated"""
 
     name: Optional[str] = None
-    """The name of the served model."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelSpec into a dictionary suitable for use as a JSON request body."""
@@ -2087,6 +2767,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.model_name is not None: body['model_name'] = self.model_name
+        if self.model_version is not None: body['model_version'] = self.model_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec:
         """Deserializes the ServedModelSpec from a dictionary."""
@@ -2098,18 +2786,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelSpec:
 @dataclass
 class ServedModelState:
     deployment: Optional[ServedModelStateDeployment] = None
-    """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
-    is not ready yet because the deployment is still being created (i.e container image is building,
-    model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the
-    served entity was previously in a ready state but no longer is and is attempting to recover.
-    DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED
-    indicates that there was an error trying to bring up the served entity (e.g container image
-    build failed, the model server failed to start due to a model loading error, etc.)
-    DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in
-    bringing up another served entity under the same endpoint and config version."""
 
     deployment_state_message: Optional[str] = None
-    """More information about the state of the served entity, if available."""
 
     def as_dict(self) -> dict:
         """Serializes the ServedModelState into a dictionary suitable for use as a JSON request body."""
@@ -2119,6 +2797,14 @@ def as_dict(self) -> dict:
             body['deployment_state_message'] = self.deployment_state_message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServedModelState into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.deployment is not None: body['deployment'] = self.deployment
+        if self.deployment_state_message is not None:
+            body['deployment_state_message'] = self.deployment_state_message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServedModelState:
         """Deserializes the ServedModelState from a dictionary."""
@@ -2127,15 +2813,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServedModelState:
 
 
 class ServedModelStateDeployment(Enum):
-    """The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity
-    is not ready yet because the deployment is still being created (i.e container image is building,
-    model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the
-    served entity was previously in a ready state but no longer is and is attempting to recover.
-    DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED
-    indicates that there was an error trying to bring up the served entity (e.g container image
-    build failed, the model server failed to start due to a model loading error, etc.)
-    DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in
-    bringing up another served entity under the same endpoint and config version."""
 
     ABORTED = 'DEPLOYMENT_ABORTED'
     CREATING = 'DEPLOYMENT_CREATING'
@@ -2155,6 +2832,12 @@ def as_dict(self) -> dict:
         if self.logs is not None: body['logs'] = self.logs
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServerLogsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.logs is not None: body['logs'] = self.logs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
         """Deserializes the ServerLogsResponse from a dictionary."""
@@ -2164,8 +2847,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServerLogsResponse:
 @dataclass
 class ServingEndpoint:
     ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
-    currently supported."""
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
 
     config: Optional[EndpointCoreConfigSummary] = None
     """The config that is currently being served by the endpoint."""
@@ -2177,8 +2860,7 @@ class ServingEndpoint:
     """The email of the user who created the serving endpoint."""
 
     id: Optional[str] = None
-    """System-generated ID of the endpoint. This is used to refer to the endpoint in the Permissions
-    API"""
+    """System-generated ID of the endpoint, included to be used by the Permissions API."""
 
     last_updated_timestamp: Optional[int] = None
     """The timestamp when the endpoint was last updated by a user in Unix time."""
@@ -2211,6 +2893,22 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.state: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpoint:
         """Deserializes the ServingEndpoint from a dictionary."""
@@ -2250,6 +2948,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlRequest:
         """Deserializes the ServingEndpointAccessControlRequest from a dictionary."""
@@ -2287,6 +2995,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
         """Deserializes the ServingEndpointAccessControlResponse from a dictionary."""
@@ -2300,8 +3019,8 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointAccessControlResponse:
 @dataclass
 class ServingEndpointDetailed:
     ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model endpoints are
-    currently supported."""
+    """The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+    throughput endpoints are currently supported."""
 
     config: Optional[EndpointCoreConfigOutput] = None
     """The config that is currently being served by the endpoint."""
@@ -2367,6 +3086,27 @@ def as_dict(self) -> dict:
         if self.task is not None: body['task'] = self.task
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointDetailed into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ai_gateway: body['ai_gateway'] = self.ai_gateway
+        if self.config: body['config'] = self.config
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.data_plane_info: body['data_plane_info'] = self.data_plane_info
+        if self.endpoint_url is not None: body['endpoint_url'] = self.endpoint_url
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.name is not None: body['name'] = self.name
+        if self.pending_config: body['pending_config'] = self.pending_config
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.route_optimized is not None: body['route_optimized'] = self.route_optimized
+        if self.state: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.task is not None: body['task'] = self.task
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
         """Deserializes the ServingEndpointDetailed from a dictionary."""
@@ -2388,7 +3128,6 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointDetailed:
 
 
 class ServingEndpointDetailedPermissionLevel(Enum):
-    """The permission level of the principal making the request."""
 
     CAN_MANAGE = 'CAN_MANAGE'
     CAN_QUERY = 'CAN_QUERY'
@@ -2412,6 +3151,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermission:
         """Deserializes the ServingEndpointPermission from a dictionary."""
@@ -2445,6 +3192,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissions:
         """Deserializes the ServingEndpointPermissions from a dictionary."""
@@ -2468,6 +3223,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsDescription:
         """Deserializes the ServingEndpointPermissionsDescription from a dictionary."""
@@ -2490,6 +3252,13 @@ def as_dict(self) -> dict:
         if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServingEndpointPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.serving_endpoint_id is not None: body['serving_endpoint_id'] = self.serving_endpoint_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
         """Deserializes the ServingEndpointPermissionsRequest from a dictionary."""
@@ -2498,6 +3267,15 @@ def from_dict(cls, d: Dict[str, any]) -> ServingEndpointPermissionsRequest:
                    serving_endpoint_id=d.get('serving_endpoint_id', None))
 
 
+class ServingModelWorkloadType(Enum):
+
+    CPU = 'CPU'
+    GPU_LARGE = 'GPU_LARGE'
+    GPU_MEDIUM = 'GPU_MEDIUM'
+    GPU_SMALL = 'GPU_SMALL'
+    MULTIGPU_MEDIUM = 'MULTIGPU_MEDIUM'
+
+
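The `ServingModelWorkloadType` enum above replaces the plain-string `workload_type` fields across the served-entity and served-model classes, and their `from_dict` methods now coerce wire strings through the `_enum` helper. A small round-trip sketch (names are placeholders; post-patch module assumed):

```python
from databricks.sdk.service.serving import (ServedEntityInput,
                                            ServingModelWorkloadType)

payload = {'entity_name': 'main.default.my_model',
           'entity_version': '1',
           'workload_type': 'GPU_SMALL'}

# from_dict maps the raw string onto the new enum member.
entity = ServedEntityInput.from_dict(payload)
assert entity.workload_type is ServingModelWorkloadType.GPU_SMALL
```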
 @dataclass
 class TrafficConfig:
     routes: Optional[List[Route]] = None
@@ -2509,6 +3287,12 @@ def as_dict(self) -> dict:
         if self.routes: body['routes'] = [v.as_dict() for v in self.routes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TrafficConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.routes: body['routes'] = self.routes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TrafficConfig:
         """Deserializes the TrafficConfig from a dictionary."""
@@ -2542,6 +3326,16 @@ def as_dict(self) -> dict:
         if self.text is not None: body['text'] = self.text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the V1ResponseChoiceElement into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.finish_reason is not None: body['finishReason'] = self.finish_reason
+        if self.index is not None: body['index'] = self.index
+        if self.logprobs is not None: body['logprobs'] = self.logprobs
+        if self.message: body['message'] = self.message
+        if self.text is not None: body['text'] = self.text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> V1ResponseChoiceElement:
         """Deserializes the V1ResponseChoiceElement from a dictionary."""
@@ -2621,9 +3415,9 @@ def build_logs(self, name: str, served_model_name: str) -> BuildLogsResponse:
 
     def create(self,
                name: str,
-               config: EndpointCoreConfigInput,
                *,
                ai_gateway: Optional[AiGatewayConfig] = None,
+               config: Optional[EndpointCoreConfigInput] = None,
                rate_limits: Optional[List[RateLimit]] = None,
                route_optimized: Optional[bool] = None,
                tags: Optional[List[EndpointTag]] = None) -> Wait[ServingEndpointDetailed]:
@@ -2632,7 +3426,10 @@ def create(self,
         :param name: str
           The name of the serving endpoint. This field is required and must be unique across a Databricks
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-        :param config: :class:`EndpointCoreConfigInput`
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+          throughput endpoints are currently supported.
+        :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
           The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
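With this change, `create` takes keyword-only arguments and no longer requires `config` up front. A hedged sketch of the common path that still supplies one (the endpoint and model names are placeholders):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.serving import (EndpointCoreConfigInput,
                                            ServedEntityInput)

w = WorkspaceClient()
endpoint = w.serving_endpoints.create_and_wait(
    name='my-endpoint',
    config=EndpointCoreConfigInput(served_entities=[
        ServedEntityInput(entity_name='main.default.my_model',
                          entity_version='1',
                          workload_size='Small',
                          scale_to_zero_enabled=True)
    ]))
```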
@@ -2666,9 +3463,9 @@ def create(self,
     def create_and_wait(
         self,
         name: str,
-        config: EndpointCoreConfigInput,
         *,
         ai_gateway: Optional[AiGatewayConfig] = None,
+        config: Optional[EndpointCoreConfigInput] = None,
         rate_limits: Optional[List[RateLimit]] = None,
         route_optimized: Optional[bool] = None,
         tags: Optional[List[EndpointTag]] = None,
@@ -2684,7 +3481,6 @@ def delete(self, name: str):
         """Delete a serving endpoint.
         
         :param name: str
-          The name of the serving endpoint. This field is required.
         
         
         """
@@ -2726,7 +3522,7 @@ def get(self, name: str) -> ServingEndpointDetailed:
         res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}', headers=headers)
         return ServingEndpointDetailed.from_dict(res)
 
-    def get_open_api(self, name: str):
+    def get_open_api(self, name: str) -> GetOpenApiResponse:
         """Get the schema for a serving endpoint.
         
         Get the query schema of the serving endpoint in OpenAPI format. The schema contains information for
@@ -2735,12 +3531,13 @@ def get_open_api(self, name: str):
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         
-        
+        :returns: :class:`GetOpenApiResponse`
         """
 
-        headers = {'Accept': 'application/json', }
+        headers = {'Accept': 'text/plain', }
 
-        self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers)
+        res = self._api.do('GET', f'/api/2.0/serving-endpoints/{name}/openapi', headers=headers, raw=True)
+        return GetOpenApiResponse.from_dict(res)
 
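`get_open_api` previously discarded the response body; it now requests `text/plain`, reads the response raw, and returns a parsed `GetOpenApiResponse`. Given a `WorkspaceClient` `w`, usage reduces to a one-liner (the endpoint name is a placeholder):

```python
# `schema` wraps the endpoint's query schema in OpenAPI format.
schema = w.serving_endpoints.get_open_api('my-endpoint')
```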
     def get_permission_levels(self, serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse:
         """Get serving endpoint permission levels.
@@ -2779,6 +3576,44 @@ def get_permissions(self, serving_endpoint_id: str) -> ServingEndpointPermission
                            headers=headers)
         return ServingEndpointPermissions.from_dict(res)
 
+    def http_request(self,
+                     connection_name: str,
+                     method: ExternalFunctionRequestHttpMethod,
+                     path: str,
+                     *,
+                     headers: Optional[str] = None,
+                     json: Optional[str] = None,
+                     params: Optional[str] = None) -> HttpRequestResponse:
+        """Make external services call using the credentials stored in UC Connection.
+        
+        :param connection_name: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST').
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: str (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: str (optional)
+          The JSON payload to send in the request body.
+        :param params: str (optional)
+          Query parameters for the request.
+        
+        :returns: :class:`HttpRequestResponse`
+        """
+        body = {}
+        if connection_name is not None: body['connection_name'] = connection_name
+        if headers is not None: body['headers'] = headers
+        if json is not None: body['json'] = json
+        if method is not None: body['method'] = method.value
+        if params is not None: body['params'] = params
+        if path is not None: body['path'] = path
+        headers = {'Accept': 'text/plain', 'Content-Type': 'application/json', }
+
+        res = self._api.do('POST', '/api/2.0/external-function', body=body, headers=headers, raw=True)
+        return HttpRequestResponse.from_dict(res)
+
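The new `http_request` method proxies a call to an external service using credentials stored in a Unity Catalog connection. A hedged sketch, given a `WorkspaceClient` `w`; the connection name and path are placeholders, and `GET` is assumed to be among the `ExternalFunctionRequestHttpMethod` members:

```python
from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod

resp = w.serving_endpoints.http_request(
    connection_name='my_connection',
    method=ExternalFunctionRequestHttpMethod.GET,
    path='/api/v1/status',
    params='{"verbose": "true"}',  # string-encoded, matching the signature above
)
```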
     def list(self) -> Iterator[ServingEndpoint]:
         """Get all serving endpoints.
         
@@ -2815,7 +3650,7 @@ def patch(self,
               name: str,
               *,
               add_tags: Optional[List[EndpointTag]] = None,
-              delete_tags: Optional[List[str]] = None) -> Iterator[EndpointTag]:
+              delete_tags: Optional[List[str]] = None) -> EndpointTags:
         """Update tags of a serving endpoint.
         
         Used to batch add and delete tags from a serving endpoint with a single API call.
@@ -2827,7 +3662,7 @@ def patch(self,
         :param delete_tags: List[str] (optional)
           List of tag keys to delete
         
-        :returns: Iterator over :class:`EndpointTag`
+        :returns: :class:`EndpointTags`
         """
         body = {}
         if add_tags is not None: body['add_tags'] = [v.as_dict() for v in add_tags]
@@ -2835,7 +3670,7 @@ def patch(self,
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
         res = self._api.do('PATCH', f'/api/2.0/serving-endpoints/{name}/tags', body=body, headers=headers)
-        return [EndpointTag.from_dict(v) for v in res]
+        return EndpointTags.from_dict(res)
 
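`patch` now returns a single `EndpointTags` wrapper rather than an iterator of `EndpointTag`, so callers that looped over the old return value need a small adjustment. A sketch, given a `WorkspaceClient` `w` (tag keys are placeholders, and the tag list is assumed to live on the wrapper's `tags` attribute):

```python
from databricks.sdk.service.serving import EndpointTag

res = w.serving_endpoints.patch('my-endpoint',
                                add_tags=[EndpointTag(key='team', value='ml')],
                                delete_tags=['deprecated'])
for tag in res.tags or []:  # previously: `for tag in res`
    print(tag.key, tag.value)
```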
     def put(self, name: str, *, rate_limits: Optional[List[RateLimit]] = None) -> PutResponse:
         """Update rate limits of a serving endpoint.
@@ -2870,8 +3705,8 @@ def put_ai_gateway(
             usage_tracking_config: Optional[AiGatewayUsageTrackingConfig] = None) -> PutAiGatewayResponse:
         """Update AI Gateway of a serving endpoint.
         
-        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
-        supported.
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+        throughput endpoints are currently supported.
         
         :param name: str
           The name of the serving endpoint whose AI Gateway is being updated. This field is required.
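AI Gateway configuration now applies to provisioned throughput endpoints as well as external model endpoints. A minimal sketch that enables usage tracking, given a `WorkspaceClient` `w` (the endpoint name is a placeholder; `AiGatewayUsageTrackingConfig` is assumed to expose an `enabled` flag like the other gateway config classes):

```python
from databricks.sdk.service.serving import AiGatewayUsageTrackingConfig

w.serving_endpoints.put_ai_gateway(
    name='my-endpoint',
    usage_tracking_config=AiGatewayUsageTrackingConfig(enabled=True))
```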
@@ -2994,8 +3829,8 @@ def set_permissions(
     ) -> ServingEndpointPermissions:
         """Set serving endpoint permissions.
         
-        Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
@@ -3031,14 +3866,16 @@ def update_config(self,
           The name of the serving endpoint to update. This field is required.
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
           Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+          Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
+          existing provisioned throughput endpoints that never have inference table configured; in these cases
+          please use AI Gateway to manage inference tables.
         :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-          entities.
+          The list of served entities under the serving endpoint config.
         :param served_models: List[:class:`ServedModelInput`] (optional)
-          (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-          serving endpoint can have up to 15 served models.
+          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+          config.
         :param traffic_config: :class:`TrafficConfig` (optional)
-          The traffic config defining how invocations to the serving endpoint should be routed.
+          The traffic configuration associated with the serving endpoint config.
         
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.
@@ -3110,6 +3947,7 @@ class ServingEndpointsDataPlaneAPI:
     def __init__(self, api_client, control_plane):
         self._api = api_client
         self._control_plane = control_plane
+        from ..data_plane import DataPlaneService
         self._data_plane_service = DataPlaneService()
 
     def query(self,
diff --git a/databricks/sdk/service/settings.py b/databricks/sdk/service/settings.py
index a6a235158..42c8c882d 100755
--- a/databricks/sdk/service/settings.py
+++ b/databricks/sdk/service/settings.py
@@ -14,6 +14,194 @@
 # all definitions in this file are in alphabetical order
 
 
+@dataclass
+class AccountIpAccessEnable:
+    acct_ip_acl_enable: BooleanMessage
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AccountIpAccessEnable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccountIpAccessEnable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.acct_ip_acl_enable: body['acct_ip_acl_enable'] = self.acct_ip_acl_enable
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AccountIpAccessEnable:
+        """Deserializes the AccountIpAccessEnable from a dictionary."""
+        return cls(acct_ip_acl_enable=_from_dict(d, 'acct_ip_acl_enable', BooleanMessage),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
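The `etag` docstring above describes a read -> update pattern for optimistic concurrency. A hedged illustration, given an `AccountClient` `a`; the `enable_ip_access_lists` accessor and the `update(allow_missing=..., setting=..., field_mask=...)` signature are hypothetical stand-ins for whatever API class serves this setting:

```python
from databricks.sdk.service.settings import AccountIpAccessEnable, BooleanMessage

# Hypothetical accessor: fetch the current setting to obtain a fresh etag...
current = a.settings.enable_ip_access_lists.get()
# ...then pass that etag back so a concurrent write surfaces as a conflict.
a.settings.enable_ip_access_lists.update(
    allow_missing=True,
    setting=AccountIpAccessEnable(acct_ip_acl_enable=BooleanMessage(value=True),
                                  etag=current.etag),
    field_mask='acct_ip_acl_enable.value')
```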
+@dataclass
+class AibiDashboardEmbeddingAccessPolicy:
+    access_policy_type: AibiDashboardEmbeddingAccessPolicyAccessPolicyType
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_policy_type is not None: body['access_policy_type'] = self.access_policy_type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicy:
+        """Deserializes the AibiDashboardEmbeddingAccessPolicy from a dictionary."""
+        return cls(access_policy_type=_enum(d, 'access_policy_type',
+                                            AibiDashboardEmbeddingAccessPolicyAccessPolicyType))
+
+
+class AibiDashboardEmbeddingAccessPolicyAccessPolicyType(Enum):
+
+    ALLOW_ALL_DOMAINS = 'ALLOW_ALL_DOMAINS'
+    ALLOW_APPROVED_DOMAINS = 'ALLOW_APPROVED_DOMAINS'
+    DENY_ALL_DOMAINS = 'DENY_ALL_DOMAINS'
+
+
+@dataclass
+class AibiDashboardEmbeddingAccessPolicySetting:
+    aibi_dashboard_embedding_access_policy: AibiDashboardEmbeddingAccessPolicy
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aibi_dashboard_embedding_access_policy:
+            body[
+                'aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy.as_dict(
+                )
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingAccessPolicySetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aibi_dashboard_embedding_access_policy:
+            body['aibi_dashboard_embedding_access_policy'] = self.aibi_dashboard_embedding_access_policy
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Deserializes the AibiDashboardEmbeddingAccessPolicySetting from a dictionary."""
+        return cls(aibi_dashboard_embedding_access_policy=_from_dict(
+            d, 'aibi_dashboard_embedding_access_policy', AibiDashboardEmbeddingAccessPolicy),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
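Putting the new AIBI embedding types together: the access-policy enum feeds the policy object, which feeds the setting. A fully local sketch grounded in the definitions above:

```python
from databricks.sdk.service.settings import (
    AibiDashboardEmbeddingAccessPolicy, AibiDashboardEmbeddingAccessPolicySetting,
    AibiDashboardEmbeddingAccessPolicyAccessPolicyType)

policy = AibiDashboardEmbeddingAccessPolicy(
    access_policy_type=AibiDashboardEmbeddingAccessPolicyAccessPolicyType.ALLOW_APPROVED_DOMAINS)
setting = AibiDashboardEmbeddingAccessPolicySetting(
    aibi_dashboard_embedding_access_policy=policy)

assert setting.as_dict() == {
    'aibi_dashboard_embedding_access_policy': {
        'access_policy_type': 'ALLOW_APPROVED_DOMAINS'
    }
}
```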
+
+@dataclass
+class AibiDashboardEmbeddingApprovedDomains:
+    approved_domains: Optional[List[str]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomains into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.approved_domains: body['approved_domains'] = [v for v in self.approved_domains]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomains into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.approved_domains: body['approved_domains'] = self.approved_domains
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomains:
+        """Deserializes the AibiDashboardEmbeddingApprovedDomains from a dictionary."""
+        return cls(approved_domains=d.get('approved_domains', None))
+
+
+@dataclass
+class AibiDashboardEmbeddingApprovedDomainsSetting:
+    aibi_dashboard_embedding_approved_domains: AibiDashboardEmbeddingApprovedDomains
+
+    etag: Optional[str] = None
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> update pattern to perform setting updates in order to avoid race conditions. That is, get an
+    etag from a GET request, and pass it with the PATCH request to identify the setting version you
+    are updating."""
+
+    setting_name: Optional[str] = None
+    """Name of the corresponding setting. This field is populated in the response, but it will not be
+    respected even if it's set in the request body. The setting name in the path parameter will be
+    respected instead. Setting name is required to be 'default' if the setting only has one instance
+    per workspace."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.aibi_dashboard_embedding_approved_domains:
+            body['aibi_dashboard_embedding_approved_domains'] = \
+                self.aibi_dashboard_embedding_approved_domains.as_dict()
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AibiDashboardEmbeddingApprovedDomainsSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aibi_dashboard_embedding_approved_domains:
+            body['aibi_dashboard_embedding_approved_domains'] = self.aibi_dashboard_embedding_approved_domains
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Deserializes the AibiDashboardEmbeddingApprovedDomainsSetting from a dictionary."""
+        return cls(aibi_dashboard_embedding_approved_domains=_from_dict(
+            d, 'aibi_dashboard_embedding_approved_domains', AibiDashboardEmbeddingApprovedDomains),
+                   etag=d.get('etag', None),
+                   setting_name=d.get('setting_name', None))
+
+
 @dataclass
 class AutomaticClusterUpdateSetting:
     automatic_cluster_update_workspace: ClusterAutoRestartMessage
@@ -41,6 +229,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AutomaticClusterUpdateSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.automatic_cluster_update_workspace:
+            body['automatic_cluster_update_workspace'] = self.automatic_cluster_update_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
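The new as_shallow_dict variants differ from as_dict only in recursion: as_dict serializes nested messages all the way down, while as_shallow_dict keeps nested dataclass instances as-is. A small contrast sketch, assuming the usual as_dict pattern for the nested field (the top of that method sits outside this hunk):

    from databricks.sdk.service.settings import (AutomaticClusterUpdateSetting,
                                                 ClusterAutoRestartMessage)

    msg = ClusterAutoRestartMessage(enabled=True)
    setting = AutomaticClusterUpdateSetting(automatic_cluster_update_workspace=msg,
                                            setting_name='default')

    # Deep: the nested message becomes a plain dict, ready for a JSON body.
    assert setting.as_dict() == {
        'automatic_cluster_update_workspace': {'enabled': True},
        'setting_name': 'default',
    }

    # Shallow: only the top level is converted; the nested dataclass survives.
    assert setting.as_shallow_dict()['automatic_cluster_update_workspace'] is msg
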
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AutomaticClusterUpdateSetting:
         """Deserializes the AutomaticClusterUpdateSetting from a dictionary."""
@@ -60,6 +257,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BooleanMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BooleanMessage:
         """Deserializes the BooleanMessage from a dictionary."""
@@ -94,6 +297,17 @@ def as_dict(self) -> dict:
             body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.can_toggle is not None: body['can_toggle'] = self.can_toggle
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.enablement_details: body['enablement_details'] = self.enablement_details
+        if self.maintenance_window: body['maintenance_window'] = self.maintenance_window
+        if self.restart_even_if_no_updates_available is not None:
+            body['restart_even_if_no_updates_available'] = self.restart_even_if_no_updates_available
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessage:
         """Deserializes the ClusterAutoRestartMessage from a dictionary."""
@@ -135,6 +349,17 @@ def as_dict(self) -> dict:
             body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageEnablementDetails into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.forced_for_compliance_mode is not None:
+            body['forced_for_compliance_mode'] = self.forced_for_compliance_mode
+        if self.unavailable_for_disabled_entitlement is not None:
+            body['unavailable_for_disabled_entitlement'] = self.unavailable_for_disabled_entitlement
+        if self.unavailable_for_non_enterprise_tier is not None:
+            body['unavailable_for_non_enterprise_tier'] = self.unavailable_for_non_enterprise_tier
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageEnablementDetails:
         """Deserializes the ClusterAutoRestartMessageEnablementDetails from a dictionary."""
@@ -154,6 +379,12 @@ def as_dict(self) -> dict:
             body['week_day_based_schedule'] = self.week_day_based_schedule.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindow into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.week_day_based_schedule: body['week_day_based_schedule'] = self.week_day_based_schedule
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindow:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindow from a dictionary."""
@@ -188,6 +419,14 @@ def as_dict(self) -> dict:
         if self.window_start_time: body['window_start_time'] = self.window_start_time.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.day_of_week is not None: body['day_of_week'] = self.day_of_week
+        if self.frequency is not None: body['frequency'] = self.frequency
+        if self.window_start_time: body['window_start_time'] = self.window_start_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWeekDayBasedSchedule from a dictionary."""
@@ -222,6 +461,13 @@ def as_dict(self) -> dict:
         if self.minutes is not None: body['minutes'] = self.minutes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.hours is not None: body['hours'] = self.hours
+        if self.minutes is not None: body['minutes'] = self.minutes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ClusterAutoRestartMessageMaintenanceWindowWindowStartTime:
         """Deserializes the ClusterAutoRestartMessageMaintenanceWindowWindowStartTime from a dictionary."""
@@ -245,6 +491,13 @@ def as_dict(self) -> dict:
         if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfile:
         """Deserializes the ComplianceSecurityProfile from a dictionary."""
@@ -282,6 +535,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ComplianceSecurityProfileSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_security_profile_workspace:
+            body['compliance_security_profile_workspace'] = self.compliance_security_profile_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ComplianceSecurityProfileSetting:
         """Deserializes the ComplianceSecurityProfileSetting from a dictionary."""
@@ -301,7 +563,9 @@ class ComplianceStandard(Enum):
     FEDRAMP_IL5 = 'FEDRAMP_IL5'
     FEDRAMP_MODERATE = 'FEDRAMP_MODERATE'
     HIPAA = 'HIPAA'
+    HITRUST = 'HITRUST'
     IRAP_PROTECTED = 'IRAP_PROTECTED'
+    ISMAP = 'ISMAP'
     ITAR_EAR = 'ITAR_EAR'
     NONE = 'NONE'
     PCI_DSS = 'PCI_DSS'
@@ -329,6 +593,16 @@ def as_dict(self) -> dict:
         if self.slack: body['slack'] = self.slack.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Config into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email: body['email'] = self.email
+        if self.generic_webhook: body['generic_webhook'] = self.generic_webhook
+        if self.microsoft_teams: body['microsoft_teams'] = self.microsoft_teams
+        if self.pagerduty: body['pagerduty'] = self.pagerduty
+        if self.slack: body['slack'] = self.slack
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Config:
         """Deserializes the Config from a dictionary."""
@@ -362,6 +636,14 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessList:
         """Deserializes the CreateIpAccessList from a dictionary."""
@@ -383,6 +665,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateIpAccessListResponse:
         """Deserializes the CreateIpAccessListResponse from a dictionary."""
@@ -407,6 +695,13 @@ def as_dict(self) -> dict:
         if self.region is not None: body['region'] = self.region
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNetworkConnectivityConfigRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.region is not None: body['region'] = self.region
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNetworkConnectivityConfigRequest:
         """Deserializes the CreateNetworkConnectivityConfigRequest from a dictionary."""
@@ -428,6 +723,13 @@ def as_dict(self) -> dict:
         if self.display_name is not None: body['display_name'] = self.display_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.display_name is not None: body['display_name'] = self.display_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateNotificationDestinationRequest:
         """Deserializes the CreateNotificationDestinationRequest from a dictionary."""
@@ -455,6 +757,14 @@ def as_dict(self) -> dict:
         if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateOboTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.application_id is not None: body['application_id'] = self.application_id
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenRequest:
         """Deserializes the CreateOboTokenRequest from a dictionary."""
@@ -479,6 +789,13 @@ def as_dict(self) -> dict:
         if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateOboTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        if self.token_value is not None: body['token_value'] = self.token_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateOboTokenResponse:
         """Deserializes the CreateOboTokenResponse from a dictionary."""
@@ -506,6 +823,15 @@ def as_dict(self) -> dict:
         if self.resource_id is not None: body['resource_id'] = self.resource_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreatePrivateEndpointRuleRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_id is not None: body['group_id'] = self.group_id
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreatePrivateEndpointRuleRequest:
         """Deserializes the CreatePrivateEndpointRuleRequest from a dictionary."""
@@ -541,6 +867,13 @@ def as_dict(self) -> dict:
         if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.lifetime_seconds is not None: body['lifetime_seconds'] = self.lifetime_seconds
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenRequest:
         """Deserializes the CreateTokenRequest from a dictionary."""
@@ -562,6 +895,13 @@ def as_dict(self) -> dict:
         if self.token_value is not None: body['token_value'] = self.token_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        if self.token_value is not None: body['token_value'] = self.token_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateTokenResponse:
         """Deserializes the CreateTokenResponse from a dictionary."""
@@ -588,6 +928,13 @@ def as_dict(self) -> dict:
         if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CspEnablementAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.compliance_standards: body['compliance_standards'] = self.compliance_standards
+        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        return body
+
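With HITRUST and ISMAP added to ComplianceStandard below, a quick sketch of how the enum list rides through the two serializers (the as_dict list comprehension follows the pattern used throughout this file; it is not shown in this hunk):

    from databricks.sdk.service.settings import ComplianceStandard, CspEnablementAccount

    account = CspEnablementAccount(
        compliance_standards=[ComplianceStandard.HIPAA, ComplianceStandard.HITRUST],
        is_enforced=True)

    # Deep serialization flattens each enum member to its string value.
    assert account.as_dict()['compliance_standards'] == ['HIPAA', 'HITRUST']

    # The shallow form keeps the enum members themselves.
    assert account.as_shallow_dict()['compliance_standards'][0] is ComplianceStandard.HIPAA
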
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccount:
         """Deserializes the CspEnablementAccount from a dictionary."""
@@ -622,6 +969,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CspEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.csp_enablement_account: body['csp_enablement_account'] = self.csp_enablement_account
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CspEnablementAccountSetting:
         """Deserializes the CspEnablementAccountSetting from a dictionary."""
@@ -664,6 +1019,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DefaultNamespaceSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.namespace: body['namespace'] = self.namespace
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
         """Deserializes the DefaultNamespaceSetting from a dictionary."""
@@ -672,6 +1035,96 @@ def from_dict(cls, d: Dict[str, any]) -> DefaultNamespaceSetting:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class DeleteAccountIpAccessEnableResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAccountIpAccessEnableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAccountIpAccessEnableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAccountIpAccessEnableResponse:
+        """Deserializes the DeleteAccountIpAccessEnableResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
+@dataclass
+class DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+        """Deserializes the DeleteAibiDashboardEmbeddingAccessPolicySettingResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
+@dataclass
+class DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+    """The etag is returned."""
+
+    etag: str
+    """etag used for versioning. The response is at least as fresh as the eTag provided. This is used
+    for optimistic concurrency control as a way to help prevent simultaneous writes of a setting
+    overwriting each other. It is strongly suggested that systems make use of the etag in the read
+    -> delete pattern to perform setting deletions in order to avoid race conditions. That is, get
+    an etag from a GET request, and pass it with the DELETE request to identify the rule set version
+    you are deleting."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+        """Deserializes the DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse from a dictionary."""
+        return cls(etag=d.get('etag', None))
+
+
 @dataclass
 class DeleteDefaultNamespaceSettingResponse:
     """The etag is returned."""
@@ -690,6 +1143,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDefaultNamespaceSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDefaultNamespaceSettingResponse:
         """Deserializes the DeleteDefaultNamespaceSettingResponse from a dictionary."""
@@ -714,6 +1173,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyAccessResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyAccessResponse:
         """Deserializes the DeleteDisableLegacyAccessResponse from a dictionary."""
@@ -738,6 +1203,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyDbfsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyDbfsResponse:
         """Deserializes the DeleteDisableLegacyDbfsResponse from a dictionary."""
@@ -762,6 +1233,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDisableLegacyFeaturesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDisableLegacyFeaturesResponse:
         """Deserializes the DeleteDisableLegacyFeaturesResponse from a dictionary."""
@@ -776,6 +1253,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteNetworkConnectivityConfigurationResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteNetworkConnectivityConfigurationResponse:
         """Deserializes the DeleteNetworkConnectivityConfigurationResponse from a dictionary."""
@@ -800,6 +1282,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeletePersonalComputeSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeletePersonalComputeSettingResponse:
         """Deserializes the DeletePersonalComputeSettingResponse from a dictionary."""
@@ -814,6 +1302,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -838,6 +1331,12 @@ def as_dict(self) -> dict:
         if self.etag is not None: body['etag'] = self.etag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRestrictWorkspaceAdminsSettingResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRestrictWorkspaceAdminsSettingResponse:
         """Deserializes the DeleteRestrictWorkspaceAdminsSettingResponse from a dictionary."""
@@ -879,6 +1378,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyAccess into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_access: body['disable_legacy_access'] = self.disable_legacy_access
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyAccess:
         """Deserializes the DisableLegacyAccess from a dictionary."""
@@ -913,6 +1420,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyDbfs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_dbfs: body['disable_legacy_dbfs'] = self.disable_legacy_dbfs
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyDbfs:
         """Deserializes the DisableLegacyDbfs from a dictionary."""
@@ -948,6 +1463,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DisableLegacyFeatures into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.disable_legacy_features: body['disable_legacy_features'] = self.disable_legacy_features
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
         """Deserializes the DisableLegacyFeatures from a dictionary."""
@@ -956,6 +1479,270 @@ def from_dict(cls, d: Dict[str, any]) -> DisableLegacyFeatures:
                    setting_name=d.get('setting_name', None))
 
 
+@dataclass
+class EgressNetworkPolicy:
+    """The network policies applying for egress traffic. This message is used by the UI/REST API. We
+    translate this message to the format expected by the dataplane in Lakehouse Network Manager (for
+    the format expected by the dataplane, see networkconfig.textproto)."""
+
+    internet_access: Optional[EgressNetworkPolicyInternetAccessPolicy] = None
+    """The access policy enforced for egress traffic to the internet."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.internet_access: body['internet_access'] = self.internet_access.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.internet_access: body['internet_access'] = self.internet_access
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicy:
+        """Deserializes the EgressNetworkPolicy from a dictionary."""
+        return cls(internet_access=_from_dict(d, 'internet_access', EgressNetworkPolicyInternetAccessPolicy))
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicy:
+    allowed_internet_destinations: Optional[
+        List[EgressNetworkPolicyInternetAccessPolicyInternetDestination]] = None
+
+    allowed_storage_destinations: Optional[
+        List[EgressNetworkPolicyInternetAccessPolicyStorageDestination]] = None
+
+    log_only_mode: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyMode] = None
+    """Optional. If not specified, assume the policy is enforced for all workloads."""
+
+    restriction_mode: Optional[EgressNetworkPolicyInternetAccessPolicyRestrictionMode] = None
+    """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS:
+    Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can
+    only access explicitly allowed internet and storage destinations, as well as UC connections and
+    external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
+    private link."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicy into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allowed_internet_destinations:
+            body['allowed_internet_destinations'] = [v.as_dict() for v in self.allowed_internet_destinations]
+        if self.allowed_storage_destinations:
+            body['allowed_storage_destinations'] = [v.as_dict() for v in self.allowed_storage_destinations]
+        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode.as_dict()
+        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicy into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_internet_destinations:
+            body['allowed_internet_destinations'] = self.allowed_internet_destinations
+        if self.allowed_storage_destinations:
+            body['allowed_storage_destinations'] = self.allowed_storage_destinations
+        if self.log_only_mode: body['log_only_mode'] = self.log_only_mode
+        if self.restriction_mode is not None: body['restriction_mode'] = self.restriction_mode
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicy:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicy from a dictionary."""
+        return cls(allowed_internet_destinations=_repeated_dict(
+            d, 'allowed_internet_destinations', EgressNetworkPolicyInternetAccessPolicyInternetDestination),
+                   allowed_storage_destinations=_repeated_dict(
+                       d, 'allowed_storage_destinations',
+                       EgressNetworkPolicyInternetAccessPolicyStorageDestination),
+                   log_only_mode=_from_dict(d, 'log_only_mode',
+                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyMode),
+                   restriction_mode=_enum(d, 'restriction_mode',
+                                          EgressNetworkPolicyInternetAccessPolicyRestrictionMode))
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyInternetDestination:
+    """Users can specify accessible internet destinations when outbound access is restricted. We only
+    support domain name (FQDN) destinations for the time being, though going forwards we want to
+    support host names and IP addresses."""
+
+    destination: Optional[str] = None
+
+    protocol: Optional[
+        EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol] = None
+    """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP
+    filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be
+    set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
+    filtering (i.e. SNI based filtering, filtering by FQDN)."""
+
+    type: Optional[EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        if self.protocol is not None: body['protocol'] = self.protocol.value
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination is not None: body['destination'] = self.destination
+        if self.protocol is not None: body['protocol'] = self.protocol
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyInternetDestination:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyInternetDestination from a dictionary."""
+        return cls(
+            destination=d.get('destination', None),
+            protocol=_enum(
+                d, 'protocol',
+                EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+            ),
+            type=_enum(d, 'type',
+                       EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol(Enum):
+    """The filtering protocol used by the DP. For private and public preview, SEG will only support TCP
+    filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be
+    set to TCP by default and hidden from the user. In the future, users may be able to select HTTP
+    filtering (i.e. SNI based filtering, filtering by FQDN)."""
+
+    TCP = 'TCP'
+
+
+class EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType(Enum):
+
+    FQDN = 'FQDN'
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
+    log_only_mode_type: Optional[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType] = None
+
+    workloads: Optional[List[EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType]] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type.value
+        if self.workloads: body['workloads'] = [v.value for v in self.workloads]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.log_only_mode_type is not None: body['log_only_mode_type'] = self.log_only_mode_type
+        if self.workloads: body['workloads'] = self.workloads
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyLogOnlyMode:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyLogOnlyMode from a dictionary."""
+        return cls(log_only_mode_type=_enum(
+            d, 'log_only_mode_type', EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType),
+                   workloads=_repeated_enum(d, 'workloads',
+                                            EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType(Enum):
+
+    ALL_SERVICES = 'ALL_SERVICES'
+    SELECTED_SERVICES = 'SELECTED_SERVICES'
+
+
+class EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType(Enum):
+    """The values should match the list of workloads used in networkconfig.proto"""
+
+    DBSQL = 'DBSQL'
+    ML_SERVING = 'ML_SERVING'
+
+
+class EgressNetworkPolicyInternetAccessPolicyRestrictionMode(Enum):
+    """At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS:
+    Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can
+    only access explicitly allowed internet and storage destinations, as well as UC connections and
+    external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via
+    private link."""
+
+    FULL_ACCESS = 'FULL_ACCESS'
+    PRIVATE_ACCESS_ONLY = 'PRIVATE_ACCESS_ONLY'
+    RESTRICTED_ACCESS = 'RESTRICTED_ACCESS'
+
+
+@dataclass
+class EgressNetworkPolicyInternetAccessPolicyStorageDestination:
+    """Users can specify accessible storage destinations."""
+
+    allowed_paths: Optional[List[str]] = None
+
+    azure_container: Optional[str] = None
+
+    azure_dns_zone: Optional[str] = None
+
+    azure_storage_account: Optional[str] = None
+
+    azure_storage_service: Optional[str] = None
+
+    bucket_name: Optional[str] = None
+
+    region: Optional[str] = None
+
+    type: Optional[EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allowed_paths: body['allowed_paths'] = [v for v in self.allowed_paths]
+        if self.azure_container is not None: body['azure_container'] = self.azure_container
+        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
+        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
+        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.region is not None: body['region'] = self.region
+        if self.type is not None: body['type'] = self.type.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allowed_paths: body['allowed_paths'] = self.allowed_paths
+        if self.azure_container is not None: body['azure_container'] = self.azure_container
+        if self.azure_dns_zone is not None: body['azure_dns_zone'] = self.azure_dns_zone
+        if self.azure_storage_account is not None: body['azure_storage_account'] = self.azure_storage_account
+        if self.azure_storage_service is not None: body['azure_storage_service'] = self.azure_storage_service
+        if self.bucket_name is not None: body['bucket_name'] = self.bucket_name
+        if self.region is not None: body['region'] = self.region
+        if self.type is not None: body['type'] = self.type
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EgressNetworkPolicyInternetAccessPolicyStorageDestination:
+        """Deserializes the EgressNetworkPolicyInternetAccessPolicyStorageDestination from a dictionary."""
+        return cls(allowed_paths=d.get('allowed_paths', None),
+                   azure_container=d.get('azure_container', None),
+                   azure_dns_zone=d.get('azure_dns_zone', None),
+                   azure_storage_account=d.get('azure_storage_account', None),
+                   azure_storage_service=d.get('azure_storage_service', None),
+                   bucket_name=d.get('bucket_name', None),
+                   region=d.get('region', None),
+                   type=_enum(
+                       d, 'type',
+                       EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType))
+
+
+class EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType(Enum):
+
+    AWS_S3 = 'AWS_S3'
+    AZURE_STORAGE = 'AZURE_STORAGE'
+    CLOUDFLARE_R2 = 'CLOUDFLARE_R2'
+    GOOGLE_CLOUD_STORAGE = 'GOOGLE_CLOUD_STORAGE'
+
+
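Putting the egress policy hierarchy together, a construction-only sketch of a restricted policy that still allows one FQDN and one S3 bucket (field names as declared above; how the policy is submitted is outside this diff):

    from databricks.sdk.service.settings import (
        EgressNetworkPolicy, EgressNetworkPolicyInternetAccessPolicy,
        EgressNetworkPolicyInternetAccessPolicyInternetDestination,
        EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType,
        EgressNetworkPolicyInternetAccessPolicyRestrictionMode,
        EgressNetworkPolicyInternetAccessPolicyStorageDestination,
        EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType)

    policy = EgressNetworkPolicy(internet_access=EgressNetworkPolicyInternetAccessPolicy(
        restriction_mode=EgressNetworkPolicyInternetAccessPolicyRestrictionMode.RESTRICTED_ACCESS,
        allowed_internet_destinations=[
            EgressNetworkPolicyInternetAccessPolicyInternetDestination(
                destination='pypi.org',
                type=EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType.FQDN)
        ],
        allowed_storage_destinations=[
            EgressNetworkPolicyInternetAccessPolicyStorageDestination(
                bucket_name='my-artifacts',
                region='us-west-2',
                type=EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType.AWS_S3)
        ]))

    # as_dict() recurses through nested messages and flattens the enums.
    assert policy.as_dict()['internet_access']['restriction_mode'] == 'RESTRICTED_ACCESS'
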
 @dataclass
 class EmailConfig:
     addresses: Optional[List[str]] = None
@@ -967,6 +1754,12 @@ def as_dict(self) -> dict:
         if self.addresses: body['addresses'] = [v for v in self.addresses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmailConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.addresses: body['addresses'] = self.addresses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmailConfig:
         """Deserializes the EmailConfig from a dictionary."""
@@ -981,6 +1774,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Empty:
         """Deserializes the Empty from a dictionary."""
@@ -999,6 +1797,12 @@ def as_dict(self) -> dict:
         if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnhancedSecurityMonitoring into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_enabled is not None: body['is_enabled'] = self.is_enabled
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoring:
         """Deserializes the EnhancedSecurityMonitoring from a dictionary."""
@@ -1035,6 +1839,15 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnhancedSecurityMonitoringSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enhanced_security_monitoring_workspace:
+            body['enhanced_security_monitoring_workspace'] = self.enhanced_security_monitoring_workspace
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnhancedSecurityMonitoringSetting:
         """Deserializes the EnhancedSecurityMonitoringSetting from a dictionary."""
@@ -1056,6 +1869,12 @@ def as_dict(self) -> dict:
         if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EsmEnablementAccount into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.is_enforced is not None: body['is_enforced'] = self.is_enforced
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccount:
         """Deserializes the EsmEnablementAccount from a dictionary."""
@@ -1089,6 +1908,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EsmEnablementAccountSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.esm_enablement_account: body['esm_enablement_account'] = self.esm_enablement_account
+        if self.etag is not None: body['etag'] = self.etag
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EsmEnablementAccountSetting:
         """Deserializes the EsmEnablementAccountSetting from a dictionary."""
@@ -1126,6 +1953,16 @@ def as_dict(self) -> dict:
         if self.token_type is not None: body['tokenType'] = self.token_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential is not None: body['credential'] = self.credential
+        if self.credential_eol_time is not None: body['credentialEolTime'] = self.credential_eol_time
+        if self.owner_id is not None: body['ownerId'] = self.owner_id
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_type is not None: body['tokenType'] = self.token_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeToken:
         """Deserializes the ExchangeToken from a dictionary."""
@@ -1157,6 +1994,14 @@ def as_dict(self) -> dict:
         if self.token_type: body['tokenType'] = [v.value for v in self.token_type]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.partition_id: body['partitionId'] = self.partition_id
+        if self.scopes: body['scopes'] = self.scopes
+        if self.token_type: body['tokenType'] = self.token_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenRequest:
         """Deserializes the ExchangeTokenRequest from a dictionary."""
@@ -1177,6 +2022,12 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExchangeTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExchangeTokenResponse:
         """Deserializes the ExchangeTokenResponse from a dictionary."""
@@ -1196,6 +2047,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FetchIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> FetchIpAccessListResponse:
         """Deserializes the FetchIpAccessListResponse from a dictionary."""
@@ -1233,6 +2090,17 @@ def as_dict(self) -> dict:
         if self.username_set is not None: body['username_set'] = self.username_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GenericWebhookConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.password is not None: body['password'] = self.password
+        if self.password_set is not None: body['password_set'] = self.password_set
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        if self.username is not None: body['username'] = self.username
+        if self.username_set is not None: body['username_set'] = self.username_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GenericWebhookConfig:
         """Deserializes the GenericWebhookConfig from a dictionary."""
@@ -1255,6 +2123,12 @@ def as_dict(self) -> dict:
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListResponse:
         """Deserializes the GetIpAccessListResponse from a dictionary."""
@@ -1273,6 +2147,12 @@ def as_dict(self) -> dict:
         if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetIpAccessListsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetIpAccessListsResponse:
         """Deserializes the GetIpAccessListsResponse from a dictionary."""
@@ -1290,6 +2170,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetTokenPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenPermissionLevelsResponse:
         """Deserializes the GetTokenPermissionLevelsResponse from a dictionary."""
@@ -1308,6 +2194,12 @@ def as_dict(self) -> dict:
         if self.token_info: body['token_info'] = self.token_info.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_info: body['token_info'] = self.token_info
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetTokenResponse:
         """Deserializes the GetTokenResponse from a dictionary."""
@@ -1365,6 +2257,21 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IpAccessListInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.address_count is not None: body['address_count'] = self.address_count
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_id is not None: body['list_id'] = self.list_id
+        if self.list_type is not None: body['list_type'] = self.list_type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IpAccessListInfo:
         """Deserializes the IpAccessListInfo from a dictionary."""
@@ -1392,6 +2299,12 @@ def as_dict(self) -> dict:
         if self.ip_access_lists: body['ip_access_lists'] = [v.as_dict() for v in self.ip_access_lists]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListIpAccessListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.ip_access_lists: body['ip_access_lists'] = self.ip_access_lists
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListIpAccessListResponse:
         """Deserializes the ListIpAccessListResponse from a dictionary."""
@@ -1413,6 +2326,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNccAzurePrivateEndpointRulesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNccAzurePrivateEndpointRulesResponse:
         """Deserializes the ListNccAzurePrivateEndpointRulesResponse from a dictionary."""
@@ -1435,6 +2355,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNetworkConnectivityConfigurationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNetworkConnectivityConfigurationsResponse:
         """Deserializes the ListNetworkConnectivityConfigurationsResponse from a dictionary."""
@@ -1456,6 +2383,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResponse:
         """Deserializes the ListNotificationDestinationsResponse from a dictionary."""
@@ -1482,6 +2416,14 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListNotificationDestinationsResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.destination_type is not None: body['destination_type'] = self.destination_type
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListNotificationDestinationsResult:
         """Deserializes the ListNotificationDestinationsResult from a dictionary."""
@@ -1501,6 +2443,12 @@ def as_dict(self) -> dict:
         if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListPublicTokensResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_infos: body['token_infos'] = self.token_infos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListPublicTokensResponse:
         """Deserializes the ListPublicTokensResponse from a dictionary."""
@@ -1520,6 +2468,12 @@ def as_dict(self) -> dict:
         if self.token_infos: body['token_infos'] = [v.as_dict() for v in self.token_infos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListTokensResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_infos: body['token_infos'] = self.token_infos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListTokensResponse:
         """Deserializes the ListTokensResponse from a dictionary."""
@@ -1551,6 +2505,13 @@ def as_dict(self) -> dict:
         if self.url_set is not None: body['url_set'] = self.url_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MicrosoftTeamsConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MicrosoftTeamsConfig:
         """Deserializes the MicrosoftTeamsConfig from a dictionary."""
@@ -1572,6 +2533,12 @@ def as_dict(self) -> dict:
         if self.cidr_blocks: body['cidr_blocks'] = [v for v in self.cidr_blocks]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAwsStableIpRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.cidr_blocks: body['cidr_blocks'] = self.cidr_blocks
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAwsStableIpRule:
         """Deserializes the NccAwsStableIpRule from a dictionary."""
@@ -1621,15 +2588,31 @@ class NccAzurePrivateEndpointRule:
     updated_time: Optional[int] = None
     """Time in epoch milliseconds when this object was updated."""
 
-    def as_dict(self) -> dict:
-        """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the NccAzurePrivateEndpointRule into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.connection_state is not None: body['connection_state'] = self.connection_state.value
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.deactivated is not None: body['deactivated'] = self.deactivated
+        if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.rule_id is not None: body['rule_id'] = self.rule_id
+        if self.updated_time is not None: body['updated_time'] = self.updated_time
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAzurePrivateEndpointRule into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.connection_state is not None: body['connection_state'] = self.connection_state.value
+        if self.connection_state is not None: body['connection_state'] = self.connection_state
         if self.creation_time is not None: body['creation_time'] = self.creation_time
         if self.deactivated is not None: body['deactivated'] = self.deactivated
         if self.deactivated_at is not None: body['deactivated_at'] = self.deactivated_at
         if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
-        if self.group_id is not None: body['group_id'] = self.group_id.value
+        if self.group_id is not None: body['group_id'] = self.group_id
         if self.network_connectivity_config_id is not None:
             body['network_connectivity_config_id'] = self.network_connectivity_config_id
         if self.resource_id is not None: body['resource_id'] = self.resource_id
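The hunk above also shows the other behavioral difference, for enum fields: `as_dict` unwraps them with `.value`, while `as_shallow_dict` keeps the enum member. A sketch, assuming the connection-state enum in this module exposes an `ESTABLISHED` member:

```python
from databricks.sdk.service.settings import (
    NccAzurePrivateEndpointRule, NccAzurePrivateEndpointRuleConnectionState)

rule = NccAzurePrivateEndpointRule(
    connection_state=NccAzurePrivateEndpointRuleConnectionState.ESTABLISHED)

# as_dict emits the JSON-ready string value of the enum member.
assert rule.as_dict()['connection_state'] == 'ESTABLISHED'

# as_shallow_dict keeps the enum member itself.
assert (rule.as_shallow_dict()['connection_state']
        is NccAzurePrivateEndpointRuleConnectionState.ESTABLISHED)
```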
@@ -1704,6 +2687,14 @@ def as_dict(self) -> dict:
         if self.target_services: body['target_services'] = [v for v in self.target_services]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccAzureServiceEndpointRule into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.subnets: body['subnets'] = self.subnets
+        if self.target_region is not None: body['target_region'] = self.target_region
+        if self.target_services: body['target_services'] = self.target_services
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccAzureServiceEndpointRule:
         """Deserializes the NccAzureServiceEndpointRule from a dictionary."""
@@ -1733,6 +2724,13 @@ def as_dict(self) -> dict:
         if self.target_rules: body['target_rules'] = self.target_rules.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.default_rules: body['default_rules'] = self.default_rules
+        if self.target_rules: body['target_rules'] = self.target_rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressConfig:
         """Deserializes the NccEgressConfig from a dictionary."""
@@ -1762,6 +2760,14 @@ def as_dict(self) -> dict:
             body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressDefaultRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.aws_stable_ip_rule: body['aws_stable_ip_rule'] = self.aws_stable_ip_rule
+        if self.azure_service_endpoint_rule:
+            body['azure_service_endpoint_rule'] = self.azure_service_endpoint_rule
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressDefaultRules:
         """Deserializes the NccEgressDefaultRules from a dictionary."""
@@ -1784,6 +2790,13 @@ def as_dict(self) -> dict:
             body['azure_private_endpoint_rules'] = [v.as_dict() for v in self.azure_private_endpoint_rules]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NccEgressTargetRules into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.azure_private_endpoint_rules:
+            body['azure_private_endpoint_rules'] = self.azure_private_endpoint_rules
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NccEgressTargetRules:
         """Deserializes the NccEgressTargetRules from a dictionary."""
@@ -1831,6 +2844,19 @@ def as_dict(self) -> dict:
         if self.updated_time is not None: body['updated_time'] = self.updated_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NetworkConnectivityConfiguration into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.account_id is not None: body['account_id'] = self.account_id
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.egress_config: body['egress_config'] = self.egress_config
+        if self.name is not None: body['name'] = self.name
+        if self.network_connectivity_config_id is not None:
+            body['network_connectivity_config_id'] = self.network_connectivity_config_id
+        if self.region is not None: body['region'] = self.region
+        if self.updated_time is not None: body['updated_time'] = self.updated_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NetworkConnectivityConfiguration:
         """Deserializes the NetworkConnectivityConfiguration from a dictionary."""
@@ -1867,6 +2893,15 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NotificationDestination into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.destination_type is not None: body['destination_type'] = self.destination_type
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NotificationDestination:
         """Deserializes the NotificationDestination from a dictionary."""
@@ -1891,6 +2926,13 @@ def as_dict(self) -> dict:
         if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PagerdutyConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.integration_key is not None: body['integration_key'] = self.integration_key
+        if self.integration_key_set is not None: body['integration_key_set'] = self.integration_key_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PagerdutyConfig:
         """Deserializes the PagerdutyConfig from a dictionary."""
@@ -1911,6 +2953,12 @@ def as_dict(self) -> dict:
         if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspace_id is not None: body['workspaceId'] = self.workspace_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionId:
         """Deserializes the PartitionId from a dictionary."""
@@ -1932,6 +2980,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalComputeMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeMessage:
         """Deserializes the PersonalComputeMessage from a dictionary."""
@@ -1975,6 +3029,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PersonalComputeSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.personal_compute: body['personal_compute'] = self.personal_compute
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PersonalComputeSetting:
         """Deserializes the PersonalComputeSetting from a dictionary."""
@@ -2006,6 +3068,15 @@ def as_dict(self) -> dict:
         if self.token_id is not None: body['token_id'] = self.token_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PublicTokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.token_id is not None: body['token_id'] = self.token_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PublicTokenInfo:
         """Deserializes the PublicTokenInfo from a dictionary."""
@@ -2046,6 +3117,16 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceIpAccessList:
         """Deserializes the ReplaceIpAccessList from a dictionary."""
@@ -2064,6 +3145,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ReplaceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ReplaceResponse:
         """Deserializes the ReplaceResponse from a dictionary."""
@@ -2080,6 +3166,12 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestrictWorkspaceAdminsMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsMessage:
         """Deserializes the RestrictWorkspaceAdminsMessage from a dictionary."""
@@ -2119,6 +3211,14 @@ def as_dict(self) -> dict:
         if self.setting_name is not None: body['setting_name'] = self.setting_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestrictWorkspaceAdminsSetting into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.etag is not None: body['etag'] = self.etag
+        if self.restrict_workspace_admins: body['restrict_workspace_admins'] = self.restrict_workspace_admins
+        if self.setting_name is not None: body['setting_name'] = self.setting_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestrictWorkspaceAdminsSetting:
         """Deserializes the RestrictWorkspaceAdminsSetting from a dictionary."""
@@ -2139,6 +3239,12 @@ def as_dict(self) -> dict:
         if self.token_id is not None: body['token_id'] = self.token_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RevokeTokenRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.token_id is not None: body['token_id'] = self.token_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RevokeTokenRequest:
         """Deserializes the RevokeTokenRequest from a dictionary."""
@@ -2153,6 +3259,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RevokeTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RevokeTokenResponse:
         """Deserializes the RevokeTokenResponse from a dictionary."""
@@ -2167,6 +3278,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetStatusResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetStatusResponse:
         """Deserializes the SetStatusResponse from a dictionary."""
@@ -2188,6 +3304,13 @@ def as_dict(self) -> dict:
         if self.url_set is not None: body['url_set'] = self.url_set
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SlackConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.url is not None: body['url'] = self.url
+        if self.url_set is not None: body['url_set'] = self.url_set
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SlackConfig:
         """Deserializes the SlackConfig from a dictionary."""
@@ -2205,6 +3328,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StringMessage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StringMessage:
         """Deserializes the StringMessage from a dictionary."""
@@ -2235,6 +3364,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlRequest:
         """Deserializes the TokenAccessControlRequest from a dictionary."""
@@ -2272,6 +3411,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenAccessControlResponse:
         """Deserializes the TokenAccessControlResponse from a dictionary."""
@@ -2299,6 +3449,9 @@ class TokenInfo:
     expiry_time: Optional[int] = None
     """Timestamp when the token expires."""
 
+    last_used_day: Optional[int] = None
+    """Approximate timestamp for the day the token was last used. Accurate up to 1 day."""
+
     owner_id: Optional[int] = None
     """User ID of the user that owns the token."""
 
@@ -2316,6 +3469,21 @@ def as_dict(self) -> dict:
         if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
         if self.creation_time is not None: body['creation_time'] = self.creation_time
         if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
+        if self.owner_id is not None: body['owner_id'] = self.owner_id
+        if self.token_id is not None: body['token_id'] = self.token_id
+        if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_by_id is not None: body['created_by_id'] = self.created_by_id
+        if self.created_by_username is not None: body['created_by_username'] = self.created_by_username
+        if self.creation_time is not None: body['creation_time'] = self.creation_time
+        if self.expiry_time is not None: body['expiry_time'] = self.expiry_time
+        if self.last_used_day is not None: body['last_used_day'] = self.last_used_day
         if self.owner_id is not None: body['owner_id'] = self.owner_id
         if self.token_id is not None: body['token_id'] = self.token_id
         if self.workspace_id is not None: body['workspace_id'] = self.workspace_id
@@ -2329,6 +3497,7 @@ def from_dict(cls, d: Dict[str, any]) -> TokenInfo:
                    created_by_username=d.get('created_by_username', None),
                    creation_time=d.get('creation_time', None),
                    expiry_time=d.get('expiry_time', None),
+                   last_used_day=d.get('last_used_day', None),
                    owner_id=d.get('owner_id', None),
                    token_id=d.get('token_id', None),
                    workspace_id=d.get('workspace_id', None))
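A quick round-trip sketch of the new `last_used_day` field added to `TokenInfo` above; per its docstring the value is only accurate to the day:

```python
from databricks.sdk.service.settings import TokenInfo

raw = {'token_id': 'tok-1', 'last_used_day': 1719964800000}
info = TokenInfo.from_dict(raw)
assert info.last_used_day == 1719964800000

# Both serializers now carry the field through.
assert info.as_dict()['last_used_day'] == 1719964800000
assert info.as_shallow_dict()['last_used_day'] == 1719964800000
```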
@@ -2351,6 +3520,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermission:
         """Deserializes the TokenPermission from a dictionary."""
@@ -2382,6 +3559,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissions:
         """Deserializes the TokenPermissions from a dictionary."""
@@ -2404,6 +3589,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsDescription:
         """Deserializes the TokenPermissionsDescription from a dictionary."""
@@ -2422,6 +3614,12 @@ def as_dict(self) -> dict:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TokenPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TokenPermissionsRequest:
         """Deserializes the TokenPermissionsRequest from a dictionary."""
@@ -2432,9 +3630,142 @@ class TokenType(Enum):
     """The type of token request. As of now, only `AZURE_ACTIVE_DIRECTORY_TOKEN` is supported."""
 
     ARCLIGHT_AZURE_EXCHANGE_TOKEN = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN'
+    ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY = 'ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY'
     AZURE_ACTIVE_DIRECTORY_TOKEN = 'AZURE_ACTIVE_DIRECTORY_TOKEN'
 
 
+@dataclass
+class UpdateAccountIpAccessEnableRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AccountIpAccessEnable
+
+    field_mask: str
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAccountIpAccessEnableRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAccountIpAccessEnableRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAccountIpAccessEnableRequest:
+        """Deserializes the UpdateAccountIpAccessEnableRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AccountIpAccessEnable))
+
+
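The `field_mask` docstring above describes the PATCH semantics these new request types share. A hedged sketch of building one such request; the `acct_ip_acl_enabled` field name and `BooleanMessage` wrapper are assumptions for illustration, not confirmed by this patch:

```python
from databricks.sdk.service.settings import (AccountIpAccessEnable,
                                             BooleanMessage,
                                             UpdateAccountIpAccessEnableRequest)

req = UpdateAccountIpAccessEnableRequest(
    allow_missing=True,  # always true for the Settings API, per the docstring
    setting=AccountIpAccessEnable(  # field name assumed for illustration
        acct_ip_acl_enabled=BooleanMessage(value=True)),
    # Comma-separated, no spaces; dots navigate sub-fields. A mask of '*'
    # would mean full replacement, which the docstring advises against.
    field_mask='acct_ip_acl_enabled.value')

body = req.as_dict()
assert body['field_mask'] == 'acct_ip_acl_enabled.value'
assert body['setting'] == {'acct_ip_acl_enabled': {'value': True}}
```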
+@dataclass
+class UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AibiDashboardEmbeddingAccessPolicySetting
+
+    field_mask: str
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingAccessPolicySettingRequest:
+        """Deserializes the UpdateAibiDashboardEmbeddingAccessPolicySettingRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingAccessPolicySetting))
+
+
+@dataclass
+class UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
+    """Details required to update a setting."""
+
+    allow_missing: bool
+    """This should always be set to true for Settings API. Added for AIP compliance."""
+
+    setting: AibiDashboardEmbeddingApprovedDomainsSetting
+
+    field_mask: str
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest:
+        """Deserializes the UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest from a dictionary."""
+        return cls(allow_missing=d.get('allow_missing', None),
+                   field_mask=d.get('field_mask', None),
+                   setting=_from_dict(d, 'setting', AibiDashboardEmbeddingApprovedDomainsSetting))
+
+
 @dataclass
 class UpdateAutomaticClusterUpdateSettingRequest:
     """Details required to update a setting."""
@@ -2445,9 +3776,15 @@ class UpdateAutomaticClusterUpdateSettingRequest:
     setting: AutomaticClusterUpdateSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2457,6 +3794,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAutomaticClusterUpdateSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAutomaticClusterUpdateSettingRequest:
         """Deserializes the UpdateAutomaticClusterUpdateSettingRequest from a dictionary."""
@@ -2475,9 +3820,15 @@ class UpdateComplianceSecurityProfileSettingRequest:
     setting: ComplianceSecurityProfileSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateComplianceSecurityProfileSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2487,6 +3838,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateComplianceSecurityProfileSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateComplianceSecurityProfileSettingRequest:
         """Deserializes the UpdateComplianceSecurityProfileSettingRequest from a dictionary."""
@@ -2505,9 +3864,15 @@ class UpdateCspEnablementAccountSettingRequest:
     setting: CspEnablementAccountSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateCspEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2517,6 +3882,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCspEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCspEnablementAccountSettingRequest:
         """Deserializes the UpdateCspEnablementAccountSettingRequest from a dictionary."""
@@ -2542,9 +3915,15 @@ class UpdateDefaultNamespaceSettingRequest:
     applies when using Unity Catalog-enabled compute."""
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDefaultNamespaceSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2554,6 +3933,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDefaultNamespaceSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDefaultNamespaceSettingRequest:
         """Deserializes the UpdateDefaultNamespaceSettingRequest from a dictionary."""
@@ -2572,9 +3959,15 @@ class UpdateDisableLegacyAccessRequest:
     setting: DisableLegacyAccess
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyAccessRequest into a dictionary suitable for use as a JSON request body."""
@@ -2584,6 +3977,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyAccessRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyAccessRequest:
         """Deserializes the UpdateDisableLegacyAccessRequest from a dictionary."""
@@ -2602,9 +4003,15 @@ class UpdateDisableLegacyDbfsRequest:
     setting: DisableLegacyDbfs
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyDbfsRequest into a dictionary suitable for use as a JSON request body."""
@@ -2614,6 +4021,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyDbfsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyDbfsRequest:
         """Deserializes the UpdateDisableLegacyDbfsRequest from a dictionary."""
@@ -2632,9 +4047,15 @@ class UpdateDisableLegacyFeaturesRequest:
     setting: DisableLegacyFeatures
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateDisableLegacyFeaturesRequest into a dictionary suitable for use as a JSON request body."""
@@ -2644,6 +4065,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateDisableLegacyFeaturesRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateDisableLegacyFeaturesRequest:
         """Deserializes the UpdateDisableLegacyFeaturesRequest from a dictionary."""
@@ -2662,9 +4091,15 @@ class UpdateEnhancedSecurityMonitoringSettingRequest:
     setting: EnhancedSecurityMonitoringSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2674,6 +4109,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateEnhancedSecurityMonitoringSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEnhancedSecurityMonitoringSettingRequest:
         """Deserializes the UpdateEnhancedSecurityMonitoringSettingRequest from a dictionary."""
@@ -2692,9 +4135,15 @@ class UpdateEsmEnablementAccountSettingRequest:
     setting: EsmEnablementAccountSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateEsmEnablementAccountSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2704,6 +4153,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateEsmEnablementAccountSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateEsmEnablementAccountSettingRequest:
         """Deserializes the UpdateEsmEnablementAccountSettingRequest from a dictionary."""
@@ -2743,6 +4200,16 @@ def as_dict(self) -> dict:
         if self.list_type is not None: body['list_type'] = self.list_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateIpAccessList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.ip_access_list_id is not None: body['ip_access_list_id'] = self.ip_access_list_id
+        if self.ip_addresses: body['ip_addresses'] = self.ip_addresses
+        if self.label is not None: body['label'] = self.label
+        if self.list_type is not None: body['list_type'] = self.list_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateIpAccessList:
         """Deserializes the UpdateIpAccessList from a dictionary."""
@@ -2762,6 +4229,7 @@ class UpdateNotificationDestinationRequest:
     """The display name for the notification destination."""
 
     id: Optional[str] = None
+    """UUID identifying notification destination."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateNotificationDestinationRequest into a dictionary suitable for use as a JSON request body."""
@@ -2771,6 +4239,14 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateNotificationDestinationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config: body['config'] = self.config
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateNotificationDestinationRequest:
         """Deserializes the UpdateNotificationDestinationRequest from a dictionary."""
@@ -2789,9 +4265,15 @@ class UpdatePersonalComputeSettingRequest:
     setting: PersonalComputeSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdatePersonalComputeSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2801,6 +4283,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePersonalComputeSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdatePersonalComputeSettingRequest:
         """Deserializes the UpdatePersonalComputeSettingRequest from a dictionary."""
@@ -2817,6 +4307,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -2833,9 +4328,15 @@ class UpdateRestrictWorkspaceAdminsSettingRequest:
     setting: RestrictWorkspaceAdminsSetting
 
     field_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a dictionary suitable for use as a JSON request body."""
@@ -2845,6 +4346,14 @@ def as_dict(self) -> dict:
         if self.setting: body['setting'] = self.setting.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRestrictWorkspaceAdminsSettingRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_missing is not None: body['allow_missing'] = self.allow_missing
+        if self.field_mask is not None: body['field_mask'] = self.field_mask
+        if self.setting: body['setting'] = self.setting
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRestrictWorkspaceAdminsSettingRequest:
         """Deserializes the UpdateRestrictWorkspaceAdminsSettingRequest from a dictionary."""
@@ -3079,6 +4588,7 @@ def __init__(self, api_client):
 
         self._csp_enablement_account = CspEnablementAccountAPI(self._api)
         self._disable_legacy_features = DisableLegacyFeaturesAPI(self._api)
+        self._enable_ip_access_lists = EnableIpAccessListsAPI(self._api)
         self._esm_enablement_account = EsmEnablementAccountAPI(self._api)
         self._personal_compute = PersonalComputeAPI(self._api)
 
@@ -3092,6 +4602,11 @@ def disable_legacy_features(self) -> DisableLegacyFeaturesAPI:
         """Disable legacy features for new Databricks workspaces."""
         return self._disable_legacy_features
 
+    @property
+    def enable_ip_access_lists(self) -> EnableIpAccessListsAPI:
+        """Controls the enforcement of IP access lists for accessing the account console."""
+        return self._enable_ip_access_lists
+
     @property
     def esm_enablement_account(self) -> EsmEnablementAccountAPI:
         """The enhanced security monitoring setting at the account level controls whether to enable the feature on new workspaces."""
@@ -3103,6 +4618,197 @@ def personal_compute(self) -> PersonalComputeAPI:
         return self._personal_compute
 
 
+class AibiDashboardEmbeddingAccessPolicyAPI:
+    """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS)."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self,
+               *,
+               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse:
+        """Delete the AI/BI dashboard embedding access policy.
+        
+        Delete the AI/BI dashboard embedding access policy, reverting to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteAibiDashboardEmbeddingAccessPolicySettingResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Retrieve the AI/BI dashboard embedding access policy.
+        
+        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+        permitting AI/BI dashboards to be embedded on approved domains.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           query=query,
+                           headers=headers)
+        return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting,
+               field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting:
+        """Update the AI/BI dashboard embedding access policy.
+        
+        Updates the AI/BI dashboard embedding access policy at the workspace level.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_acc_policy/names/default',
+                           body=body,
+                           headers=headers)
+        return AibiDashboardEmbeddingAccessPolicySetting.from_dict(res)
+
+
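A hedged usage sketch of the new workspace-level API. The `w.settings.aibi_dashboard_embedding_access_policy` accessor is confirmed by the `SettingsAPI` wiring later in this patch; the assumption here is that the returned setting carries an `etag` field, as the other settings messages do:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
api = w.settings.aibi_dashboard_embedding_access_policy

# Read -> delete pattern recommended by the docstrings: take the etag from a
# GET and pass it to DELETE so concurrent writers are detected instead of
# silently overwritten.
current = api.get()
api.delete(etag=current.etag)
```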
+class AibiDashboardEmbeddingApprovedDomainsAPI:
+    """Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self,
+               *,
+               etag: Optional[str] = None) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse:
+        """Delete AI/BI dashboard embedding approved domains.
+        
+        Delete the list of domains approved to host embedded AI/BI dashboards, reverting to the default
+        empty list.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('DELETE',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           query=query,
+                           headers=headers)
+        return DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Retrieve the list of domains approved to host embedded AI/BI dashboards.
+        
+        Retrieves the list of domains approved to host embedded AI/BI dashboards.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           query=query,
+                           headers=headers)
+        return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting,
+               field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting:
+        """Update the list of domains approved to host embedded AI/BI dashboards.
+        
+        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do('PATCH',
+                           '/api/2.0/settings/types/aibi_dash_embed_ws_apprvd_domains/names/default',
+                           body=body,
+                           headers=headers)
+        return AibiDashboardEmbeddingApprovedDomainsSetting.from_dict(res)
+
+
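Updating the approved-domains list follows the same `allow_missing`/`setting`/`field_mask` shape, and per the docstring it fails unless the current access policy is ALLOW_APPROVED_DOMAINS. In this sketch the inner attribute names are assumptions for illustration, not taken from this patch:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
api = w.settings.aibi_dashboard_embedding_approved_domains

setting = api.get()
# Assumed nested field names; check the generated
# AibiDashboardEmbeddingApprovedDomainsSetting dataclass for the real shape.
setting.aibi_dashboard_embedding_approved_domains.approved_domains = ['dashboards.example.com']
api.update(
    allow_missing=True,
    setting=setting,
    field_mask='aibi_dashboard_embedding_approved_domains.approved_domains',
)
```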
 class AutomaticClusterUpdateAPI:
     """Controls whether automatic cluster update is enabled for the current workspace. By default, it is turned
     off."""
@@ -3148,9 +4854,15 @@ def update(self, allow_missing: bool, setting: AutomaticClusterUpdateSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AutomaticClusterUpdateSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AutomaticClusterUpdateSetting`
         """
@@ -3214,9 +4926,15 @@ def update(self, allow_missing: bool, setting: ComplianceSecurityProfileSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`ComplianceSecurityProfileSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`ComplianceSecurityProfileSetting`
         """
@@ -3316,9 +5034,15 @@ def update(self, allow_missing: bool, setting: CspEnablementAccountSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`CspEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`CspEnablementAccountSetting`
         """
@@ -3426,9 +5150,15 @@ def update(self, allow_missing: bool, setting: DefaultNamespaceSetting,
           restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
           applies when using Unity Catalog-enabled compute.
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DefaultNamespaceSetting`
         """
@@ -3516,9 +5246,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyAccess,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyAccess`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyAccess`
         """
@@ -3601,9 +5337,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyDbfs, field_mask: st
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyDbfs`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyDbfs`
         """
@@ -3692,9 +5434,15 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyFeatures`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyFeatures`
         """
@@ -3712,6 +5460,101 @@ def update(self, allow_missing: bool, setting: DisableLegacyFeatures,
         return DisableLegacyFeatures.from_dict(res)
 
 
+class EnableIpAccessListsAPI:
+    """Controls the enforcement of IP access lists for accessing the account console. Allowing you to enable or
+    disable restricted access based on IP addresses."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def delete(self, *, etag: Optional[str] = None) -> DeleteAccountIpAccessEnableResponse:
+        """Delete the account IP access toggle setting.
+        
+        Reverts the value of the account IP access toggle setting to its default (ON).
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAccountIpAccessEnableResponse`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'DELETE',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            query=query,
+            headers=headers)
+        return DeleteAccountIpAccessEnableResponse.from_dict(res)
+
+    def get(self, *, etag: Optional[str] = None) -> AccountIpAccessEnable:
+        """Get the account IP access toggle setting.
+        
+        Gets the value of the account IP access toggle setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        """
+
+        query = {}
+        if etag is not None: query['etag'] = etag
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            query=query,
+            headers=headers)
+        return AccountIpAccessEnable.from_dict(res)
+
+    def update(self, allow_missing: bool, setting: AccountIpAccessEnable,
+               field_mask: str) -> AccountIpAccessEnable:
+        """Update the account IP access toggle setting.
+        
+        Updates the value of the account IP access toggle setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AccountIpAccessEnable`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        """
+        body = {}
+        if allow_missing is not None: body['allow_missing'] = allow_missing
+        if field_mask is not None: body['field_mask'] = field_mask
+        if setting is not None: body['setting'] = setting.as_dict()
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        res = self._api.do(
+            'PATCH',
+            f'/api/2.0/accounts/{self._api.account_id}/settings/types/acct_ip_acl_enable/names/default',
+            body=body,
+            headers=headers)
+        return AccountIpAccessEnable.from_dict(res)
+
+
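This is an account-level setting, so it hangs off `AccountClient` rather than `WorkspaceClient`; the `a.settings.enable_ip_access_lists` accessor is confirmed by the `AccountSettingsAPI` wiring earlier in this patch. A hedged toggle sketch, with the inner field names assumed:

```python
from databricks.sdk import AccountClient

a = AccountClient()

toggle = a.settings.enable_ip_access_lists.get()
# Assumed inner shape; consult the generated AccountIpAccessEnable dataclass.
toggle.acct_ip_acl_enable.value = False
a.settings.enable_ip_access_lists.update(
    allow_missing=True,
    setting=toggle,
    field_mask='acct_ip_acl_enable.value',
)
```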
 class EnhancedSecurityMonitoringAPI:
     """Controls whether enhanced security monitoring is enabled for the current workspace. If the compliance
     security profile is enabled, this is automatically enabled. By default, it is disabled. However, if the
@@ -3761,9 +5604,15 @@ def update(self, allow_missing: bool, setting: EnhancedSecurityMonitoringSetting
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EnhancedSecurityMonitoringSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         """
@@ -3824,9 +5673,15 @@ def update(self, allow_missing: bool, setting: EsmEnablementAccountSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EsmEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EsmEnablementAccountSetting`
         """
@@ -4365,6 +6220,7 @@ def update(self,
         required in the request body.
         
         :param id: str
+          UUID identifying the notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
@@ -4455,9 +6311,15 @@ def update(self, allow_missing: bool, setting: PersonalComputeSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`PersonalComputeSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`PersonalComputeSetting`
         """
@@ -4555,9 +6417,15 @@ def update(self, allow_missing: bool, setting: RestrictWorkspaceAdminsSetting,
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`RestrictWorkspaceAdminsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         """
@@ -4580,6 +6448,8 @@ class SettingsAPI:
     def __init__(self, api_client):
         self._api = api_client
 
+        self._aibi_dashboard_embedding_access_policy = AibiDashboardEmbeddingAccessPolicyAPI(self._api)
+        self._aibi_dashboard_embedding_approved_domains = AibiDashboardEmbeddingApprovedDomainsAPI(self._api)
         self._automatic_cluster_update = AutomaticClusterUpdateAPI(self._api)
         self._compliance_security_profile = ComplianceSecurityProfileAPI(self._api)
         self._default_namespace = DefaultNamespaceAPI(self._api)
@@ -4588,6 +6458,16 @@ def __init__(self, api_client):
         self._enhanced_security_monitoring = EnhancedSecurityMonitoringAPI(self._api)
         self._restrict_workspace_admins = RestrictWorkspaceAdminsAPI(self._api)
 
+    @property
+    def aibi_dashboard_embedding_access_policy(self) -> AibiDashboardEmbeddingAccessPolicyAPI:
+        """Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the workspace level."""
+        return self._aibi_dashboard_embedding_access_policy
+
+    @property
+    def aibi_dashboard_embedding_approved_domains(self) -> AibiDashboardEmbeddingApprovedDomainsAPI:
+        """Controls the list of domains approved to host the embedded AI/BI dashboards."""
+        return self._aibi_dashboard_embedding_approved_domains
+
     @property
     def automatic_cluster_update(self) -> AutomaticClusterUpdateAPI:
         """Controls whether automatic cluster update is enabled for the current workspace."""
@@ -4667,7 +6547,7 @@ def delete(self, token_id: str):
         Deletes a token, specified by its ID.
         
         :param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.
         
         
         """
@@ -4751,7 +6631,8 @@ def set_permissions(
             access_control_list: Optional[List[TokenAccessControlRequest]] = None) -> TokenPermissions:
         """Set token permissions.
         
-        Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
         
diff --git a/databricks/sdk/service/sharing.py b/databricks/sdk/service/sharing.py
index 772bc7aee..2015f4ac5 100755
--- a/databricks/sdk/service/sharing.py
+++ b/databricks/sdk/service/sharing.py
@@ -23,418 +23,6 @@ class AuthenticationType(Enum):
     TOKEN = 'TOKEN'
 
 
-@dataclass
-class CentralCleanRoomInfo:
-    clean_room_assets: Optional[List[CleanRoomAssetInfo]] = None
-    """All assets from all collaborators that are available in the clean room. Only one of table_info
-    or notebook_info will be filled in."""
-
-    collaborators: Optional[List[CleanRoomCollaboratorInfo]] = None
-    """All collaborators who are in the clean room."""
-
-    creator: Optional[CleanRoomCollaboratorInfo] = None
-    """The collaborator who created the clean room."""
-
-    station_cloud: Optional[str] = None
-    """The cloud where clean room tasks will be run."""
-
-    station_region: Optional[str] = None
-    """The region where clean room tasks will be run."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CentralCleanRoomInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.clean_room_assets: body['clean_room_assets'] = [v.as_dict() for v in self.clean_room_assets]
-        if self.collaborators: body['collaborators'] = [v.as_dict() for v in self.collaborators]
-        if self.creator: body['creator'] = self.creator.as_dict()
-        if self.station_cloud is not None: body['station_cloud'] = self.station_cloud
-        if self.station_region is not None: body['station_region'] = self.station_region
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CentralCleanRoomInfo:
-        """Deserializes the CentralCleanRoomInfo from a dictionary."""
-        return cls(clean_room_assets=_repeated_dict(d, 'clean_room_assets', CleanRoomAssetInfo),
-                   collaborators=_repeated_dict(d, 'collaborators', CleanRoomCollaboratorInfo),
-                   creator=_from_dict(d, 'creator', CleanRoomCollaboratorInfo),
-                   station_cloud=d.get('station_cloud', None),
-                   station_region=d.get('station_region', None))
-
-
-@dataclass
-class CleanRoomAssetInfo:
-    added_at: Optional[int] = None
-    """Time at which this asset was added, in epoch milliseconds."""
-
-    notebook_info: Optional[CleanRoomNotebookInfo] = None
-    """Details about the notebook asset."""
-
-    owner: Optional[CleanRoomCollaboratorInfo] = None
-    """The collaborator who owns the asset."""
-
-    table_info: Optional[CleanRoomTableInfo] = None
-    """Details about the table asset."""
-
-    updated_at: Optional[int] = None
-    """Time at which this asset was updated, in epoch milliseconds."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomAssetInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.added_at is not None: body['added_at'] = self.added_at
-        if self.notebook_info: body['notebook_info'] = self.notebook_info.as_dict()
-        if self.owner: body['owner'] = self.owner.as_dict()
-        if self.table_info: body['table_info'] = self.table_info.as_dict()
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomAssetInfo:
-        """Deserializes the CleanRoomAssetInfo from a dictionary."""
-        return cls(added_at=d.get('added_at', None),
-                   notebook_info=_from_dict(d, 'notebook_info', CleanRoomNotebookInfo),
-                   owner=_from_dict(d, 'owner', CleanRoomCollaboratorInfo),
-                   table_info=_from_dict(d, 'table_info', CleanRoomTableInfo),
-                   updated_at=d.get('updated_at', None))
-
-
-@dataclass
-class CleanRoomCatalog:
-    catalog_name: Optional[str] = None
-    """Name of the catalog in the clean room station. Empty for notebooks."""
-
-    notebook_files: Optional[List[SharedDataObject]] = None
-    """The details of the shared notebook files."""
-
-    tables: Optional[List[SharedDataObject]] = None
-    """The details of the shared tables."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCatalog into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.notebook_files: body['notebook_files'] = [v.as_dict() for v in self.notebook_files]
-        if self.tables: body['tables'] = [v.as_dict() for v in self.tables]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalog:
-        """Deserializes the CleanRoomCatalog from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   notebook_files=_repeated_dict(d, 'notebook_files', SharedDataObject),
-                   tables=_repeated_dict(d, 'tables', SharedDataObject))
-
-
-@dataclass
-class CleanRoomCatalogUpdate:
-    catalog_name: Optional[str] = None
-    """The name of the catalog to update assets."""
-
-    updates: Optional[SharedDataObjectUpdate] = None
-    """The updates to the assets in the catalog."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCatalogUpdate into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.updates: body['updates'] = self.updates.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCatalogUpdate:
-        """Deserializes the CleanRoomCatalogUpdate from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   updates=_from_dict(d, 'updates', SharedDataObjectUpdate))
-
-
-@dataclass
-class CleanRoomCollaboratorInfo:
-    global_metastore_id: Optional[str] = None
-    """The global Unity Catalog metastore id of the collaborator. Also known as the sharing identifier.
-    The identifier is of format __cloud__:__region__:__metastore-uuid__."""
-
-    organization_name: Optional[str] = None
-    """The organization name of the collaborator. This is configured in the metastore for Delta Sharing
-    and is used to identify the organization to other collaborators."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomCollaboratorInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
-        if self.organization_name is not None: body['organization_name'] = self.organization_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomCollaboratorInfo:
-        """Deserializes the CleanRoomCollaboratorInfo from a dictionary."""
-        return cls(global_metastore_id=d.get('global_metastore_id', None),
-                   organization_name=d.get('organization_name', None))
-
-
-@dataclass
-class CleanRoomInfo:
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    created_at: Optional[int] = None
-    """Time at which this clean room was created, in epoch milliseconds."""
-
-    created_by: Optional[str] = None
-    """Username of clean room creator."""
-
-    local_catalogs: Optional[List[CleanRoomCatalog]] = None
-    """Catalog aliases shared by the current collaborator with asset details."""
-
-    name: Optional[str] = None
-    """Name of the clean room."""
-
-    owner: Optional[str] = None
-    """Username of current owner of clean room."""
-
-    remote_detailed_info: Optional[CentralCleanRoomInfo] = None
-    """Central clean room details."""
-
-    updated_at: Optional[int] = None
-    """Time at which this clean room was updated, in epoch milliseconds."""
-
-    updated_by: Optional[str] = None
-    """Username of clean room updater."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.created_by is not None: body['created_by'] = self.created_by
-        if self.local_catalogs: body['local_catalogs'] = [v.as_dict() for v in self.local_catalogs]
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.updated_by is not None: body['updated_by'] = self.updated_by
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomInfo:
-        """Deserializes the CleanRoomInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   created_at=d.get('created_at', None),
-                   created_by=d.get('created_by', None),
-                   local_catalogs=_repeated_dict(d, 'local_catalogs', CleanRoomCatalog),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None),
-                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo),
-                   updated_at=d.get('updated_at', None),
-                   updated_by=d.get('updated_by', None))
-
-
-@dataclass
-class CleanRoomNotebookInfo:
-    notebook_content: Optional[str] = None
-    """The base64 representation of the notebook content in HTML."""
-
-    notebook_name: Optional[str] = None
-    """The name of the notebook."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomNotebookInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.notebook_content is not None: body['notebook_content'] = self.notebook_content
-        if self.notebook_name is not None: body['notebook_name'] = self.notebook_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomNotebookInfo:
-        """Deserializes the CleanRoomNotebookInfo from a dictionary."""
-        return cls(notebook_content=d.get('notebook_content', None),
-                   notebook_name=d.get('notebook_name', None))
-
-
-@dataclass
-class CleanRoomTableInfo:
-    catalog_name: Optional[str] = None
-    """Name of parent catalog."""
-
-    columns: Optional[List[ColumnInfo]] = None
-    """The array of __ColumnInfo__ definitions of the table's columns."""
-
-    full_name: Optional[str] = None
-    """Full name of table, in form of __catalog_name__.__schema_name__.__table_name__"""
-
-    name: Optional[str] = None
-    """Name of table, relative to parent schema."""
-
-    schema_name: Optional[str] = None
-    """Name of parent schema relative to its parent catalog."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CleanRoomTableInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.catalog_name is not None: body['catalog_name'] = self.catalog_name
-        if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
-        if self.full_name is not None: body['full_name'] = self.full_name
-        if self.name is not None: body['name'] = self.name
-        if self.schema_name is not None: body['schema_name'] = self.schema_name
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CleanRoomTableInfo:
-        """Deserializes the CleanRoomTableInfo from a dictionary."""
-        return cls(catalog_name=d.get('catalog_name', None),
-                   columns=_repeated_dict(d, 'columns', ColumnInfo),
-                   full_name=d.get('full_name', None),
-                   name=d.get('name', None),
-                   schema_name=d.get('schema_name', None))
-
-
-@dataclass
-class ColumnInfo:
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    mask: Optional[ColumnMask] = None
-
-    name: Optional[str] = None
-    """Name of Column."""
-
-    nullable: Optional[bool] = None
-    """Whether field may be Null (default: true)."""
-
-    partition_index: Optional[int] = None
-    """Partition index for column."""
-
-    position: Optional[int] = None
-    """Ordinal position of column (starting at position 0)."""
-
-    type_interval_type: Optional[str] = None
-    """Format of IntervalType."""
-
-    type_json: Optional[str] = None
-    """Full data type specification, JSON-serialized."""
-
-    type_name: Optional[ColumnTypeName] = None
-    """Name of type (INT, STRUCT, MAP, etc.)."""
-
-    type_precision: Optional[int] = None
-    """Digits of precision; required for DecimalTypes."""
-
-    type_scale: Optional[int] = None
-    """Digits to right of decimal; Required for DecimalTypes."""
-
-    type_text: Optional[str] = None
-    """Full data type specification as SQL/catalogString text."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ColumnInfo into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.mask: body['mask'] = self.mask.as_dict()
-        if self.name is not None: body['name'] = self.name
-        if self.nullable is not None: body['nullable'] = self.nullable
-        if self.partition_index is not None: body['partition_index'] = self.partition_index
-        if self.position is not None: body['position'] = self.position
-        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
-        if self.type_json is not None: body['type_json'] = self.type_json
-        if self.type_name is not None: body['type_name'] = self.type_name.value
-        if self.type_precision is not None: body['type_precision'] = self.type_precision
-        if self.type_scale is not None: body['type_scale'] = self.type_scale
-        if self.type_text is not None: body['type_text'] = self.type_text
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
-        """Deserializes the ColumnInfo from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   mask=_from_dict(d, 'mask', ColumnMask),
-                   name=d.get('name', None),
-                   nullable=d.get('nullable', None),
-                   partition_index=d.get('partition_index', None),
-                   position=d.get('position', None),
-                   type_interval_type=d.get('type_interval_type', None),
-                   type_json=d.get('type_json', None),
-                   type_name=_enum(d, 'type_name', ColumnTypeName),
-                   type_precision=d.get('type_precision', None),
-                   type_scale=d.get('type_scale', None),
-                   type_text=d.get('type_text', None))
-
-
-@dataclass
-class ColumnMask:
-    function_name: Optional[str] = None
-    """The full name of the column mask SQL UDF."""
-
-    using_column_names: Optional[List[str]] = None
-    """The list of additional table columns to be passed as input to the column mask function. The
-    first arg of the mask function should be of the type of the column being masked and the types of
-    the rest of the args should match the types of columns in 'using_column_names'."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ColumnMask into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.function_name is not None: body['function_name'] = self.function_name
-        if self.using_column_names: body['using_column_names'] = [v for v in self.using_column_names]
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ColumnMask:
-        """Deserializes the ColumnMask from a dictionary."""
-        return cls(function_name=d.get('function_name', None),
-                   using_column_names=d.get('using_column_names', None))
-
-
-class ColumnTypeName(Enum):
-    """Name of type (INT, STRUCT, MAP, etc.)."""
-
-    ARRAY = 'ARRAY'
-    BINARY = 'BINARY'
-    BOOLEAN = 'BOOLEAN'
-    BYTE = 'BYTE'
-    CHAR = 'CHAR'
-    DATE = 'DATE'
-    DECIMAL = 'DECIMAL'
-    DOUBLE = 'DOUBLE'
-    FLOAT = 'FLOAT'
-    INT = 'INT'
-    INTERVAL = 'INTERVAL'
-    LONG = 'LONG'
-    MAP = 'MAP'
-    NULL = 'NULL'
-    SHORT = 'SHORT'
-    STRING = 'STRING'
-    STRUCT = 'STRUCT'
-    TABLE_TYPE = 'TABLE_TYPE'
-    TIMESTAMP = 'TIMESTAMP'
-    TIMESTAMP_NTZ = 'TIMESTAMP_NTZ'
-    USER_DEFINED_TYPE = 'USER_DEFINED_TYPE'
-
-
-@dataclass
-class CreateCleanRoom:
-    name: str
-    """Name of the clean room."""
-
-    remote_detailed_info: CentralCleanRoomInfo
-    """Central clean room details."""
-
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CreateCleanRoom into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.remote_detailed_info: body['remote_detailed_info'] = self.remote_detailed_info.as_dict()
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> CreateCleanRoom:
-        """Deserializes the CreateCleanRoom from a dictionary."""
-        return cls(comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   remote_detailed_info=_from_dict(d, 'remote_detailed_info', CentralCleanRoomInfo))
-
-
 @dataclass
 class CreateProvider:
     name: str
@@ -447,7 +35,8 @@ class CreateProvider:
     """Description about the provider."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is required when the __authentication_type__ is **TOKEN** or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS**, or not provided."""
 
     def as_dict(self) -> dict:
         """Serializes the CreateProvider into a dictionary suitable for use as a JSON request body."""
@@ -458,6 +47,15 @@ def as_dict(self) -> dict:
         if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateProvider into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateProvider:
         """Deserializes the CreateProvider from a dictionary."""
@@ -479,7 +77,7 @@ class CreateRecipient:
     """Description about the recipient."""
 
     data_recipient_global_metastore_id: Optional[str] = None
-    """The global Unity Catalog metastore id provided by the data recipient. This field is required
+    """The global Unity Catalog metastore id provided by the data recipient. This field is only present
     when the __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
 
@@ -493,10 +91,12 @@ class CreateRecipient:
     """Username of the recipient owner."""
 
     properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None
-    """Recipient properties as map of string key-value pairs."""
+    """Recipient properties as map of string key-value pairs. When provided in update request, the
+    specified properties will override the existing properties. To add and remove properties, one
+    would need to perform a read-modify-write."""
 
     sharing_code: Optional[str] = None
-    """The one-time sharing code provided by the data recipient. This field is required when the
+    """The one-time sharing code provided by the data recipient. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
 
     def as_dict(self) -> dict:
@@ -514,6 +114,21 @@ def as_dict(self) -> dict:
         if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRecipient into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.comment is not None: body['comment'] = self.comment
+        if self.data_recipient_global_metastore_id is not None:
+            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRecipient:
         """Deserializes the CreateRecipient from a dictionary."""
@@ -547,6 +162,14 @@ def as_dict(self) -> dict:
         if self.storage_root is not None: body['storage_root'] = self.storage_root
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateShare:
         """Deserializes the CreateShare from a dictionary."""
@@ -563,6 +186,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -577,6 +205,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetActivationUrlInfoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetActivationUrlInfoResponse:
         """Deserializes the GetActivationUrlInfoResponse from a dictionary."""
@@ -599,6 +232,13 @@ def as_dict(self) -> dict:
         if self.permissions_out: body['permissions_out'] = [v.as_dict() for v in self.permissions_out]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRecipientSharePermissionsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.permissions_out: body['permissions_out'] = self.permissions_out
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRecipientSharePermissionsResponse:
         """Deserializes the GetRecipientSharePermissionsResponse from a dictionary."""
@@ -617,33 +257,16 @@ def as_dict(self) -> dict:
         if self.allowed_ip_addresses: body['allowed_ip_addresses'] = [v for v in self.allowed_ip_addresses]
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> IpAccessList:
-        """Deserializes the IpAccessList from a dictionary."""
-        return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None))
-
-
-@dataclass
-class ListCleanRoomsResponse:
-    clean_rooms: Optional[List[CleanRoomInfo]] = None
-    """An array of clean rooms. Remote details (central) are not included."""
-
-    next_page_token: Optional[str] = None
-    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
-    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
-    def as_dict(self) -> dict:
-        """Serializes the ListCleanRoomsResponse into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the IpAccessList into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.clean_rooms: body['clean_rooms'] = [v.as_dict() for v in self.clean_rooms]
-        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.allowed_ip_addresses: body['allowed_ip_addresses'] = self.allowed_ip_addresses
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> ListCleanRoomsResponse:
-        """Deserializes the ListCleanRoomsResponse from a dictionary."""
-        return cls(clean_rooms=_repeated_dict(d, 'clean_rooms', CleanRoomInfo),
-                   next_page_token=d.get('next_page_token', None))
+    def from_dict(cls, d: Dict[str, any]) -> IpAccessList:
+        """Deserializes the IpAccessList from a dictionary."""
+        return cls(allowed_ip_addresses=d.get('allowed_ip_addresses', None))
 
 
 @dataclass
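
A brief illustrative sketch of `IpAccessList` in use, based on the field docs above: it restricts where a recipient's bearer token may be used from. The recipient name and CIDR range are placeholders.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import IpAccessList

w = WorkspaceClient()
w.recipients.update(name='external-partner',
                    ip_access_list=IpAccessList(allowed_ip_addresses=['203.0.113.0/24']))
```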
@@ -662,6 +285,13 @@ def as_dict(self) -> dict:
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProviderSharesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProviderSharesResponse:
         """Deserializes the ListProviderSharesResponse from a dictionary."""
@@ -685,6 +315,13 @@ def as_dict(self) -> dict:
         if self.providers: body['providers'] = [v.as_dict() for v in self.providers]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListProvidersResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.providers: body['providers'] = self.providers
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListProvidersResponse:
         """Deserializes the ListProvidersResponse from a dictionary."""
@@ -708,6 +345,13 @@ def as_dict(self) -> dict:
         if self.recipients: body['recipients'] = [v.as_dict() for v in self.recipients]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListRecipientsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.recipients: body['recipients'] = self.recipients
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListRecipientsResponse:
         """Deserializes the ListRecipientsResponse from a dictionary."""
@@ -731,6 +375,13 @@ def as_dict(self) -> dict:
         if self.shares: body['shares'] = [v.as_dict() for v in self.shares]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSharesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.shares: body['shares'] = self.shares
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSharesResponse:
         """Deserializes the ListSharesResponse from a dictionary."""
@@ -749,12 +400,41 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Partition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Partition:
         """Deserializes the Partition from a dictionary."""
         return cls(values=_repeated_dict(d, 'values', PartitionValue))
 
 
+@dataclass
+class PartitionSpecificationPartition:
+    values: Optional[List[PartitionValue]] = None
+    """An array of partition values."""
+
+    def as_dict(self) -> dict:
+        """Serializes the PartitionSpecificationPartition into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.values: body['values'] = [v.as_dict() for v in self.values]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionSpecificationPartition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> PartitionSpecificationPartition:
+        """Deserializes the PartitionSpecificationPartition from a dictionary."""
+        return cls(values=_repeated_dict(d, 'values', PartitionValue))
+
+
 @dataclass
 class PartitionValue:
     name: Optional[str] = None
@@ -764,7 +444,7 @@ class PartitionValue:
     """The operator to apply for the value."""
 
     recipient_property_key: Optional[str] = None
-    """The key of a Delta Sharing recipient's property. For example `databricks-account-id`. When this
+    """The key of a Delta Sharing recipient's property. For example "databricks-account-id". When this
     field is set, field `value` can not be set."""
 
     value: Optional[str] = None
@@ -781,6 +461,16 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PartitionValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.op is not None: body['op'] = self.op
+        if self.recipient_property_key is not None:
+            body['recipient_property_key'] = self.recipient_property_key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PartitionValue:
         """Deserializes the PartitionValue from a dictionary."""
@@ -791,7 +481,6 @@ def from_dict(cls, d: Dict[str, any]) -> PartitionValue:
 
 
 class PartitionValueOp(Enum):
-    """The operator to apply for the value."""
 
     EQUAL = 'EQUAL'
     LIKE = 'LIKE'
@@ -809,6 +498,7 @@ class Privilege(Enum):
     CREATE_EXTERNAL_TABLE = 'CREATE_EXTERNAL_TABLE'
     CREATE_EXTERNAL_VOLUME = 'CREATE_EXTERNAL_VOLUME'
     CREATE_FOREIGN_CATALOG = 'CREATE_FOREIGN_CATALOG'
+    CREATE_FOREIGN_SECURABLE = 'CREATE_FOREIGN_SECURABLE'
     CREATE_FUNCTION = 'CREATE_FUNCTION'
     CREATE_MANAGED_STORAGE = 'CREATE_MANAGED_STORAGE'
     CREATE_MATERIALIZED_VIEW = 'CREATE_MATERIALIZED_VIEW'
@@ -860,6 +550,13 @@ def as_dict(self) -> dict:
         if self.privileges: body['privileges'] = [v.value for v in self.privileges]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.privileges: body['privileges'] = self.privileges
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PrivilegeAssignment:
         """Deserializes the PrivilegeAssignment from a dictionary."""
@@ -887,7 +584,7 @@ class ProviderInfo:
     data_provider_global_metastore_id: Optional[str] = None
     """The global UC metastore id of the data provider. This field is only present when the
     __authentication_type__ is **DATABRICKS**. The identifier is of format
-    ::."""
+    __cloud__:__region__:__metastore-uuid__."""
 
     metastore_id: Optional[str] = None
     """UUID of the provider's UC metastore. This field is only present when the __authentication_type__
@@ -900,10 +597,12 @@ class ProviderInfo:
     """Username of Provider owner."""
 
     recipient_profile: Optional[RecipientProfile] = None
-    """The recipient profile. This field is only present when the authentication_type is `TOKEN`."""
+    """The recipient profile. This field is only present when the authentication_type is `TOKEN` or
+    `OAUTH_CLIENT_CREDENTIALS`."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is only present when the authentication_type is `TOKEN` or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS** or not provided."""
 
     region: Optional[str] = None
     """Cloud region of the provider's UC metastore. This field is only present when the
@@ -913,7 +612,7 @@ class ProviderInfo:
     """Time at which this Provider was created, in epoch milliseconds."""
 
     updated_by: Optional[str] = None
-    """Username of user who last modified Share."""
+    """Username of user who last modified Provider."""
 
     def as_dict(self) -> dict:
         """Serializes the ProviderInfo into a dictionary suitable for use as a JSON request body."""
@@ -935,6 +634,26 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_provider_global_metastore_id is not None:
+            body['data_provider_global_metastore_id'] = self.data_provider_global_metastore_id
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile: body['recipient_profile'] = self.recipient_profile
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        if self.region is not None: body['region'] = self.region
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderInfo:
         """Deserializes the ProviderInfo from a dictionary."""
@@ -965,6 +684,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ProviderShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ProviderShare:
         """Deserializes the ProviderShare from a dictionary."""
@@ -984,8 +709,8 @@ class RecipientInfo:
     """The delta sharing authentication type."""
 
     cloud: Optional[str] = None
-    """Cloud vendor of the recipient's Unity Catalog Metstore. This field is only present when the
-    __authentication_type__ is **DATABRICKS**`."""
+    """Cloud vendor of the recipient's Unity Catalog Metastore. This field is only present when the
+    __authentication_type__ is **DATABRICKS**."""
 
     comment: Optional[str] = None
     """Description about the recipient."""
@@ -1001,12 +726,15 @@ class RecipientInfo:
     when the __authentication_type__ is **DATABRICKS**. The identifier is of format
     __cloud__:__region__:__metastore-uuid__."""
 
+    expiration_time: Optional[int] = None
+    """Expiration timestamp of the token, in epoch milliseconds."""
+
     ip_access_list: Optional[IpAccessList] = None
     """IP Access List"""
 
     metastore_id: Optional[str] = None
-    """Unique identifier of recipient's Unity Catalog metastore. This field is only present when the
-    __authentication_type__ is **DATABRICKS**"""
+    """Unique identifier of recipient's Unity Catalog Metastore. This field is only present when the
+    __authentication_type__ is **DATABRICKS**."""
 
     name: Optional[str] = None
     """Name of Recipient."""
@@ -1015,10 +743,12 @@ class RecipientInfo:
     """Username of the recipient owner."""
 
     properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None
-    """Recipient properties as map of string key-value pairs."""
+    """Recipient properties as map of string key-value pairs. When provided in update request, the
+    specified properties will override the existing properties. To add and remove properties, one
+    would need to perform a read-modify-write."""
 
     region: Optional[str] = None
-    """Cloud region of the recipient's Unity Catalog Metstore. This field is only present when the
+    """Cloud region of the recipient's Unity Catalog Metastore. This field is only present when the
     __authentication_type__ is **DATABRICKS**."""
 
     sharing_code: Optional[str] = None
@@ -1046,6 +776,7 @@ def as_dict(self) -> dict:
         if self.created_by is not None: body['created_by'] = self.created_by
         if self.data_recipient_global_metastore_id is not None:
             body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
         if self.ip_access_list: body['ip_access_list'] = self.ip_access_list.as_dict()
         if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
         if self.name is not None: body['name'] = self.name
@@ -1058,6 +789,31 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activated is not None: body['activated'] = self.activated
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.authentication_type is not None: body['authentication_type'] = self.authentication_type
+        if self.cloud is not None: body['cloud'] = self.cloud
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.data_recipient_global_metastore_id is not None:
+            body['data_recipient_global_metastore_id'] = self.data_recipient_global_metastore_id
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
+        if self.name is not None: body['name'] = self.name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        if self.region is not None: body['region'] = self.region
+        if self.sharing_code is not None: body['sharing_code'] = self.sharing_code
+        if self.tokens: body['tokens'] = self.tokens
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientInfo:
         """Deserializes the RecipientInfo from a dictionary."""
@@ -1069,6 +825,7 @@ def from_dict(cls, d: Dict[str, any]) -> RecipientInfo:
                    created_at=d.get('created_at', None),
                    created_by=d.get('created_by', None),
                    data_recipient_global_metastore_id=d.get('data_recipient_global_metastore_id', None),
+                   expiration_time=d.get('expiration_time', None),
                    ip_access_list=_from_dict(d, 'ip_access_list', IpAccessList),
                    metastore_id=d.get('metastore_id', None),
                    name=d.get('name', None),
@@ -1101,6 +858,15 @@ def as_dict(self) -> dict:
             body['share_credentials_version'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientProfile into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearer_token'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.share_credentials_version is not None:
+            body['share_credentials_version'] = self.share_credentials_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientProfile:
         """Deserializes the RecipientProfile from a dictionary."""
@@ -1116,7 +882,7 @@ class RecipientTokenInfo:
     retrieved."""
 
     created_at: Optional[int] = None
-    """Time at which this recipient Token was created, in epoch milliseconds."""
+    """Time at which this recipient token was created, in epoch milliseconds."""
 
     created_by: Optional[str] = None
     """Username of recipient token creator."""
@@ -1128,10 +894,10 @@ class RecipientTokenInfo:
     """Unique ID of the recipient token."""
 
     updated_at: Optional[int] = None
-    """Time at which this recipient Token was updated, in epoch milliseconds."""
+    """Time at which this recipient token was updated, in epoch milliseconds."""
 
     updated_by: Optional[str] = None
-    """Username of recipient Token updater."""
+    """Username of recipient token updater."""
 
     def as_dict(self) -> dict:
         """Serializes the RecipientTokenInfo into a dictionary suitable for use as a JSON request body."""
@@ -1145,6 +911,18 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RecipientTokenInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.activation_url is not None: body['activation_url'] = self.activation_url
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.id is not None: body['id'] = self.id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RecipientTokenInfo:
         """Deserializes the RecipientTokenInfo from a dictionary."""
@@ -1181,6 +959,16 @@ def as_dict(self) -> dict:
             body['shareCredentialsVersion'] = self.share_credentials_version
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RetrieveTokenResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bearer_token is not None: body['bearerToken'] = self.bearer_token
+        if self.endpoint is not None: body['endpoint'] = self.endpoint
+        if self.expiration_time is not None: body['expirationTime'] = self.expiration_time
+        if self.share_credentials_version is not None:
+            body['shareCredentialsVersion'] = self.share_credentials_version
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RetrieveTokenResponse:
         """Deserializes the RetrieveTokenResponse from a dictionary."""
@@ -1198,7 +986,7 @@ class RotateRecipientToken:
     expire the existing token immediately, negative number will return an error."""
 
     name: Optional[str] = None
-    """The name of the recipient."""
+    """The name of the Recipient."""
 
     def as_dict(self) -> dict:
         """Serializes the RotateRecipientToken into a dictionary suitable for use as a JSON request body."""
@@ -1208,6 +996,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RotateRecipientToken into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.existing_token_expire_in_seconds is not None:
+            body['existing_token_expire_in_seconds'] = self.existing_token_expire_in_seconds
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RotateRecipientToken:
         """Deserializes the RotateRecipientToken from a dictionary."""
@@ -1228,15 +1024,18 @@ def as_dict(self) -> dict:
         if self.properties: body['properties'] = self.properties
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecurablePropertiesKvPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.properties: body['properties'] = self.properties
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecurablePropertiesKvPairs:
         """Deserializes the SecurablePropertiesKvPairs from a dictionary."""
         return cls(properties=d.get('properties', None))
 
 
-SecurablePropertiesMap = Dict[str, str]
-
-
 @dataclass
 class ShareInfo:
     comment: Optional[str] = None
@@ -1284,6 +1083,21 @@ def as_dict(self) -> dict:
         if self.updated_by is not None: body['updated_by'] = self.updated_by
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.created_by is not None: body['created_by'] = self.created_by
+        if self.name is not None: body['name'] = self.name
+        if self.objects: body['objects'] = self.objects
+        if self.owner is not None: body['owner'] = self.owner
+        if self.storage_location is not None: body['storage_location'] = self.storage_location
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.updated_by is not None: body['updated_by'] = self.updated_by
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareInfo:
         """Deserializes the ShareInfo from a dictionary."""
@@ -1315,6 +1129,13 @@ def as_dict(self) -> dict:
         if self.share_name is not None: body['share_name'] = self.share_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ShareToPrivilegeAssignment into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.privilege_assignments: body['privilege_assignments'] = self.privilege_assignments
+        if self.share_name is not None: body['share_name'] = self.share_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ShareToPrivilegeAssignment:
         """Deserializes the ShareToPrivilegeAssignment from a dictionary."""
@@ -1397,6 +1218,25 @@ def as_dict(self) -> dict:
         if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObject into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.added_at is not None: body['added_at'] = self.added_at
+        if self.added_by is not None: body['added_by'] = self.added_by
+        if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
+        if self.comment is not None: body['comment'] = self.comment
+        if self.content is not None: body['content'] = self.content
+        if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
+        if self.history_data_sharing_status is not None:
+            body['history_data_sharing_status'] = self.history_data_sharing_status
+        if self.name is not None: body['name'] = self.name
+        if self.partitions: body['partitions'] = self.partitions
+        if self.shared_as is not None: body['shared_as'] = self.shared_as
+        if self.start_version is not None: body['start_version'] = self.start_version
+        if self.status is not None: body['status'] = self.status
+        if self.string_shared_as is not None: body['string_shared_as'] = self.string_shared_as
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
         """Deserializes the SharedDataObject from a dictionary."""
@@ -1419,6 +1259,8 @@ def from_dict(cls, d: Dict[str, any]) -> SharedDataObject:
 class SharedDataObjectDataObjectType(Enum):
     """The type of the data object."""
 
+    FEATURE_SPEC = 'FEATURE_SPEC'
+    FUNCTION = 'FUNCTION'
     MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
     MODEL = 'MODEL'
     NOTEBOOK_FILE = 'NOTEBOOK_FILE'
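
The two new `SharedDataObjectDataObjectType` members mean shares can now reference Unity Catalog functions and feature specs. A hedged sketch of adding a function to a share; the share and function names are placeholders, and `SharedDataObjectUpdateAction.ADD` is assumed from the enum defined later in this module.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sharing import (SharedDataObject,
                                            SharedDataObjectDataObjectType,
                                            SharedDataObjectUpdate,
                                            SharedDataObjectUpdateAction)

w = WorkspaceClient()
w.shares.update(name='my_share',
                updates=[
                    SharedDataObjectUpdate(
                        action=SharedDataObjectUpdateAction.ADD,
                        data_object=SharedDataObject(
                            name='main.default.my_udf',
                            data_object_type=SharedDataObjectDataObjectType.FUNCTION)),
                ])
```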
@@ -1458,6 +1300,13 @@ def as_dict(self) -> dict:
         if self.data_object: body['data_object'] = self.data_object.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SharedDataObjectUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.action is not None: body['action'] = self.action
+        if self.data_object: body['data_object'] = self.data_object
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SharedDataObjectUpdate:
         """Deserializes the SharedDataObjectUpdate from a dictionary."""
@@ -1474,42 +1323,15 @@ class SharedDataObjectUpdateAction(Enum):
 
 
 @dataclass
-class UpdateCleanRoom:
-    catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None
-    """Array of shared data object updates."""
-
-    comment: Optional[str] = None
-    """User-provided free-form text description."""
-
-    name: Optional[str] = None
-    """The name of the clean room."""
-
-    owner: Optional[str] = None
-    """Username of current owner of clean room."""
+class UpdatePermissionsResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the UpdateCleanRoom into a dictionary suitable for use as a JSON request body."""
+        """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.catalog_updates: body['catalog_updates'] = [v.as_dict() for v in self.catalog_updates]
-        if self.comment is not None: body['comment'] = self.comment
-        if self.name is not None: body['name'] = self.name
-        if self.owner is not None: body['owner'] = self.owner
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateCleanRoom:
-        """Deserializes the UpdateCleanRoom from a dictionary."""
-        return cls(catalog_updates=_repeated_dict(d, 'catalog_updates', CleanRoomCatalogUpdate),
-                   comment=d.get('comment', None),
-                   name=d.get('name', None),
-                   owner=d.get('owner', None))
-
-
-@dataclass
-class UpdatePermissionsResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdatePermissionsResponse into a dictionary suitable for use as a JSON request body."""
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdatePermissionsResponse into a shallow dictionary of its immediate attributes."""
         body = {}
         return body
 
@@ -1534,7 +1356,8 @@ class UpdateProvider:
     """Username of Provider owner."""
 
     recipient_profile_str: Optional[str] = None
-    """This field is required when the __authentication_type__ is **TOKEN** or not provided."""
+    """This field is required when the __authentication_type__ is **TOKEN**,
+    **OAUTH_CLIENT_CREDENTIALS** or not provided."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateProvider into a dictionary suitable for use as a JSON request body."""
@@ -1546,6 +1369,16 @@ def as_dict(self) -> dict:
         if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateProvider into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.recipient_profile_str is not None: body['recipient_profile_str'] = self.recipient_profile_str
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateProvider:
         """Deserializes the UpdateProvider from a dictionary."""
@@ -1571,7 +1404,7 @@ class UpdateRecipient:
     """Name of the recipient."""
 
     new_name: Optional[str] = None
-    """New name for the recipient."""
+    """New name for the recipient. ."""
 
     owner: Optional[str] = None
     """Username of the recipient owner."""
@@ -1593,6 +1426,18 @@ def as_dict(self) -> dict:
         if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRecipient into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
+        if self.ip_access_list: body['ip_access_list'] = self.ip_access_list
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.properties_kvpairs: body['properties_kvpairs'] = self.properties_kvpairs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
         """Deserializes the UpdateRecipient from a dictionary."""
@@ -1605,20 +1450,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateRecipient:
                    properties_kvpairs=_from_dict(d, 'properties_kvpairs', SecurablePropertiesKvPairs))
 
 
-@dataclass
-class UpdateResponse:
-
-    def as_dict(self) -> dict:
-        """Serializes the UpdateResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
-        """Deserializes the UpdateResponse from a dictionary."""
-        return cls()
-
-
 @dataclass
 class UpdateShare:
     comment: Optional[str] = None
@@ -1650,6 +1481,17 @@ def as_dict(self) -> dict:
         if self.updates: body['updates'] = [v.as_dict() for v in self.updates]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateShare into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.comment is not None: body['comment'] = self.comment
+        if self.name is not None: body['name'] = self.name
+        if self.new_name is not None: body['new_name'] = self.new_name
+        if self.owner is not None: body['owner'] = self.owner
+        if self.storage_root is not None: body['storage_root'] = self.storage_root
+        if self.updates: body['updates'] = self.updates
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateShare:
         """Deserializes the UpdateShare from a dictionary."""
@@ -1690,6 +1532,15 @@ def as_dict(self) -> dict:
         if self.page_token is not None: body['page_token'] = self.page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateSharePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.changes: body['changes'] = self.changes
+        if self.max_results is not None: body['max_results'] = self.max_results
+        if self.name is not None: body['name'] = self.name
+        if self.page_token is not None: body['page_token'] = self.page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
         """Deserializes the UpdateSharePermissions from a dictionary."""
@@ -1699,157 +1550,6 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateSharePermissions:
                    page_token=d.get('page_token', None))
 
 
-class CleanRoomsAPI:
-    """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive
-    enterprise data, including customer data, for measurements, insights, activation and other use cases.
-    
-    To create clean rooms, you must be a metastore admin or a user with the **CREATE_CLEAN_ROOM** privilege."""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def create(self,
-               name: str,
-               remote_detailed_info: CentralCleanRoomInfo,
-               *,
-               comment: Optional[str] = None) -> CleanRoomInfo:
-        """Create a clean room.
-        
-        Creates a new clean room with specified collaborators. The caller must be a metastore admin or have the
-        **CREATE_CLEAN_ROOM** privilege on the metastore.
-        
-        :param name: str
-          Name of the clean room.
-        :param remote_detailed_info: :class:`CentralCleanRoomInfo`
-          Central clean room details.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-        body = {}
-        if comment is not None: body['comment'] = comment
-        if name is not None: body['name'] = name
-        if remote_detailed_info is not None: body['remote_detailed_info'] = remote_detailed_info.as_dict()
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('POST', '/api/2.1/unity-catalog/clean-rooms', body=body, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-    def delete(self, name: str):
-        """Delete a clean room.
-        
-        Deletes a data object clean room from the metastore. The caller must be an owner of the clean room.
-        
-        :param name: str
-          The name of the clean room.
-        
-        
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        self._api.do('DELETE', f'/api/2.1/unity-catalog/clean-rooms/{name}', headers=headers)
-
-    def get(self, name: str, *, include_remote_details: Optional[bool] = None) -> CleanRoomInfo:
-        """Get a clean room.
-        
-        Gets a data object clean room from the metastore. The caller must be a metastore admin or the owner of
-        the clean room.
-        
-        :param name: str
-          The name of the clean room.
-        :param include_remote_details: bool (optional)
-          Whether to include remote details (central) on the clean room.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-
-        query = {}
-        if include_remote_details is not None: query['include_remote_details'] = include_remote_details
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do('GET', f'/api/2.1/unity-catalog/clean-rooms/{name}', query=query, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-    def list(self,
-             *,
-             max_results: Optional[int] = None,
-             page_token: Optional[str] = None) -> Iterator[CleanRoomInfo]:
-        """List clean rooms.
-        
-        Gets an array of data object clean rooms from the metastore. The caller must be a metastore admin or
-        the owner of the clean room. There is no guarantee of a specific ordering of the elements in the
-        array.
-        
-        :param max_results: int (optional)
-          Maximum number of clean rooms to return. If not set, all the clean rooms are returned (not
-          recommended). - when set to a value greater than 0, the page length is the minimum of this value and
-          a server configured value; - when set to 0, the page length is set to a server configured value
-          (recommended); - when set to a value less than 0, an invalid parameter error is returned;
-        :param page_token: str (optional)
-          Opaque pagination token to go to next page based on previous query.
-        
-        :returns: Iterator over :class:`CleanRoomInfo`
-        """
-
-        query = {}
-        if max_results is not None: query['max_results'] = max_results
-        if page_token is not None: query['page_token'] = page_token
-        headers = {'Accept': 'application/json', }
-
-        while True:
-            json = self._api.do('GET', '/api/2.1/unity-catalog/clean-rooms', query=query, headers=headers)
-            if 'clean_rooms' in json:
-                for v in json['clean_rooms']:
-                    yield CleanRoomInfo.from_dict(v)
-            if 'next_page_token' not in json or not json['next_page_token']:
-                return
-            query['page_token'] = json['next_page_token']
-
-    def update(self,
-               name: str,
-               *,
-               catalog_updates: Optional[List[CleanRoomCatalogUpdate]] = None,
-               comment: Optional[str] = None,
-               owner: Optional[str] = None) -> CleanRoomInfo:
-        """Update a clean room.
-        
-        Updates the clean room with the changes and data objects in the request. The caller must be the owner
-        of the clean room or a metastore admin.
-        
-        When the caller is a metastore admin, only the __owner__ field can be updated.
-        
-        In the case that the clean room name is changed **updateCleanRoom** requires that the caller is both
-        the clean room owner and a metastore admin.
-        
-        For each table that is added through this method, the clean room owner must also have **SELECT**
-        privilege on the table. The privilege must be maintained indefinitely for recipients to be able to
-        access the table. Typically, you should use a group as the clean room owner.
-        
-        Table removals through **update** do not require additional privileges.
-        
-        :param name: str
-          The name of the clean room.
-        :param catalog_updates: List[:class:`CleanRoomCatalogUpdate`] (optional)
-          Array of shared data object updates.
-        :param comment: str (optional)
-          User-provided free-form text description.
-        :param owner: str (optional)
-          Username of current owner of clean room.
-        
-        :returns: :class:`CleanRoomInfo`
-        """
-        body = {}
-        if catalog_updates is not None: body['catalog_updates'] = [v.as_dict() for v in catalog_updates]
-        if comment is not None: body['comment'] = comment
-        if owner is not None: body['owner'] = owner
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/clean-rooms/{name}', body=body, headers=headers)
-        return CleanRoomInfo.from_dict(res)
-
-
 class ProvidersAPI:
     """A data provider is an object representing the organization in the real world who shares the data. A
     provider contains shares which further contain the shared data."""
@@ -1875,7 +1575,8 @@ def create(self,
         :param comment: str (optional)
           Description about the provider.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         """
@@ -1957,6 +1658,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/providers', query=query, headers=headers)
             if 'providers' in json:
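
The inserted `if "max_results" not in query: query['max_results'] = 0` line opts the generated `list` methods into server-side pagination: `max_results=0` asks the server to use its configured page length rather than returning all results. A condensed sketch of the resulting loop, with a hypothetical `do_get(path, query)` standing in for `self._api.do('GET', ...)`:

```python
from typing import Callable, Dict, Iterator


def paginate(do_get: Callable[[str, Dict], dict], path: str, items_key: str,
             query: Dict) -> Iterator[dict]:
    if "max_results" not in query:
        query['max_results'] = 0  # 0 asks the server for its configured page size
    while True:
        page = do_get(path, query)
        for item in page.get(items_key, []):
            yield item
        if not page.get('next_page_token'):
            return
        query['page_token'] = page['next_page_token']
```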
@@ -1998,12 +1700,18 @@ def list_shares(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
-        json = self._api.do('GET',
-                            f'/api/2.1/unity-catalog/providers/{name}/shares',
-                            query=query,
-                            headers=headers)
-        parsed = ListProviderSharesResponse.from_dict(json).shares
-        return parsed if parsed is not None else []
+        if "max_results" not in query: query['max_results'] = 0
+        while True:
+            json = self._api.do('GET',
+                                f'/api/2.1/unity-catalog/providers/{name}/shares',
+                                query=query,
+                                headers=headers)
+            if 'shares' in json:
+                for v in json['shares']:
+                    yield ProviderShare.from_dict(v)
+            if 'next_page_token' not in json or not json['next_page_token']:
+                return
+            query['page_token'] = json['next_page_token']
 
     def update(self,
                name: str,
@@ -2027,7 +1735,8 @@ def update(self,
         :param owner: str (optional)
           Username of Provider owner.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         """
@@ -2122,7 +1831,7 @@ def create(self,
         """Create a share recipient.
         
         Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-        be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
         
         :param name: str
           Name of Recipient.
@@ -2131,8 +1840,8 @@ def create(self,
         :param comment: str (optional)
           Description about the recipient.
         :param data_recipient_global_metastore_id: str (optional)
-          The global Unity Catalog metastore id provided by the data recipient. This field is required when
-          the __authentication_type__ is **DATABRICKS**. The identifier is of format
+          The global Unity Catalog metastore id provided by the data recipient. This field is only present
+          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
         :param expiration_time: int (optional)
           Expiration timestamp of the token, in epoch milliseconds.
@@ -2141,9 +1850,11 @@ def create(self,
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs.
+          Recipient properties as map of string key-value pairs. When provided in update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
         :param sharing_code: str (optional)
-          The one-time sharing code provided by the data recipient. This field is required when the
+          The one-time sharing code provided by the data recipient. This field is only present when the
           __authentication_type__ is **DATABRICKS**.
         
         :returns: :class:`RecipientInfo`
@@ -2233,6 +1944,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/recipients', query=query, headers=headers)
             if 'recipients' in json:
@@ -2249,7 +1961,7 @@ def rotate_token(self, name: str, existing_token_expire_in_seconds: int) -> Reci
         The caller must be the owner of the recipient.
         
         :param name: str
-          The name of the recipient.
+          The name of the Recipient.
         :param existing_token_expire_in_seconds: int
          The number of seconds after which the existing token expires. This will set the expiration_time of
          the existing token only to a smaller timestamp; it cannot extend the expiration_time. Use 0 to expire
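
A sketch of token rotation with the grace period described above; the recipient name is a placeholder.

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Keep the previous token valid for one more hour after rotation:
info = w.recipients.rotate_token(name='external-partner',
                                 existing_token_expire_in_seconds=3600)

# Or revoke the previous token immediately:
info = w.recipients.rotate_token(name='external-partner',
                                 existing_token_expire_in_seconds=0)
print(info.tokens)
```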
@@ -2313,7 +2025,7 @@ def update(self,
                ip_access_list: Optional[IpAccessList] = None,
                new_name: Optional[str] = None,
                owner: Optional[str] = None,
-               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None):
+               properties_kvpairs: Optional[SecurablePropertiesKvPairs] = None) -> RecipientInfo:
         """Update a share recipient.
         
         Updates an existing recipient in the metastore. The caller must be a metastore admin or the owner of
@@ -2329,7 +2041,7 @@ def update(self,
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
-          New name for the recipient.
+          New name for the recipient.
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
@@ -2337,7 +2049,7 @@ def update(self,
           specified properties will override the existing properties. To add and remove properties, one would
           need to perform a read-modify-write.
         
-        
+        :returns: :class:`RecipientInfo`
         """
         body = {}
         if comment is not None: body['comment'] = comment
@@ -2348,7 +2060,8 @@ def update(self,
         if properties_kvpairs is not None: body['properties_kvpairs'] = properties_kvpairs.as_dict()
         headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
 
-        self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers)
+        res = self._api.do('PATCH', f'/api/2.1/unity-catalog/recipients/{name}', body=body, headers=headers)
+        return RecipientInfo.from_dict(res)
 
 
 class SharesAPI:
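
With this change `recipients.update` parses the response into a `RecipientInfo` instead of returning `None`, so a follow-up `get` round trip can be dropped. A hedged sketch, with a placeholder recipient name:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
updated = w.recipients.update(name='external-partner', comment='ownership rotated')
print(updated.name, updated.updated_at)
```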
@@ -2452,6 +2165,7 @@ def list(self,
         if page_token is not None: query['page_token'] = page_token
         headers = {'Accept': 'application/json', }
 
+        if "max_results" not in query: query['max_results'] = 0
         while True:
             json = self._api.do('GET', '/api/2.1/unity-catalog/shares', query=query, headers=headers)
             if 'shares' in json:
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 7a224feeb..059b744ef 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -36,6 +36,14 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AccessControl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AccessControl:
         """Deserializes the AccessControl from a dictionary."""
@@ -118,6 +126,26 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Alert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Alert:
         """Deserializes the Alert from a dictionary."""
@@ -161,6 +189,15 @@ def as_dict(self) -> dict:
         if self.threshold: body['threshold'] = self.threshold.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertCondition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
+        if self.op is not None: body['op'] = self.op
+        if self.operand: body['operand'] = self.operand
+        if self.threshold: body['threshold'] = self.threshold
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertCondition:
         """Deserializes the AlertCondition from a dictionary."""
@@ -180,6 +217,12 @@ def as_dict(self) -> dict:
         if self.column: body['column'] = self.column.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConditionOperand into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column: body['column'] = self.column
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionOperand:
         """Deserializes the AlertConditionOperand from a dictionary."""
@@ -196,6 +239,12 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertConditionThreshold into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertConditionThreshold:
         """Deserializes the AlertConditionThreshold from a dictionary."""
@@ -212,6 +261,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOperandColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandColumn:
         """Deserializes the AlertOperandColumn from a dictionary."""
@@ -234,6 +289,14 @@ def as_dict(self) -> dict:
         if self.string_value is not None: body['string_value'] = self.string_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOperandValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.double_value is not None: body['double_value'] = self.double_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOperandValue:
         """Deserializes the AlertOperandValue from a dictionary."""
@@ -297,6 +360,18 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column is not None: body['column'] = self.column
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.empty_result_state is not None: body['empty_result_state'] = self.empty_result_state
+        if self.muted is not None: body['muted'] = self.muted
+        if self.op is not None: body['op'] = self.op
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertOptions:
         """Deserializes the AlertOptions from a dictionary."""
@@ -382,6 +457,24 @@ def as_dict(self) -> dict:
         if self.user_id is not None: body['user_id'] = self.user_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AlertQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query is not None: body['query'] = self.query
+        if self.tags: body['tags'] = self.tags
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user_id is not None: body['user_id'] = self.user_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AlertQuery:
         """Deserializes the AlertQuery from a dictionary."""
@@ -434,6 +527,15 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the BaseChunkInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> BaseChunkInfo:
         """Deserializes the BaseChunkInfo from a dictionary."""
@@ -451,6 +553,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CancelExecutionResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CancelExecutionResponse:
         """Deserializes the CancelExecutionResponse from a dictionary."""
@@ -473,6 +580,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Channel into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Channel:
         """Deserializes the Channel from a dictionary."""
@@ -496,6 +610,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ChannelInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dbsql_version is not None: body['dbsql_version'] = self.dbsql_version
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ChannelInfo:
         """Deserializes the ChannelInfo from a dictionary."""
@@ -507,7 +628,80 @@ class ChannelName(Enum):
     CHANNEL_NAME_CURRENT = 'CHANNEL_NAME_CURRENT'
     CHANNEL_NAME_CUSTOM = 'CHANNEL_NAME_CUSTOM'
     CHANNEL_NAME_PREVIEW = 'CHANNEL_NAME_PREVIEW'
-    CHANNEL_NAME_UNSPECIFIED = 'CHANNEL_NAME_UNSPECIFIED'
+    CHANNEL_NAME_PREVIOUS = 'CHANNEL_NAME_PREVIOUS'
+
+
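Note the enum change above: `CHANNEL_NAME_UNSPECIFIED` is removed and `CHANNEL_NAME_PREVIOUS` is added, so any caller pinned to the old member must migrate. A small sketch with the new member:

```python
from databricks.sdk.service.sql import Channel, ChannelName

ch = Channel(name=ChannelName.CHANNEL_NAME_PREVIOUS)
ch.as_dict()  # {'name': 'CHANNEL_NAME_PREVIOUS'}
```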
+@dataclass
+class ClientConfig:
+    allow_custom_js_visualizations: Optional[bool] = None
+
+    allow_downloads: Optional[bool] = None
+
+    allow_external_shares: Optional[bool] = None
+
+    allow_subscriptions: Optional[bool] = None
+
+    date_format: Optional[str] = None
+
+    date_time_format: Optional[str] = None
+
+    disable_publish: Optional[bool] = None
+
+    enable_legacy_autodetect_types: Optional[bool] = None
+
+    feature_show_permissions_control: Optional[bool] = None
+
+    hide_plotly_mode_bar: Optional[bool] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the ClientConfig into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.allow_custom_js_visualizations is not None:
+            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
+        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
+        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
+        if self.date_format is not None: body['date_format'] = self.date_format
+        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
+        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+        if self.enable_legacy_autodetect_types is not None:
+            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+        if self.feature_show_permissions_control is not None:
+            body['feature_show_permissions_control'] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ClientConfig into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.allow_custom_js_visualizations is not None:
+            body['allow_custom_js_visualizations'] = self.allow_custom_js_visualizations
+        if self.allow_downloads is not None: body['allow_downloads'] = self.allow_downloads
+        if self.allow_external_shares is not None: body['allow_external_shares'] = self.allow_external_shares
+        if self.allow_subscriptions is not None: body['allow_subscriptions'] = self.allow_subscriptions
+        if self.date_format is not None: body['date_format'] = self.date_format
+        if self.date_time_format is not None: body['date_time_format'] = self.date_time_format
+        if self.disable_publish is not None: body['disable_publish'] = self.disable_publish
+        if self.enable_legacy_autodetect_types is not None:
+            body['enable_legacy_autodetect_types'] = self.enable_legacy_autodetect_types
+        if self.feature_show_permissions_control is not None:
+            body['feature_show_permissions_control'] = self.feature_show_permissions_control
+        if self.hide_plotly_mode_bar is not None: body['hide_plotly_mode_bar'] = self.hide_plotly_mode_bar
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ClientConfig:
+        """Deserializes the ClientConfig from a dictionary."""
+        return cls(allow_custom_js_visualizations=d.get('allow_custom_js_visualizations', None),
+                   allow_downloads=d.get('allow_downloads', None),
+                   allow_external_shares=d.get('allow_external_shares', None),
+                   allow_subscriptions=d.get('allow_subscriptions', None),
+                   date_format=d.get('date_format', None),
+                   date_time_format=d.get('date_time_format', None),
+                   disable_publish=d.get('disable_publish', None),
+                   enable_legacy_autodetect_types=d.get('enable_legacy_autodetect_types', None),
+                   feature_show_permissions_control=d.get('feature_show_permissions_control', None),
+                   hide_plotly_mode_bar=d.get('hide_plotly_mode_bar', None))
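`ClientConfig` holds only scalar flags, so its deep and shallow serializers agree; a quick round-trip sketch:

```python
from databricks.sdk.service.sql import ClientConfig

cfg = ClientConfig.from_dict({'allow_downloads': True, 'date_format': 'YYYY-MM-DD'})
assert cfg.as_dict() == cfg.as_shallow_dict()  # no nested fields to recurse into
```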
 
 
 @dataclass
@@ -547,6 +741,18 @@ def as_dict(self) -> dict:
         if self.type_text is not None: body['type_text'] = self.type_text
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.position is not None: body['position'] = self.position
+        if self.type_interval_type is not None: body['type_interval_type'] = self.type_interval_type
+        if self.type_name is not None: body['type_name'] = self.type_name
+        if self.type_precision is not None: body['type_precision'] = self.type_precision
+        if self.type_scale is not None: body['type_scale'] = self.type_scale
+        if self.type_text is not None: body['type_text'] = self.type_text
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -612,6 +818,16 @@ def as_dict(self) -> dict:
         if self.rearm is not None: body['rearm'] = self.rearm
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.rearm is not None: body['rearm'] = self.rearm
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlert:
         """Deserializes the CreateAlert from a dictionary."""
@@ -632,6 +848,12 @@ def as_dict(self) -> dict:
         if self.alert: body['alert'] = self.alert.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlertRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequest:
         """Deserializes the CreateAlertRequest from a dictionary."""
@@ -683,6 +905,19 @@ def as_dict(self) -> dict:
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateAlertRequestAlert:
         """Deserializes the CreateAlertRequestAlert from a dictionary."""
@@ -706,6 +941,12 @@ def as_dict(self) -> dict:
         if self.query: body['query'] = self.query.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query: body['query'] = self.query
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequest:
         """Deserializes the CreateQueryRequest from a dictionary."""
@@ -762,6 +1003,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateQueryRequestQuery:
         """Deserializes the CreateQueryRequestQuery from a dictionary."""
@@ -788,6 +1045,12 @@ def as_dict(self) -> dict:
         if self.visualization: body['visualization'] = self.visualization.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.visualization: body['visualization'] = self.visualization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequest:
         """Deserializes the CreateVisualizationRequest from a dictionary."""
@@ -823,6 +1086,16 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVisualizationRequestVisualization:
         """Deserializes the CreateVisualizationRequestVisualization from a dictionary."""
@@ -924,6 +1197,25 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWarehouseRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
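For context, `CreateWarehouseRequest` maps one-to-one onto `WarehousesAPI.create`; a sketch with placeholder name and sizing, using the SDK's usual waiter pattern for long-running operations:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
wh = w.warehouses.create(name='sdk-demo',        # placeholder name
                         cluster_size='2X-Small',
                         max_num_clusters=1,
                         auto_stop_mins=10).result()  # block until the warehouse is running
print(wh.id)
```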
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseRequest:
         """Deserializes the CreateWarehouseRequest from a dictionary."""
@@ -962,6 +1254,12 @@ def as_dict(self) -> dict:
         if self.id is not None: body['id'] = self.id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWarehouseResponse:
         """Deserializes the CreateWarehouseResponse from a dictionary."""
@@ -999,6 +1297,17 @@ def as_dict(self) -> dict:
         if self.width is not None: body['width'] = self.width
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateWidget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options
+        if self.text is not None: body['text'] = self.text
+        if self.visualization_id is not None: body['visualization_id'] = self.visualization_id
+        if self.width is not None: body['width'] = self.width
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateWidget:
         """Deserializes the CreateWidget from a dictionary."""
@@ -1090,6 +1399,29 @@ def as_dict(self) -> dict:
         if self.widgets: body['widgets'] = [v.as_dict() for v in self.widgets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Dashboard into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.dashboard_filters_enabled is not None:
+            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
+        if self.id is not None: body['id'] = self.id
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
+        if self.slug is not None: body['slug'] = self.slug
+        if self.tags: body['tags'] = self.tags
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.widgets: body['widgets'] = self.widgets
+        return body
+
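List-valued fields follow the same shallow rule: `Dashboard.as_dict` expands `widgets` through a comprehension of `.as_dict()` calls, while `as_shallow_dict` keeps the `Widget` objects (`Widget` is defined elsewhere in this module; only its `id` field is used here):

```python
from databricks.sdk.service.sql import Dashboard, Widget

d = Dashboard(name='kpis', widgets=[Widget(id='w1')])
d.as_dict()['widgets']          # [{'id': 'w1'}]
d.as_shallow_dict()['widgets']  # [Widget(id='w1')]
```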
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Dashboard:
         """Deserializes the Dashboard from a dictionary."""
@@ -1134,6 +1466,15 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardEditContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_id is not None: body['dashboard_id'] = self.dashboard_id
+        if self.name is not None: body['name'] = self.name
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardEditContent:
         """Deserializes the DashboardEditContent from a dictionary."""
@@ -1155,6 +1496,12 @@ def as_dict(self) -> dict:
         if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardOptions:
         """Deserializes the DashboardOptions from a dictionary."""
@@ -1193,6 +1540,18 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DashboardPostContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dashboard_filters_enabled is not None:
+            body['dashboard_filters_enabled'] = self.dashboard_filters_enabled
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.name is not None: body['name'] = self.name
+        if self.parent is not None: body['parent'] = self.parent
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DashboardPostContent:
         """Deserializes the DashboardPostContent from a dictionary."""
@@ -1253,6 +1612,20 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DataSource into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.pause_reason is not None: body['pause_reason'] = self.pause_reason
+        if self.paused is not None: body['paused'] = self.paused
+        if self.supports_auto_limit is not None: body['supports_auto_limit'] = self.supports_auto_limit
+        if self.syntax is not None: body['syntax'] = self.syntax
+        if self.type is not None: body['type'] = self.type
+        if self.view_only is not None: body['view_only'] = self.view_only
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DataSource:
         """Deserializes the DataSource from a dictionary."""
@@ -1287,6 +1660,13 @@ def as_dict(self) -> dict:
         if self.start is not None: body['start'] = self.start
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateRange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end is not None: body['end'] = self.end
+        if self.start is not None: body['start'] = self.start
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRange:
         """Deserializes the DateRange from a dictionary."""
@@ -1317,6 +1697,16 @@ def as_dict(self) -> dict:
         if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateRangeValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value
+        if self.dynamic_date_range_value is not None:
+            body['dynamic_date_range_value'] = self.dynamic_date_range_value
+        if self.precision is not None: body['precision'] = self.precision
+        if self.start_day_of_week is not None: body['start_day_of_week'] = self.start_day_of_week
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateRangeValue:
         """Deserializes the DateRangeValue from a dictionary."""
@@ -1368,6 +1758,14 @@ def as_dict(self) -> dict:
         if self.precision is not None: body['precision'] = self.precision.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DateValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_value is not None: body['date_value'] = self.date_value
+        if self.dynamic_date_value is not None: body['dynamic_date_value'] = self.dynamic_date_value
+        if self.precision is not None: body['precision'] = self.precision
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DateValue:
         """Deserializes the DateValue from a dictionary."""
@@ -1390,6 +1788,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -1404,6 +1807,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteWarehouseResponse:
         """Deserializes the DeleteWarehouseResponse from a dictionary."""
@@ -1443,10 +1851,20 @@ def as_dict(self) -> dict:
         if self.rearm is not None: body['rearm'] = self.rearm
         return body
 
-    @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> EditAlert:
-        """Deserializes the EditAlert from a dictionary."""
-        return cls(alert_id=d.get('alert_id', None),
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert_id is not None: body['alert_id'] = self.alert_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.rearm is not None: body['rearm'] = self.rearm
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> EditAlert:
+        """Deserializes the EditAlert from a dictionary."""
+        return cls(alert_id=d.get('alert_id', None),
                    name=d.get('name', None),
                    options=_from_dict(d, 'options', AlertOptions),
                    query_id=d.get('query_id', None),
@@ -1547,6 +1965,26 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditWarehouseRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditWarehouseRequest:
         """Deserializes the EditWarehouseRequest from a dictionary."""
@@ -1583,6 +2021,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EditWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EditWarehouseResponse:
         """Deserializes the EditWarehouseResponse from a dictionary."""
@@ -1599,6 +2042,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Empty into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Empty:
         """Deserializes the Empty from a dictionary."""
@@ -1618,6 +2066,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointConfPair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointConfPair:
         """Deserializes the EndpointConfPair from a dictionary."""
@@ -1652,6 +2107,16 @@ def as_dict(self) -> dict:
         if self.summary is not None: body['summary'] = self.summary
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointHealth into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.details is not None: body['details'] = self.details
+        if self.failure_reason: body['failure_reason'] = self.failure_reason
+        if self.message is not None: body['message'] = self.message
+        if self.status is not None: body['status'] = self.status
+        if self.summary is not None: body['summary'] = self.summary
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointHealth:
         """Deserializes the EndpointHealth from a dictionary."""
@@ -1780,6 +2245,32 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.health: body['health'] = self.health
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
+        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
+        if self.odbc_params: body['odbc_params'] = self.odbc_params
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.state is not None: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
@@ -1827,6 +2318,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTagPair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTagPair:
         """Deserializes the EndpointTagPair from a dictionary."""
@@ -1843,6 +2341,12 @@ def as_dict(self) -> dict:
         if self.custom_tags: body['custom_tags'] = [v.as_dict() for v in self.custom_tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointTags into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.custom_tags: body['custom_tags'] = self.custom_tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointTags:
         """Deserializes the EndpointTags from a dictionary."""
@@ -1868,6 +2372,14 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnumValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enum_options is not None: body['enum_options'] = self.enum_options
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EnumValue:
         """Deserializes the EnumValue from a dictionary."""
@@ -2009,6 +2521,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExecuteStatementRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_limit is not None: body['byte_limit'] = self.byte_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.disposition is not None: body['disposition'] = self.disposition
+        if self.format is not None: body['format'] = self.format
+        if self.on_wait_timeout is not None: body['on_wait_timeout'] = self.on_wait_timeout
+        if self.parameters: body['parameters'] = self.parameters
+        if self.row_limit is not None: body['row_limit'] = self.row_limit
+        if self.schema is not None: body['schema'] = self.schema
+        if self.statement is not None: body['statement'] = self.statement
+        if self.wait_timeout is not None: body['wait_timeout'] = self.wait_timeout
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
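`ExecuteStatementRequest` carries the knobs for the statement execution API; the usual entry point is the high-level client rather than building the request by hand. A minimal sketch (the warehouse ID is a placeholder):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
resp = w.statement_execution.execute_statement(statement='SELECT 1',
                                               warehouse_id='abc123',  # placeholder ID
                                               wait_timeout='30s')
print(resp.status.state)
```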
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExecuteStatementRequest:
         """Deserializes the ExecuteStatementRequest from a dictionary."""
@@ -2089,6 +2617,21 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExternalLink into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.expiration is not None: body['expiration'] = self.expiration
+        if self.external_link is not None: body['external_link'] = self.external_link
+        if self.http_headers: body['http_headers'] = self.http_headers
+        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body['next_chunk_internal_link'] = self.next_chunk_internal_link
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
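`ExternalLink` models the presigned result chunks returned when a statement runs with the `EXTERNAL_LINKS` disposition. A hedged fetch sketch (placeholder warehouse ID; `resp.result` may be empty while the statement is still pending):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import Disposition

w = WorkspaceClient()
resp = w.statement_execution.execute_statement(statement='SELECT 1',
                                               warehouse_id='abc123',  # placeholder ID
                                               disposition=Disposition.EXTERNAL_LINKS)
for link in (resp.result.external_links or []):
    print(link.external_link, link.row_count)
```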
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExternalLink:
         """Deserializes the ExternalLink from a dictionary."""
@@ -2129,6 +2672,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetResponse:
         """Deserializes the GetResponse from a dictionary."""
@@ -2148,6 +2699,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWarehousePermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehousePermissionLevelsResponse:
         """Deserializes the GetWarehousePermissionLevelsResponse from a dictionary."""
@@ -2272,6 +2829,32 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_stop_mins is not None: body['auto_stop_mins'] = self.auto_stop_mins
+        if self.channel: body['channel'] = self.channel
+        if self.cluster_size is not None: body['cluster_size'] = self.cluster_size
+        if self.creator_name is not None: body['creator_name'] = self.creator_name
+        if self.enable_photon is not None: body['enable_photon'] = self.enable_photon
+        if self.enable_serverless_compute is not None:
+            body['enable_serverless_compute'] = self.enable_serverless_compute
+        if self.health: body['health'] = self.health
+        if self.id is not None: body['id'] = self.id
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.jdbc_url is not None: body['jdbc_url'] = self.jdbc_url
+        if self.max_num_clusters is not None: body['max_num_clusters'] = self.max_num_clusters
+        if self.min_num_clusters is not None: body['min_num_clusters'] = self.min_num_clusters
+        if self.name is not None: body['name'] = self.name
+        if self.num_active_sessions is not None: body['num_active_sessions'] = self.num_active_sessions
+        if self.num_clusters is not None: body['num_clusters'] = self.num_clusters
+        if self.odbc_params: body['odbc_params'] = self.odbc_params
+        if self.spot_instance_policy is not None: body['spot_instance_policy'] = self.spot_instance_policy
+        if self.state is not None: body['state'] = self.state
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWarehouseResponse:
         """Deserializes the GetWarehouseResponse from a dictionary."""
@@ -2358,6 +2941,22 @@ def as_dict(self) -> dict:
             body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel: body['channel'] = self.channel
+        if self.config_param: body['config_param'] = self.config_param
+        if self.data_access_config: body['data_access_config'] = self.data_access_config
+        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
+        if self.global_param: body['global_param'] = self.global_param
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.security_policy is not None: body['security_policy'] = self.security_policy
+        if self.sql_configuration_parameters:
+            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceWarehouseConfigResponse:
         """Deserializes the GetWorkspaceWarehouseConfigResponse from a dictionary."""
@@ -2433,6 +3032,22 @@ def as_dict(self) -> dict:
         if self.user: body['user'] = self.user.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.id is not None: body['id'] = self.id
+        if self.last_triggered_at is not None: body['last_triggered_at'] = self.last_triggered_at
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query: body['query'] = self.query
+        if self.rearm is not None: body['rearm'] = self.rearm
+        if self.state is not None: body['state'] = self.state
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyAlert:
         """Deserializes the LegacyAlert from a dictionary."""
@@ -2463,74 +3078,49 @@ class LegacyQuery:
     can_edit: Optional[bool] = None
     """Describes whether the authenticated user is allowed to edit the definition of this query."""
 
-    created_at: Optional[str] = None
-    """The timestamp when this query was created."""
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
 
-    data_source_id: Optional[str] = None
-    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""
 
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
 
-    id: Optional[str] = None
-    """Query ID."""
-
-    is_archived: Optional[bool] = None
-    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
-    in search results. If this boolean is `true`, the `options` property for this query includes a
-    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
-
-    is_draft: Optional[bool] = None
-    """Whether the query is a draft. Draft queries only appear in list views for their owners.
-    Visualizations from draft queries cannot appear on dashboards."""
-
-    is_favorite: Optional[bool] = None
-    """Whether this query object appears in the current user's favorites list. This flag determines
-    whether the star icon for favorites is selected."""
-
-    is_safe: Optional[bool] = None
-    """Text parameter types are not safe from SQL injection for all types of data source. Set this
-    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
-    data source type where text type parameters are handled safely."""
-
-    last_modified_by: Optional[User] = None
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
 
-    last_modified_by_id: Optional[int] = None
-    """The ID of the user who last saved changes to this query."""
+    id: Optional[str] = None
+    """UUID identifying the query."""
 
-    latest_query_data_id: Optional[str] = None
-    """If there is a cached result for this query and user, this field includes the query result ID. If
-    this query uses parameters, this field is always null."""
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""
 
-    name: Optional[str] = None
-    """The title of this query that appears in list views, widget headings, and on the query page."""
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""
 
-    options: Optional[QueryOptions] = None
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
 
-    parent: Optional[str] = None
-    """The identifier of the workspace folder containing the object."""
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
 
-    permission_tier: Optional[PermissionLevel] = None
-    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
-    * `CAN_MANAGE`: Can manage the query"""
+    parent_path: Optional[str] = None
+    """Workspace path of the workspace folder containing the object."""
 
-    query: Optional[str] = None
-    """The text of the query to be run."""
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
 
-    query_hash: Optional[str] = None
-    """A SHA-256 hash of the query text along with the authenticated user ID."""
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
 
-    run_as_role: Optional[RunAsRole] = None
-    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
 
     tags: Optional[List[str]] = None
 
-    updated_at: Optional[str] = None
-    """The timestamp at which this query was last updated."""
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""
 
     user: Optional[User] = None
 
@@ -2542,6 +3132,29 @@ class LegacyQuery:
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
         if self.can_edit is not None: body['can_edit'] = self.can_edit
         if self.created_at is not None: body['created_at'] = self.created_at
         if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
@@ -2551,21 +3164,21 @@ def as_dict(self) -> dict:
         if self.is_draft is not None: body['is_draft'] = self.is_draft
         if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
         if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by
         if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
         if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
         if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
+        if self.options: body['options'] = self.options
         if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier
         if self.query is not None: body['query'] = self.query
         if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
-        if self.tags: body['tags'] = [v for v in self.tags]
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
         if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
+        if self.user: body['user'] = self.user
         if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.visualizations: body['visualizations'] = self.visualizations
         return body
 
     @classmethod
@@ -2575,21 +3188,16 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
                    created_at=d.get('created_at', None),
                    data_source_id=d.get('data_source_id', None),
                    description=d.get('description', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_favorite=d.get('is_favorite', None),
-                   is_safe=d.get('is_safe', None),
-                   last_modified_by=_from_dict(d, 'last_modified_by', User),
-                   last_modified_by_id=d.get('last_modified_by_id', None),
-                   latest_query_data_id=d.get('latest_query_data_id', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', QueryOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   query=d.get('query', None),
-                   query_hash=d.get('query_hash', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   parent_path=d.get('parent_path', None),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
                    tags=d.get('tags', None),
                    updated_at=d.get('updated_at', None),
                    user=_from_dict(d, 'user', User),
@@ -2639,6 +3247,19 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the LegacyVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query: body['query'] = self.query
+        if self.type is not None: body['type'] = self.type
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> LegacyVisualization:
         """Deserializes the LegacyVisualization from a dictionary."""
@@ -2671,6 +3292,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAlertsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponse:
         """Deserializes the ListAlertsResponse from a dictionary."""
@@ -2748,6 +3376,25 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAlertsResponseAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        if self.state is not None: body['state'] = self.state
+        if self.trigger_time is not None: body['trigger_time'] = self.trigger_time
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAlertsResponseAlert:
         """Deserializes the ListAlertsResponseAlert from a dictionary."""
@@ -2791,6 +3438,14 @@ def as_dict(self) -> dict:
         if self.res: body['res'] = [v.as_dict() for v in self.res]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueriesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.has_next_page is not None: body['has_next_page'] = self.has_next_page
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.res: body['res'] = self.res
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueriesResponse:
         """Deserializes the ListQueriesResponse from a dictionary."""
@@ -2812,6 +3467,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponse:
         """Deserializes the ListQueryObjectsResponse from a dictionary."""
@@ -2890,6 +3552,28 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListQueryObjectsResponseQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListQueryObjectsResponseQuery:
         """Deserializes the ListQueryObjectsResponseQuery from a dictionary."""
@@ -2934,6 +3618,15 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
@@ -2956,6 +3649,13 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVisualizationsForQueryResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVisualizationsForQueryResponse:
         """Deserializes the ListVisualizationsForQueryResponse from a dictionary."""
@@ -2974,6 +3674,12 @@ def as_dict(self) -> dict:
         if self.warehouses: body['warehouses'] = [v.as_dict() for v in self.warehouses]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListWarehousesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.warehouses: body['warehouses'] = self.warehouses
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListWarehousesResponse:
         """Deserializes the ListWarehousesResponse from a dictionary."""
@@ -2999,6 +3705,14 @@ def as_dict(self) -> dict:
         if self.suffix is not None: body['suffix'] = self.suffix
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MultiValuesOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.prefix is not None: body['prefix'] = self.prefix
+        if self.separator is not None: body['separator'] = self.separator
+        if self.suffix is not None: body['suffix'] = self.suffix
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MultiValuesOptions:
         """Deserializes the MultiValuesOptions from a dictionary."""
@@ -3017,6 +3731,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NumericValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> NumericValue:
         """Deserializes the NumericValue from a dictionary."""
@@ -3060,6 +3780,15 @@ def as_dict(self) -> dict:
         if self.protocol is not None: body['protocol'] = self.protocol
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the OdbcParams into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.hostname is not None: body['hostname'] = self.hostname
+        if self.path is not None: body['path'] = self.path
+        if self.port is not None: body['port'] = self.port
+        if self.protocol is not None: body['protocol'] = self.protocol
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> OdbcParams:
         """Deserializes the OdbcParams from a dictionary."""
@@ -3114,6 +3843,18 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Parameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enum_options is not None: body['enumOptions'] = self.enum_options
+        if self.multi_values_options: body['multiValuesOptions'] = self.multi_values_options
+        if self.name is not None: body['name'] = self.name
+        if self.query_id is not None: body['queryId'] = self.query_id
+        if self.title is not None: body['title'] = self.title
+        if self.type is not None: body['type'] = self.type
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Parameter:
         """Deserializes the Parameter from a dictionary."""
@@ -3232,6 +3973,29 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Query into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Query:
         """Deserializes the Query from a dictionary."""
@@ -3273,6 +4037,14 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryBackedValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.multi_values_options: body['multi_values_options'] = self.multi_values_options
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryBackedValue:
         """Deserializes the QueryBackedValue from a dictionary."""
@@ -3324,6 +4096,19 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryEditContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.query is not None: body['query'] = self.query
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryEditContent:
         """Deserializes the QueryEditContent from a dictionary."""
@@ -3363,6 +4148,16 @@ def as_dict(self) -> dict:
         if self.warehouse_ids: body['warehouse_ids'] = [v for v in self.warehouse_ids]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryFilter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.query_start_time_range: body['query_start_time_range'] = self.query_start_time_range
+        if self.statement_ids: body['statement_ids'] = self.statement_ids
+        if self.statuses: body['statuses'] = self.statuses
+        if self.user_ids: body['user_ids'] = self.user_ids
+        if self.warehouse_ids: body['warehouse_ids'] = self.warehouse_ids
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryFilter:
         """Deserializes the QueryFilter from a dictionary."""
@@ -3472,6 +4267,33 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel_used: body['channel_used'] = self.channel_used
+        if self.duration is not None: body['duration'] = self.duration
+        if self.endpoint_id is not None: body['endpoint_id'] = self.endpoint_id
+        if self.error_message is not None: body['error_message'] = self.error_message
+        if self.executed_as_user_id is not None: body['executed_as_user_id'] = self.executed_as_user_id
+        if self.executed_as_user_name is not None: body['executed_as_user_name'] = self.executed_as_user_name
+        if self.execution_end_time_ms is not None: body['execution_end_time_ms'] = self.execution_end_time_ms
+        if self.is_final is not None: body['is_final'] = self.is_final
+        if self.lookup_key is not None: body['lookup_key'] = self.lookup_key
+        if self.metrics: body['metrics'] = self.metrics
+        if self.plans_state is not None: body['plans_state'] = self.plans_state
+        if self.query_end_time_ms is not None: body['query_end_time_ms'] = self.query_end_time_ms
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.query_start_time_ms is not None: body['query_start_time_ms'] = self.query_start_time_ms
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.rows_produced is not None: body['rows_produced'] = self.rows_produced
+        if self.spark_ui_url is not None: body['spark_ui_url'] = self.spark_ui_url
+        if self.statement_type is not None: body['statement_type'] = self.statement_type
+        if self.status is not None: body['status'] = self.status
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.user_name is not None: body['user_name'] = self.user_name
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryInfo:
         """Deserializes the QueryInfo from a dictionary."""
@@ -3522,6 +4344,15 @@ def as_dict(self) -> dict:
         if self.results: body['results'] = [v.as_dict() for v in self.results]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryList into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.count is not None: body['count'] = self.count
+        if self.page is not None: body['page'] = self.page
+        if self.page_size is not None: body['page_size'] = self.page_size
+        if self.results: body['results'] = self.results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryList:
         """Deserializes the QueryList from a dictionary."""
@@ -3605,8 +4436,38 @@ class QueryMetrics:
     write_remote_bytes: Optional[int] = None
     """Size pf persistent data written to cloud object storage in your cloud tenant, in bytes."""
 
-    def as_dict(self) -> dict:
-        """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
+    def as_dict(self) -> dict:
+        """Serializes the QueryMetrics into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
+        if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
+        if self.network_sent_bytes is not None: body['network_sent_bytes'] = self.network_sent_bytes
+        if self.overloading_queue_start_timestamp is not None:
+            body['overloading_queue_start_timestamp'] = self.overloading_queue_start_timestamp
+        if self.photon_total_time_ms is not None: body['photon_total_time_ms'] = self.photon_total_time_ms
+        if self.provisioning_queue_start_timestamp is not None:
+            body['provisioning_queue_start_timestamp'] = self.provisioning_queue_start_timestamp
+        if self.pruned_bytes is not None: body['pruned_bytes'] = self.pruned_bytes
+        if self.pruned_files_count is not None: body['pruned_files_count'] = self.pruned_files_count
+        if self.query_compilation_start_timestamp is not None:
+            body['query_compilation_start_timestamp'] = self.query_compilation_start_timestamp
+        if self.read_bytes is not None: body['read_bytes'] = self.read_bytes
+        if self.read_cache_bytes is not None: body['read_cache_bytes'] = self.read_cache_bytes
+        if self.read_files_count is not None: body['read_files_count'] = self.read_files_count
+        if self.read_partitions_count is not None: body['read_partitions_count'] = self.read_partitions_count
+        if self.read_remote_bytes is not None: body['read_remote_bytes'] = self.read_remote_bytes
+        if self.result_fetch_time_ms is not None: body['result_fetch_time_ms'] = self.result_fetch_time_ms
+        if self.result_from_cache is not None: body['result_from_cache'] = self.result_from_cache
+        if self.rows_produced_count is not None: body['rows_produced_count'] = self.rows_produced_count
+        if self.rows_read_count is not None: body['rows_read_count'] = self.rows_read_count
+        if self.spill_to_disk_bytes is not None: body['spill_to_disk_bytes'] = self.spill_to_disk_bytes
+        if self.task_total_time_ms is not None: body['task_total_time_ms'] = self.task_total_time_ms
+        if self.total_time_ms is not None: body['total_time_ms'] = self.total_time_ms
+        if self.write_remote_bytes is not None: body['write_remote_bytes'] = self.write_remote_bytes
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryMetrics into a shallow dictionary of its immediate attributes."""
         body = {}
         if self.compilation_time_ms is not None: body['compilation_time_ms'] = self.compilation_time_ms
         if self.execution_time_ms is not None: body['execution_time_ms'] = self.execution_time_ms
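
`QueryMetrics` surfaces per-query execution counters on `QueryInfo.metrics` when listing query history. A hedged usage sketch, assuming `w.query_history.list` accepts an `include_metrics` flag and returns the `ListQueriesResponse` defined earlier in this file (both assumptions; neither signature appears in these hunks):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# List recent warehouse queries and print a few of the metrics fields
# added above. `res`, `metrics`, `total_time_ms`, and `read_bytes` match
# the generated fields in this patch; `include_metrics=True` is an
# assumed parameter of the list call.
resp = w.query_history.list(include_metrics=True)
for q in resp.res or []:
    if q.metrics:
        print(q.query_id, q.metrics.total_time_ms, q.metrics.read_bytes)
```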
@@ -3685,6 +4546,15 @@ def as_dict(self) -> dict:
         if self.schema is not None: body['schema'] = self.schema
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.moved_to_trash_at is not None: body['moved_to_trash_at'] = self.moved_to_trash_at
+        if self.parameters: body['parameters'] = self.parameters
+        if self.schema is not None: body['schema'] = self.schema
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryOptions:
         """Deserializes the QueryOptions from a dictionary."""
@@ -3734,6 +4604,19 @@ def as_dict(self) -> dict:
         if self.title is not None: body['title'] = self.title
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryParameter into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.date_range_value: body['date_range_value'] = self.date_range_value
+        if self.date_value: body['date_value'] = self.date_value
+        if self.enum_value: body['enum_value'] = self.enum_value
+        if self.name is not None: body['name'] = self.name
+        if self.numeric_value: body['numeric_value'] = self.numeric_value
+        if self.query_backed_value: body['query_backed_value'] = self.query_backed_value
+        if self.text_value: body['text_value'] = self.text_value
+        if self.title is not None: body['title'] = self.title
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryParameter:
         """Deserializes the QueryParameter from a dictionary."""
@@ -3791,6 +4674,19 @@ def as_dict(self) -> dict:
         if self.tags: body['tags'] = [v for v in self.tags]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryPostContent into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.description is not None: body['description'] = self.description
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options
+        if self.parent is not None: body['parent'] = self.parent
+        if self.query is not None: body['query'] = self.query
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role
+        if self.tags: body['tags'] = self.tags
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryPostContent:
         """Deserializes the QueryPostContent from a dictionary."""
@@ -3858,6 +4754,13 @@ def as_dict(self) -> dict:
             body['configuration_pairs'] = [v.as_dict() for v in self.configuration_pairs]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepeatedEndpointConfPairs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.config_pair: body['config_pair'] = self.config_pair
+        if self.configuration_pairs: body['configuration_pairs'] = self.configuration_pairs
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepeatedEndpointConfPairs:
         """Deserializes the RepeatedEndpointConfPairs from a dictionary."""
@@ -3873,6 +4776,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RestoreResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RestoreResponse:
         """Deserializes the RestoreResponse from a dictionary."""
@@ -3924,6 +4832,20 @@ def as_dict(self) -> dict:
         if self.row_offset is not None: body['row_offset'] = self.row_offset
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.byte_count is not None: body['byte_count'] = self.byte_count
+        if self.chunk_index is not None: body['chunk_index'] = self.chunk_index
+        if self.data_array: body['data_array'] = self.data_array
+        if self.external_links: body['external_links'] = self.external_links
+        if self.next_chunk_index is not None: body['next_chunk_index'] = self.next_chunk_index
+        if self.next_chunk_internal_link is not None:
+            body['next_chunk_internal_link'] = self.next_chunk_internal_link
+        if self.row_count is not None: body['row_count'] = self.row_count
+        if self.row_offset is not None: body['row_offset'] = self.row_offset
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
@@ -3974,6 +4896,18 @@ def as_dict(self) -> dict:
         if self.truncated is not None: body['truncated'] = self.truncated
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.chunks: body['chunks'] = self.chunks
+        if self.format is not None: body['format'] = self.format
+        if self.schema: body['schema'] = self.schema
+        if self.total_byte_count is not None: body['total_byte_count'] = self.total_byte_count
+        if self.total_chunk_count is not None: body['total_chunk_count'] = self.total_chunk_count
+        if self.total_row_count is not None: body['total_row_count'] = self.total_row_count
+        if self.truncated is not None: body['truncated'] = self.truncated
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
@@ -4001,6 +4935,13 @@ def as_dict(self) -> dict:
         if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultSchema into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column_count is not None: body['column_count'] = self.column_count
+        if self.columns: body['columns'] = self.columns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultSchema:
         """Deserializes the ResultSchema from a dictionary."""
@@ -4035,6 +4976,13 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ServiceError into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error_code is not None: body['error_code'] = self.error_code
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ServiceError:
         """Deserializes the ServiceError from a dictionary."""
@@ -4078,6 +5026,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetResponse:
         """Deserializes the SetResponse from a dictionary."""
@@ -4138,6 +5094,22 @@ def as_dict(self) -> dict:
             body['sql_configuration_parameters'] = self.sql_configuration_parameters.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetWorkspaceWarehouseConfigRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.channel: body['channel'] = self.channel
+        if self.config_param: body['config_param'] = self.config_param
+        if self.data_access_config: body['data_access_config'] = self.data_access_config
+        if self.enabled_warehouse_types: body['enabled_warehouse_types'] = self.enabled_warehouse_types
+        if self.global_param: body['global_param'] = self.global_param
+        if self.google_service_account is not None:
+            body['google_service_account'] = self.google_service_account
+        if self.instance_profile_arn is not None: body['instance_profile_arn'] = self.instance_profile_arn
+        if self.security_policy is not None: body['security_policy'] = self.security_policy
+        if self.sql_configuration_parameters:
+            body['sql_configuration_parameters'] = self.sql_configuration_parameters
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigRequest:
         """Deserializes the SetWorkspaceWarehouseConfigRequest from a dictionary."""
@@ -4170,6 +5142,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SetWorkspaceWarehouseConfigResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SetWorkspaceWarehouseConfigResponse:
         """Deserializes the SetWorkspaceWarehouseConfigResponse from a dictionary."""
@@ -4192,6 +5169,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StartWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StartWarehouseResponse:
         """Deserializes the StartWarehouseResponse from a dictionary."""
@@ -4233,6 +5215,14 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementParameterListItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        if self.type is not None: body['type'] = self.type
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementParameterListItem:
         """Deserializes the StatementParameterListItem from a dictionary."""
@@ -4262,6 +5252,15 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest
+        if self.result: body['result'] = self.result
+        if self.statement_id is not None: body['statement_id'] = self.statement_id
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementResponse:
         """Deserializes the StatementResponse from a dictionary."""
@@ -4306,6 +5305,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StatementStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.error: body['error'] = self.error
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StatementStatus:
         """Deserializes the StatementStatus from a dictionary."""
@@ -4329,6 +5335,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the StopWarehouseResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> StopWarehouseResponse:
         """Deserializes the StopWarehouseResponse from a dictionary."""
@@ -4345,6 +5356,12 @@ def as_dict(self) -> dict:
         if self.message is not None: body['message'] = self.message.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Success into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Success:
         """Deserializes the Success from a dictionary."""
@@ -4375,6 +5392,14 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TerminationReason into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.code is not None: body['code'] = self.code
+        if self.parameters: body['parameters'] = self.parameters
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TerminationReason:
         """Deserializes the TerminationReason from a dictionary."""
@@ -4486,6 +5511,12 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TextValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TextValue:
         """Deserializes the TextValue from a dictionary."""
@@ -4507,6 +5538,13 @@ def as_dict(self) -> dict:
         if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TimeRange into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.end_time_ms is not None: body['end_time_ms'] = self.end_time_ms
+        if self.start_time_ms is not None: body['start_time_ms'] = self.start_time_ms
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TimeRange:
         """Deserializes the TimeRange from a dictionary."""
@@ -4524,6 +5562,12 @@ def as_dict(self) -> dict:
         if self.new_owner is not None: body['new_owner'] = self.new_owner
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the TransferOwnershipObjectId into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.new_owner is not None: body['new_owner'] = self.new_owner
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
         """Deserializes the TransferOwnershipObjectId from a dictionary."""
@@ -4533,9 +5577,15 @@ def from_dict(cls, d: Dict[str, any]) -> TransferOwnershipObjectId:
 @dataclass
 class UpdateAlertRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     alert: Optional[UpdateAlertRequestAlert] = None
 
@@ -4549,6 +5599,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAlertRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.alert: body['alert'] = self.alert
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequest:
         """Deserializes the UpdateAlertRequest from a dictionary."""
@@ -4602,6 +5660,19 @@ def as_dict(self) -> dict:
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateAlertRequestAlert into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.condition: body['condition'] = self.condition
+        if self.custom_body is not None: body['custom_body'] = self.custom_body
+        if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
         """Deserializes the UpdateAlertRequestAlert from a dictionary."""
@@ -4618,9 +5689,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateAlertRequestAlert:
 @dataclass
 class UpdateQueryRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     id: Optional[str] = None
 
@@ -4634,6 +5711,14 @@ def as_dict(self) -> dict:
         if self.update_mask is not None: body['update_mask'] = self.update_mask
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateQueryRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.query: body['query'] = self.query
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequest:
         """Deserializes the UpdateQueryRequest from a dictionary."""
@@ -4692,6 +5777,22 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateQueryRequestQuery into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = self.parameters
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode
+        if self.schema is not None: body['schema'] = self.schema
+        if self.tags: body['tags'] = self.tags
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateQueryRequestQuery:
         """Deserializes the UpdateQueryRequestQuery from a dictionary."""
@@ -4716,6 +5817,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
         """Deserializes the UpdateResponse from a dictionary."""
@@ -4725,9 +5831,15 @@ def from_dict(cls, d: Dict[str, any]) -> UpdateResponse:
 @dataclass
 class UpdateVisualizationRequest:
     update_mask: str
-    """Field mask is required to be passed into the PATCH request. Field mask specifies which fields of
-    the setting payload will be updated. The field mask needs to be supplied as single string. To
-    specify multiple fields in the field mask, use comma as the separator (no space)."""
+    """The field mask must be a single string, with multiple fields separated by commas (no spaces).
+    The field path is relative to the resource object, using a dot (`.`) to navigate sub-fields
+    (e.g., `author.given_name`). Specification of elements in sequence or map fields is not allowed,
+    as only the entire collection field can be specified. Field names must exactly match the
+    resource field names.
+    
+    A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+    fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the
+    API changes in the future."""
 
     id: Optional[str] = None
 
@@ -4741,6 +5853,14 @@ def as_dict(self) -> dict:
         if self.visualization: body['visualization'] = self.visualization.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.update_mask is not None: body['update_mask'] = self.update_mask
+        if self.visualization: body['visualization'] = self.visualization
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequest:
         """Deserializes the UpdateVisualizationRequest from a dictionary."""
@@ -4774,6 +5894,15 @@ def as_dict(self) -> dict:
         if self.type is not None: body['type'] = self.type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateVisualizationRequestVisualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateVisualizationRequestVisualization:
         """Deserializes the UpdateVisualizationRequestVisualization from a dictionary."""
@@ -4799,6 +5928,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the User into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.email is not None: body['email'] = self.email
+        if self.id is not None: body['id'] = self.id
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> User:
         """Deserializes the User from a dictionary."""
@@ -4846,6 +5983,19 @@ def as_dict(self) -> dict:
         if self.update_time is not None: body['update_time'] = self.update_time
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Visualization into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.id is not None: body['id'] = self.id
+        if self.query_id is not None: body['query_id'] = self.query_id
+        if self.serialized_options is not None: body['serialized_options'] = self.serialized_options
+        if self.serialized_query_plan is not None: body['serialized_query_plan'] = self.serialized_query_plan
+        if self.type is not None: body['type'] = self.type
+        if self.update_time is not None: body['update_time'] = self.update_time
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Visualization:
         """Deserializes the Visualization from a dictionary."""
@@ -4883,6 +6033,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlRequest:
         """Deserializes the WarehouseAccessControlRequest from a dictionary."""
@@ -4920,6 +6080,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseAccessControlResponse:
         """Deserializes the WarehouseAccessControlResponse from a dictionary."""
@@ -4947,6 +6118,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermission:
         """Deserializes the WarehousePermission from a dictionary."""
@@ -4981,6 +6160,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissions:
         """Deserializes the WarehousePermissions from a dictionary."""
@@ -5004,6 +6191,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsDescription:
         """Deserializes the WarehousePermissionsDescription from a dictionary."""
@@ -5026,6 +6220,13 @@ def as_dict(self) -> dict:
         if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehousePermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehousePermissionsRequest:
         """Deserializes the WarehousePermissionsRequest from a dictionary."""
@@ -5050,6 +6251,13 @@ def as_dict(self) -> dict:
         if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WarehouseTypePair into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.enabled is not None: body['enabled'] = self.enabled
+        if self.warehouse_type is not None: body['warehouse_type'] = self.warehouse_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WarehouseTypePair:
         """Deserializes the WarehouseTypePair from a dictionary."""
@@ -5090,6 +6298,15 @@ def as_dict(self) -> dict:
         if self.width is not None: body['width'] = self.width
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Widget into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.id is not None: body['id'] = self.id
+        if self.options: body['options'] = self.options
+        if self.visualization: body['visualization'] = self.visualization
+        if self.width is not None: body['width'] = self.width
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Widget:
         """Deserializes the Widget from a dictionary."""
@@ -5136,6 +6353,18 @@ def as_dict(self) -> dict:
         if self.updated_at is not None: body['updated_at'] = self.updated_at
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WidgetOptions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.description is not None: body['description'] = self.description
+        if self.is_hidden is not None: body['isHidden'] = self.is_hidden
+        if self.parameter_mappings: body['parameterMappings'] = self.parameter_mappings
+        if self.position: body['position'] = self.position
+        if self.title is not None: body['title'] = self.title
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetOptions:
         """Deserializes the WidgetOptions from a dictionary."""
@@ -5178,6 +6407,16 @@ def as_dict(self) -> dict:
         if self.size_y is not None: body['sizeY'] = self.size_y
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WidgetPosition into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.auto_height is not None: body['autoHeight'] = self.auto_height
+        if self.col is not None: body['col'] = self.col
+        if self.row is not None: body['row'] = self.row
+        if self.size_x is not None: body['sizeX'] = self.size_x
+        if self.size_y is not None: body['sizeY'] = self.size_y
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WidgetPosition:
         """Deserializes the WidgetPosition from a dictionary."""
@@ -5280,9 +6519,15 @@ def update(self, id: str, update_mask: str, *, alert: Optional[UpdateAlertReques
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param alert: :class:`UpdateAlertRequestAlert` (optional)
         
         :returns: :class:`Alert`
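
The reworded docstring is worth internalizing: list the fields you are changing, comma-separated, and avoid `*`. A sketch against the alerts API (assuming a configured `WorkspaceClient`; the request-object field shown is illustrative). The same mask semantics apply to the queries and visualizations `update` calls further down.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service import sql

w = WorkspaceClient()

# Update only the display name; every other field is left untouched.
# A mask of '*' would instead replace the whole alert.
updated = w.alerts.update(
    id='<alert-id>',
    update_mask='display_name',
    alert=sql.UpdateAlertRequestAlert(display_name='My renamed alert'),
)
```
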
@@ -5989,9 +7234,15 @@ def update(self, id: str, update_mask: str, *, query: Optional[UpdateQueryReques
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
@@ -6363,9 +7614,15 @@ def update(self,
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
@@ -6502,6 +7759,24 @@ def update(self,
         return LegacyVisualization.from_dict(res)
 
 
+class RedashConfigAPI:
+    """Redash V2 service for workspace configurations (internal)"""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def get_config(self) -> ClientConfig:
+        """Read workspace configuration for Redash-v2.
+        
+        :returns: :class:`ClientConfig`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do('GET', '/api/2.0/redash-v2/config', headers=headers)
+        return ClientConfig.from_dict(res)
+
+
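
The service is flagged internal, so the following is only a sketch; whether, and under what attribute name, `RedashConfigAPI` is surfaced on `WorkspaceClient` is an assumption here:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Hypothetical accessor name; the service is internal and may not be
# exposed on the client at all.
config = w.redash_config.get_config()
print(config.as_dict())
```
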
 class StatementExecutionAPI:
     """The Databricks SQL Statement Execution API can be used to execute SQL statements on a SQL warehouse and
     fetch the result.
@@ -6579,11 +7854,10 @@ class StatementExecutionAPI:
     outstanding statement might have already completed execution when the cancel request arrives. Polling for
     status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
     are approximate, occur server-side, and cannot account for things such as caller delays and network
-    latency from caller to service. - The system will auto-close a statement after one hour if the client
-    stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
-    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
-    Execution API to cancel it.
+    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
+    once every 15 minutes. - The results are only available for one hour after success; polling does not
+    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
+    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html"""
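
The revised guidance replaces the old one-hour auto-close with a 15-minute keep-alive requirement. A hedged polling sketch that satisfies it, using the statement execution API's documented async mode (`wait_timeout='0s'`):

```python
import time

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import StatementState

w = WorkspaceClient()

resp = w.statement_execution.execute_statement(
    statement='SELECT 1',
    warehouse_id='<warehouse-id>',
    wait_timeout='0s',  # return immediately and poll manually
)

TERMINAL = {StatementState.SUCCEEDED, StatementState.FAILED,
            StatementState.CANCELED, StatementState.CLOSED}

while resp.status.state not in TERMINAL:
    # Any interval well under 15 minutes keeps the statement alive.
    time.sleep(30)
    resp = w.statement_execution.get_statement(resp.statement_id)
```
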
@@ -7243,7 +8517,8 @@ def set_permissions(self,
                         ) -> WarehousePermissions:
         """Set SQL warehouse permissions.
         
-        Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
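
The clarified docstring spells out replace semantics: `set_permissions` overwrites the warehouse's direct grants, and omitting the list deletes them. A sketch, assuming the access-control request types from this module:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.sql import (WarehouseAccessControlRequest,
                                        WarehousePermissionLevel)

w = WorkspaceClient()

# Replaces all direct permissions on the warehouse with this single grant;
# passing no access_control_list would delete the direct grants entirely.
w.warehouses.set_permissions(
    warehouse_id='<warehouse-id>',
    access_control_list=[
        WarehouseAccessControlRequest(
            group_name='data-analysts',
            permission_level=WarehousePermissionLevel.CAN_USE,
        )
    ],
)
```
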
diff --git a/databricks/sdk/service/vectorsearch.py b/databricks/sdk/service/vectorsearch.py
index d6c28b840..f1e6aeaa3 100755
--- a/databricks/sdk/service/vectorsearch.py
+++ b/databricks/sdk/service/vectorsearch.py
@@ -29,6 +29,12 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ColumnInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ColumnInfo:
         """Deserializes the ColumnInfo from a dictionary."""
@@ -50,6 +56,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateEndpoint into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateEndpoint:
         """Deserializes the CreateEndpoint from a dictionary."""
@@ -93,6 +106,17 @@ def as_dict(self) -> dict:
         if self.primary_key is not None: body['primary_key'] = self.primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
+        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexRequest:
         """Deserializes the CreateVectorIndexRequest from a dictionary."""
@@ -116,6 +140,12 @@ def as_dict(self) -> dict:
         if self.vector_index: body['vector_index'] = self.vector_index.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.vector_index: body['vector_index'] = self.vector_index
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateVectorIndexResponse:
         """Deserializes the CreateVectorIndexResponse from a dictionary."""
@@ -139,6 +169,13 @@ def as_dict(self) -> dict:
         if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
+        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataResult:
         """Deserializes the DeleteDataResult from a dictionary."""
@@ -171,6 +208,13 @@ def as_dict(self) -> dict:
         if self.primary_keys: body['primary_keys'] = [v for v in self.primary_keys]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.primary_keys: body['primary_keys'] = self.primary_keys
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexRequest:
         """Deserializes the DeleteDataVectorIndexRequest from a dictionary."""
@@ -194,6 +238,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result: body['result'] = self.result
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteDataVectorIndexResponse:
         """Deserializes the DeleteDataVectorIndexResponse from a dictionary."""
@@ -209,6 +260,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteEndpointResponse:
         """Deserializes the DeleteEndpointResponse from a dictionary."""
@@ -223,6 +279,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteIndexResponse:
         """Deserializes the DeleteIndexResponse from a dictionary."""
@@ -272,6 +333,18 @@ def as_dict(self) -> dict:
         if self.source_table is not None: body['source_table'] = self.source_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaSyncVectorIndexSpecRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns_to_sync: body['columns_to_sync'] = self.columns_to_sync
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
+        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
+        if self.source_table is not None: body['source_table'] = self.source_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecRequest:
         """Deserializes the DeltaSyncVectorIndexSpecRequest from a dictionary."""
@@ -325,6 +398,18 @@ def as_dict(self) -> dict:
         if self.source_table is not None: body['source_table'] = self.source_table
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeltaSyncVectorIndexSpecResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.embedding_writeback_table is not None:
+            body['embedding_writeback_table'] = self.embedding_writeback_table
+        if self.pipeline_id is not None: body['pipeline_id'] = self.pipeline_id
+        if self.pipeline_type is not None: body['pipeline_type'] = self.pipeline_type
+        if self.source_table is not None: body['source_table'] = self.source_table
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeltaSyncVectorIndexSpecResponse:
         """Deserializes the DeltaSyncVectorIndexSpecResponse from a dictionary."""
@@ -363,6 +448,14 @@ def as_dict(self) -> dict:
         if self.schema_json is not None: body['schema_json'] = self.schema_json
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DirectAccessVectorIndexSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_source_columns: body['embedding_source_columns'] = self.embedding_source_columns
+        if self.embedding_vector_columns: body['embedding_vector_columns'] = self.embedding_vector_columns
+        if self.schema_json is not None: body['schema_json'] = self.schema_json
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec:
         """Deserializes the DirectAccessVectorIndexSpec from a dictionary."""
@@ -389,6 +482,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingSourceColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_model_endpoint_name is not None:
+            body['embedding_model_endpoint_name'] = self.embedding_model_endpoint_name
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingSourceColumn:
         """Deserializes the EmbeddingSourceColumn from a dictionary."""
@@ -411,6 +512,13 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EmbeddingVectorColumn into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.embedding_dimension is not None: body['embedding_dimension'] = self.embedding_dimension
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EmbeddingVectorColumn:
         """Deserializes the EmbeddingVectorColumn from a dictionary."""
@@ -461,6 +569,21 @@ def as_dict(self) -> dict:
         if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creation_timestamp is not None: body['creation_timestamp'] = self.creation_timestamp
+        if self.creator is not None: body['creator'] = self.creator
+        if self.endpoint_status: body['endpoint_status'] = self.endpoint_status
+        if self.endpoint_type is not None: body['endpoint_type'] = self.endpoint_type
+        if self.id is not None: body['id'] = self.id
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        if self.last_updated_user is not None: body['last_updated_user'] = self.last_updated_user
+        if self.name is not None: body['name'] = self.name
+        if self.num_indexes is not None: body['num_indexes'] = self.num_indexes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointInfo:
         """Deserializes the EndpointInfo from a dictionary."""
@@ -492,6 +615,13 @@ def as_dict(self) -> dict:
         if self.state is not None: body['state'] = self.state.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EndpointStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.message is not None: body['message'] = self.message
+        if self.state is not None: body['state'] = self.state
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> EndpointStatus:
         """Deserializes the EndpointStatus from a dictionary."""
@@ -528,6 +658,13 @@ def as_dict(self) -> dict:
         if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListEndpointResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoints: body['endpoints'] = self.endpoints
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListEndpointResponse:
         """Deserializes the ListEndpointResponse from a dictionary."""
@@ -545,6 +682,12 @@ def as_dict(self) -> dict:
         if self.values: body['values'] = [v.as_dict() for v in self.values]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListValue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.values: body['values'] = self.values
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListValue:
         """Deserializes the ListValue from a dictionary."""
@@ -566,6 +709,13 @@ def as_dict(self) -> dict:
         if self.vector_indexes: body['vector_indexes'] = [v.as_dict() for v in self.vector_indexes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListVectorIndexesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.vector_indexes: body['vector_indexes'] = self.vector_indexes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListVectorIndexesResponse:
         """Deserializes the ListVectorIndexesResponse from a dictionary."""
@@ -590,6 +740,13 @@ def as_dict(self) -> dict:
         if self.value: body['value'] = self.value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MapStringValueEntry into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MapStringValueEntry:
         """Deserializes the MapStringValueEntry from a dictionary."""
@@ -628,6 +785,16 @@ def as_dict(self) -> dict:
         if self.primary_key is not None: body['primary_key'] = self.primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MiniVectorIndex into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator is not None: body['creator'] = self.creator
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MiniVectorIndex:
         """Deserializes the MiniVectorIndex from a dictionary."""
@@ -672,6 +839,14 @@ def as_dict(self) -> dict:
         if self.page_token is not None: body['page_token'] = self.page_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexNextPageRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.page_token is not None: body['page_token'] = self.page_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexNextPageRequest:
         """Deserializes the QueryVectorIndexNextPageRequest from a dictionary."""
@@ -724,6 +899,19 @@ def as_dict(self) -> dict:
         if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.columns: body['columns'] = self.columns
+        if self.filters_json is not None: body['filters_json'] = self.filters_json
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.num_results is not None: body['num_results'] = self.num_results
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.query_type is not None: body['query_type'] = self.query_type
+        if self.query_vector: body['query_vector'] = self.query_vector
+        if self.score_threshold is not None: body['score_threshold'] = self.score_threshold
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexRequest:
         """Deserializes the QueryVectorIndexRequest from a dictionary."""
@@ -758,6 +946,14 @@ def as_dict(self) -> dict:
         if self.result: body['result'] = self.result.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the QueryVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.manifest: body['manifest'] = self.manifest
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.result: body['result'] = self.result
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> QueryVectorIndexResponse:
         """Deserializes the QueryVectorIndexResponse from a dictionary."""
@@ -783,6 +979,13 @@ def as_dict(self) -> dict:
         if self.row_count is not None: body['row_count'] = self.row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultData into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_array: body['data_array'] = self.data_array
+        if self.row_count is not None: body['row_count'] = self.row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultData:
         """Deserializes the ResultData from a dictionary."""
@@ -806,6 +1009,13 @@ def as_dict(self) -> dict:
         if self.columns: body['columns'] = [v.as_dict() for v in self.columns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ResultManifest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.column_count is not None: body['column_count'] = self.column_count
+        if self.columns: body['columns'] = self.columns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ResultManifest:
         """Deserializes the ResultManifest from a dictionary."""
@@ -833,6 +1043,14 @@ def as_dict(self) -> dict:
         if self.num_results is not None: body['num_results'] = self.num_results
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ScanVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        if self.num_results is not None: body['num_results'] = self.num_results
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexRequest:
         """Deserializes the ScanVectorIndexRequest from a dictionary."""
@@ -858,6 +1076,13 @@ def as_dict(self) -> dict:
         if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ScanVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data: body['data'] = self.data
+        if self.last_primary_key is not None: body['last_primary_key'] = self.last_primary_key
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ScanVectorIndexResponse:
         """Deserializes the ScanVectorIndexResponse from a dictionary."""
@@ -875,6 +1100,12 @@ def as_dict(self) -> dict:
         if self.fields: body['fields'] = [v.as_dict() for v in self.fields]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Struct into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.fields: body['fields'] = self.fields
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Struct:
         """Deserializes the Struct from a dictionary."""
@@ -889,6 +1120,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SyncIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SyncIndexResponse:
         """Deserializes the SyncIndexResponse from a dictionary."""
@@ -912,6 +1148,13 @@ def as_dict(self) -> dict:
         if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataResult into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.failed_primary_keys: body['failed_primary_keys'] = self.failed_primary_keys
+        if self.success_row_count is not None: body['success_row_count'] = self.success_row_count
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataResult:
         """Deserializes the UpsertDataResult from a dictionary."""
@@ -944,6 +1187,13 @@ def as_dict(self) -> dict:
         if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataVectorIndexRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_name is not None: body['index_name'] = self.index_name
+        if self.inputs_json is not None: body['inputs_json'] = self.inputs_json
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexRequest:
         """Deserializes the UpsertDataVectorIndexRequest from a dictionary."""
@@ -967,6 +1217,13 @@ def as_dict(self) -> dict:
         if self.status is not None: body['status'] = self.status.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpsertDataVectorIndexResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.result: body['result'] = self.result
+        if self.status is not None: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpsertDataVectorIndexResponse:
         """Deserializes the UpsertDataVectorIndexResponse from a dictionary."""
@@ -999,6 +1256,17 @@ def as_dict(self) -> dict:
         if self.struct_value: body['struct_value'] = self.struct_value.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Value into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bool_value is not None: body['bool_value'] = self.bool_value
+        if self.list_value: body['list_value'] = self.list_value
+        if self.null_value is not None: body['null_value'] = self.null_value
+        if self.number_value is not None: body['number_value'] = self.number_value
+        if self.string_value is not None: body['string_value'] = self.string_value
+        if self.struct_value: body['struct_value'] = self.struct_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Value:
         """Deserializes the Value from a dictionary."""
@@ -1052,6 +1320,19 @@ def as_dict(self) -> dict:
         if self.status: body['status'] = self.status.as_dict()
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VectorIndex into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.creator is not None: body['creator'] = self.creator
+        if self.delta_sync_index_spec: body['delta_sync_index_spec'] = self.delta_sync_index_spec
+        if self.direct_access_index_spec: body['direct_access_index_spec'] = self.direct_access_index_spec
+        if self.endpoint_name is not None: body['endpoint_name'] = self.endpoint_name
+        if self.index_type is not None: body['index_type'] = self.index_type
+        if self.name is not None: body['name'] = self.name
+        if self.primary_key is not None: body['primary_key'] = self.primary_key
+        if self.status: body['status'] = self.status
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndex:
         """Deserializes the VectorIndex from a dictionary."""
@@ -1090,6 +1371,15 @@ def as_dict(self) -> dict:
         if self.ready is not None: body['ready'] = self.ready
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the VectorIndexStatus into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.index_url is not None: body['index_url'] = self.index_url
+        if self.indexed_row_count is not None: body['indexed_row_count'] = self.indexed_row_count
+        if self.message is not None: body['message'] = self.message
+        if self.ready is not None: body['ready'] = self.ready
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> VectorIndexStatus:
         """Deserializes the VectorIndexStatus from a dictionary."""
diff --git a/databricks/sdk/service/workspace.py b/databricks/sdk/service/workspace.py
index 7c8bfbd5e..eb5418987 100755
--- a/databricks/sdk/service/workspace.py
+++ b/databricks/sdk/service/workspace.py
@@ -29,6 +29,13 @@ def as_dict(self) -> dict:
         if self.principal is not None: body['principal'] = self.principal
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AclItem into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission is not None: body['permission'] = self.permission
+        if self.principal is not None: body['principal'] = self.principal
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AclItem:
         """Deserializes the AclItem from a dictionary."""
@@ -57,6 +64,13 @@ def as_dict(self) -> dict:
         if self.resource_id is not None: body['resource_id'] = self.resource_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureKeyVaultSecretScopeMetadata into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.dns_name is not None: body['dns_name'] = self.dns_name
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> AzureKeyVaultSecretScopeMetadata:
         """Deserializes the AzureKeyVaultSecretScopeMetadata from a dictionary."""
@@ -91,6 +105,14 @@ def as_dict(self) -> dict:
         if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsRequest:
         """Deserializes the CreateCredentialsRequest from a dictionary."""
@@ -119,6 +141,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateCredentialsResponse:
         """Deserializes the CreateCredentialsResponse from a dictionary."""
@@ -154,6 +184,15 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRepoRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoRequest:
         """Deserializes the CreateRepoRequest from a dictionary."""
@@ -198,6 +237,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateRepoResponse:
         """Deserializes the CreateRepoResponse from a dictionary."""
@@ -234,6 +285,16 @@ def as_dict(self) -> dict:
         if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.backend_azure_keyvault: body['backend_azure_keyvault'] = self.backend_azure_keyvault
+        if self.initial_manage_principal is not None:
+            body['initial_manage_principal'] = self.initial_manage_principal
+        if self.scope is not None: body['scope'] = self.scope
+        if self.scope_backend_type is not None: body['scope_backend_type'] = self.scope_backend_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateScope:
         """Deserializes the CreateScope from a dictionary."""
@@ -252,6 +313,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CreateScopeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CreateScopeResponse:
         """Deserializes the CreateScopeResponse from a dictionary."""
@@ -278,6 +344,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the CredentialInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> CredentialInfo:
         """Deserializes the CredentialInfo from a dictionary."""
@@ -303,6 +377,13 @@ def as_dict(self) -> dict:
         if self.recursive is not None: body['recursive'] = self.recursive
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Delete into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        if self.recursive is not None: body['recursive'] = self.recursive
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Delete:
         """Deserializes the Delete from a dictionary."""
@@ -324,6 +405,13 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAcl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.principal is not None: body['principal'] = self.principal
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAcl:
         """Deserializes the DeleteAcl from a dictionary."""
@@ -338,6 +426,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteAclResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteAclResponse:
         """Deserializes the DeleteAclResponse from a dictionary."""
@@ -352,6 +445,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteCredentialsResponse:
         """Deserializes the DeleteCredentialsResponse from a dictionary."""
@@ -366,6 +464,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteRepoResponse:
         """Deserializes the DeleteRepoResponse from a dictionary."""
@@ -380,6 +483,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
         """Deserializes the DeleteResponse from a dictionary."""
@@ -397,6 +505,12 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScope:
         """Deserializes the DeleteScope from a dictionary."""
@@ -411,6 +525,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteScopeResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteScopeResponse:
         """Deserializes the DeleteScopeResponse from a dictionary."""
@@ -432,6 +551,13 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSecret:
         """Deserializes the DeleteSecret from a dictionary."""
@@ -446,6 +572,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> DeleteSecretResponse:
         """Deserializes the DeleteSecretResponse from a dictionary."""
@@ -478,6 +609,13 @@ def as_dict(self) -> dict:
         if self.file_type is not None: body['file_type'] = self.file_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ExportResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.file_type is not None: body['file_type'] = self.file_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ExportResponse:
         """Deserializes the ExportResponse from a dictionary."""
@@ -504,6 +642,14 @@ def as_dict(self) -> dict:
         if self.git_username is not None: body['git_username'] = self.git_username
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetCredentialsResponse:
         """Deserializes the GetCredentialsResponse from a dictionary."""
@@ -523,6 +669,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRepoPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoPermissionLevelsResponse:
         """Deserializes the GetRepoPermissionLevelsResponse from a dictionary."""
@@ -564,6 +716,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetRepoResponse:
         """Deserializes the GetRepoResponse from a dictionary."""
@@ -591,6 +755,13 @@ def as_dict(self) -> dict:
         if self.value is not None: body['value'] = self.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.value is not None: body['value'] = self.value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetSecretResponse:
         """Deserializes the GetSecretResponse from a dictionary."""
@@ -608,6 +779,12 @@ def as_dict(self) -> dict:
         if self.permission_levels: body['permission_levels'] = [v.as_dict() for v in self.permission_levels]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWorkspaceObjectPermissionLevelsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission_levels: body['permission_levels'] = self.permission_levels
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> GetWorkspaceObjectPermissionLevelsResponse:
         """Deserializes the GetWorkspaceObjectPermissionLevelsResponse from a dictionary."""
@@ -657,6 +834,16 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Import into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.content is not None: body['content'] = self.content
+        if self.format is not None: body['format'] = self.format
+        if self.language is not None: body['language'] = self.language
+        if self.overwrite is not None: body['overwrite'] = self.overwrite
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Import:
         """Deserializes the Import from a dictionary."""
@@ -684,6 +871,7 @@ class ImportFormat(Enum):
     DBC = 'DBC'
     HTML = 'HTML'
     JUPYTER = 'JUPYTER'
+    RAW = 'RAW'
     R_MARKDOWN = 'R_MARKDOWN'
     SOURCE = 'SOURCE'
 
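
A hedged usage sketch for the new `RAW` member; this patch does not describe how the backend treats `RAW` content, so the comment below is an assumption:

```python
import base64

from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ImportFormat

w = WorkspaceClient()

# Import a file verbatim; content is base64-encoded, as with other formats.
w.workspace.import_(
    path='/Users/someone@example.com/notes.txt',
    format=ImportFormat.RAW,
    content=base64.b64encode(b'hello world').decode(),
    overwrite=True,
)
```
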
@@ -696,6 +884,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ImportResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ImportResponse:
         """Deserializes the ImportResponse from a dictionary."""
@@ -722,6 +915,12 @@ def as_dict(self) -> dict:
         if self.items: body['items'] = [v.as_dict() for v in self.items]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListAclsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.items: body['items'] = self.items
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListAclsResponse:
         """Deserializes the ListAclsResponse from a dictionary."""
@@ -739,6 +938,12 @@ def as_dict(self) -> dict:
         if self.credentials: body['credentials'] = [v.as_dict() for v in self.credentials]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credentials: body['credentials'] = self.credentials
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListCredentialsResponse:
         """Deserializes the ListCredentialsResponse from a dictionary."""
@@ -761,6 +966,13 @@ def as_dict(self) -> dict:
         if self.repos: body['repos'] = [v.as_dict() for v in self.repos]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListReposResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.next_page_token is not None: body['next_page_token'] = self.next_page_token
+        if self.repos: body['repos'] = self.repos
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListReposResponse:
         """Deserializes the ListReposResponse from a dictionary."""
@@ -778,6 +990,12 @@ def as_dict(self) -> dict:
         if self.objects: body['objects'] = [v.as_dict() for v in self.objects]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.objects: body['objects'] = self.objects
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListResponse:
         """Deserializes the ListResponse from a dictionary."""
@@ -795,6 +1013,12 @@ def as_dict(self) -> dict:
         if self.scopes: body['scopes'] = [v.as_dict() for v in self.scopes]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListScopesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.scopes: body['scopes'] = self.scopes
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListScopesResponse:
         """Deserializes the ListScopesResponse from a dictionary."""
@@ -812,6 +1036,12 @@ def as_dict(self) -> dict:
         if self.secrets: body['secrets'] = [v.as_dict() for v in self.secrets]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListSecretsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.secrets: body['secrets'] = self.secrets
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ListSecretsResponse:
         """Deserializes the ListSecretsResponse from a dictionary."""
@@ -830,6 +1060,12 @@ def as_dict(self) -> dict:
         if self.path is not None: body['path'] = self.path
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the Mkdirs into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.path is not None: body['path'] = self.path
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Mkdirs:
         """Deserializes the Mkdirs from a dictionary."""
@@ -844,6 +1080,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the MkdirsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> MkdirsResponse:
         """Deserializes the MkdirsResponse from a dictionary."""
@@ -893,6 +1134,19 @@ def as_dict(self) -> dict:
         if self.size is not None: body['size'] = self.size
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ObjectInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.language is not None: body['language'] = self.language
+        if self.modified_at is not None: body['modified_at'] = self.modified_at
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        if self.path is not None: body['path'] = self.path
+        if self.resource_id is not None: body['resource_id'] = self.resource_id
+        if self.size is not None: body['size'] = self.size
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> ObjectInfo:
         """Deserializes the ObjectInfo from a dictionary."""
@@ -940,6 +1194,14 @@ def as_dict(self) -> dict:
         if self.scope is not None: body['scope'] = self.scope
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAcl into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.permission is not None: body['permission'] = self.permission
+        if self.principal is not None: body['principal'] = self.principal
+        if self.scope is not None: body['scope'] = self.scope
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAcl:
         """Deserializes the PutAcl from a dictionary."""
@@ -956,6 +1218,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutAclResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutAclResponse:
         """Deserializes the PutAclResponse from a dictionary."""
@@ -985,6 +1252,15 @@ def as_dict(self) -> dict:
         if self.string_value is not None: body['string_value'] = self.string_value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutSecret into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bytes_value is not None: body['bytes_value'] = self.bytes_value
+        if self.key is not None: body['key'] = self.key
+        if self.scope is not None: body['scope'] = self.scope
+        if self.string_value is not None: body['string_value'] = self.string_value
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutSecret:
         """Deserializes the PutSecret from a dictionary."""
@@ -1002,6 +1278,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the PutSecretResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> PutSecretResponse:
         """Deserializes the PutSecretResponse from a dictionary."""
@@ -1032,6 +1313,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlRequest:
         """Deserializes the RepoAccessControlRequest from a dictionary."""
@@ -1069,6 +1360,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoAccessControlResponse:
         """Deserializes the RepoAccessControlResponse from a dictionary."""
@@ -1116,6 +1418,18 @@ def as_dict(self) -> dict:
         if self.url is not None: body['url'] = self.url
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoInfo into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.head_commit_id is not None: body['head_commit_id'] = self.head_commit_id
+        if self.id is not None: body['id'] = self.id
+        if self.path is not None: body['path'] = self.path
+        if self.provider is not None: body['provider'] = self.provider
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.url is not None: body['url'] = self.url
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoInfo:
         """Deserializes the RepoInfo from a dictionary."""
@@ -1145,6 +1459,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermission:
         """Deserializes the RepoPermission from a dictionary."""
@@ -1179,6 +1501,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissions:
         """Deserializes the RepoPermissions from a dictionary."""
@@ -1201,6 +1531,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsDescription:
         """Deserializes the RepoPermissionsDescription from a dictionary."""
@@ -1223,6 +1560,13 @@ def as_dict(self) -> dict:
         if self.repo_id is not None: body['repo_id'] = self.repo_id
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the RepoPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> RepoPermissionsRequest:
         """Deserializes the RepoPermissionsRequest from a dictionary."""
@@ -1252,6 +1596,14 @@ def as_dict(self) -> dict:
             body['last_updated_timestamp'] = self.last_updated_timestamp
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretMetadata into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.key is not None: body['key'] = self.key
+        if self.last_updated_timestamp is not None:
+            body['last_updated_timestamp'] = self.last_updated_timestamp
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretMetadata:
         """Deserializes the SecretMetadata from a dictionary."""
@@ -1277,6 +1629,14 @@ def as_dict(self) -> dict:
         if self.name is not None: body['name'] = self.name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SecretScope into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.backend_type is not None: body['backend_type'] = self.backend_type
+        if self.keyvault_metadata: body['keyvault_metadata'] = self.keyvault_metadata
+        if self.name is not None: body['name'] = self.name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SecretScope:
         """Deserializes the SecretScope from a dictionary."""
@@ -1300,6 +1660,12 @@ def as_dict(self) -> dict:
         if self.patterns: body['patterns'] = [v for v in self.patterns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparseCheckout into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.patterns: body['patterns'] = self.patterns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckout:
         """Deserializes the SparseCheckout from a dictionary."""
@@ -1321,6 +1687,12 @@ def as_dict(self) -> dict:
         if self.patterns: body['patterns'] = [v for v in self.patterns]
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SparseCheckoutUpdate into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.patterns: body['patterns'] = self.patterns
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> SparseCheckoutUpdate:
         """Deserializes the SparseCheckoutUpdate from a dictionary."""
@@ -1359,6 +1731,15 @@ def as_dict(self) -> dict:
         if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.credential_id is not None: body['credential_id'] = self.credential_id
+        if self.git_provider is not None: body['git_provider'] = self.git_provider
+        if self.git_username is not None: body['git_username'] = self.git_username
+        if self.personal_access_token is not None: body['personal_access_token'] = self.personal_access_token
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsRequest:
         """Deserializes the UpdateCredentialsRequest from a dictionary."""
@@ -1376,6 +1757,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCredentialsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateCredentialsResponse:
         """Deserializes the UpdateCredentialsResponse from a dictionary."""
@@ -1408,6 +1794,15 @@ def as_dict(self) -> dict:
         if self.tag is not None: body['tag'] = self.tag
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRepoRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.branch is not None: body['branch'] = self.branch
+        if self.repo_id is not None: body['repo_id'] = self.repo_id
+        if self.sparse_checkout: body['sparse_checkout'] = self.sparse_checkout
+        if self.tag is not None: body['tag'] = self.tag
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRepoRequest:
         """Deserializes the UpdateRepoRequest from a dictionary."""
@@ -1425,6 +1820,11 @@ def as_dict(self) -> dict:
         body = {}
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateRepoResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> UpdateRepoResponse:
         """Deserializes the UpdateRepoResponse from a dictionary."""
@@ -1455,6 +1855,16 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectAccessControlRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlRequest:
         """Deserializes the WorkspaceObjectAccessControlRequest from a dictionary."""
@@ -1492,6 +1902,17 @@ def as_dict(self) -> dict:
         if self.user_name is not None: body['user_name'] = self.user_name
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectAccessControlResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.all_permissions: body['all_permissions'] = self.all_permissions
+        if self.display_name is not None: body['display_name'] = self.display_name
+        if self.group_name is not None: body['group_name'] = self.group_name
+        if self.service_principal_name is not None:
+            body['service_principal_name'] = self.service_principal_name
+        if self.user_name is not None: body['user_name'] = self.user_name
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectAccessControlResponse:
         """Deserializes the WorkspaceObjectAccessControlResponse from a dictionary."""
@@ -1519,6 +1940,14 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermission into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.inherited is not None: body['inherited'] = self.inherited
+        if self.inherited_from_object: body['inherited_from_object'] = self.inherited_from_object
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermission:
         """Deserializes the WorkspaceObjectPermission from a dictionary."""
@@ -1553,6 +1982,14 @@ def as_dict(self) -> dict:
         if self.object_type is not None: body['object_type'] = self.object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissions into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.object_id is not None: body['object_id'] = self.object_id
+        if self.object_type is not None: body['object_type'] = self.object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissions:
         """Deserializes the WorkspaceObjectPermissions from a dictionary."""
@@ -1576,6 +2013,13 @@ def as_dict(self) -> dict:
         if self.permission_level is not None: body['permission_level'] = self.permission_level.value
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissionsDescription into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.description is not None: body['description'] = self.description
+        if self.permission_level is not None: body['permission_level'] = self.permission_level
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsDescription:
         """Deserializes the WorkspaceObjectPermissionsDescription from a dictionary."""
@@ -1602,6 +2046,14 @@ def as_dict(self) -> dict:
         if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
         return body
 
+    def as_shallow_dict(self) -> dict:
+        """Serializes the WorkspaceObjectPermissionsRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.access_control_list: body['access_control_list'] = self.access_control_list
+        if self.workspace_object_id is not None: body['workspace_object_id'] = self.workspace_object_id
+        if self.workspace_object_type is not None: body['workspace_object_type'] = self.workspace_object_type
+        return body
+
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> WorkspaceObjectPermissionsRequest:
         """Deserializes the WorkspaceObjectPermissionsRequest from a dictionary."""
@@ -1799,7 +2251,7 @@ def delete(self, repo_id: int):
         Deletes the specified repo.
         
         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID for the corresponding repo to delete.
         
         
         """
@@ -1897,7 +2349,8 @@ def set_permissions(
             access_control_list: Optional[List[RepoAccessControlRequest]] = None) -> RepoPermissions:
         """Set repo permissions.
         
-        Sets permissions on a repo. Repos can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param repo_id: str
           The repo for which to get or manage permissions.
@@ -2527,8 +2980,9 @@ def set_permissions(
     ) -> WorkspaceObjectPermissions:
         """Set workspace object permissions.
         
-        Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent
-        objects or root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
         
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
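For context, the `as_shallow_dict` methods added throughout this file differ from the existing `as_dict` in one way: nested values are left unconverted (dataclass instances and raw enum members instead of dicts and `.value` strings). A minimal sketch of the pattern with hypothetical `Inner`/`Outer` classes, not actual SDK types:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class Inner:
    name: Optional[str] = None

    def as_dict(self) -> dict:
        body = {}
        if self.name is not None: body['name'] = self.name
        return body


@dataclass
class Outer:
    inner: Optional[Inner] = None

    def as_dict(self) -> dict:
        # Deep: nested messages are recursively serialized to dicts.
        body = {}
        if self.inner: body['inner'] = self.inner.as_dict()
        return body

    def as_shallow_dict(self) -> dict:
        # Shallow: nested messages are kept as-is.
        body = {}
        if self.inner: body['inner'] = self.inner
        return body


o = Outer(inner=Inner(name='x'))
assert o.as_dict() == {'inner': {'name': 'x'}}
assert o.as_shallow_dict() == {'inner': Inner(name='x')}
```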
diff --git a/databricks/sdk/useragent.py b/databricks/sdk/useragent.py
index 5b15d2822..45adfe51d 100644
--- a/databricks/sdk/useragent.py
+++ b/databricks/sdk/useragent.py
@@ -148,4 +148,58 @@ def to_string(alternate_product_info: Optional[Tuple[str, str]] = None,
     base.extend(_extra)
     base.extend(_get_upstream_user_agent_info())
     base.extend(_get_runtime_info())
+    if cicd_provider() != "":
+        base.append((CICD_KEY, cicd_provider()))
     return " ".join(f"{k}/{v}" for k, v in base)
+
+
+# List of CI/CD providers and pairs of envvar/value that are used to detect them.
+_PROVIDERS = {
+    "github": [("GITHUB_ACTIONS", "true")],
+    "gitlab": [("GITLAB_CI", "true")],
+    "jenkins": [("JENKINS_URL", "")],
+    "azure-devops": [("TF_BUILD", "True")],
+    "circle": [("CIRCLECI", "true")],
+    "travis": [("TRAVIS", "true")],
+    "bitbucket": [("BITBUCKET_BUILD_NUMBER", "")],
+    "google-cloud-build": [("PROJECT_ID", ""), ("BUILD_ID", ""), ("PROJECT_NUMBER", ""), ("LOCATION", "")],
+    "aws-code-build": [("CODEBUILD_BUILD_ARN", "")],
+    "tf-cloud": [("TFC_RUN_ID", "")],
+}
+
+# Private variable to store the CI/CD provider. This value is computed at
+# the first invocation of cicd_provider() and is cached for subsequent calls.
+_cicd_provider = None
+
+
+def cicd_provider() -> str:
+    """Return the CI/CD provider if detected, or an empty string otherwise."""
+
+    # This function is thread-safe because (i) assignments are atomic, and
+    # (ii) computing the CI/CD provider is idempotent.
+    global _cicd_provider
+    if _cicd_provider is not None:
+        return _cicd_provider
+
+    providers = []
+    for p in _PROVIDERS:
+        found = True
+        for envvar, value in _PROVIDERS[p]:
+            v = os.getenv(envvar)
+            if v is None or (value != "" and v != value):
+                found = False
+                break
+
+        if found:
+            providers.append(p)
+
+    if len(providers) == 0:
+        _cicd_provider = ""
+    else:
+        # TODO: reconsider what to do if multiple providers are detected.
+        # The current mechanism has the benefit of being deterministic and
+        # robust to ordering changes in _PROVIDERS.
+        providers.sort()
+        _cicd_provider = providers[0]
+
+    return _cicd_provider
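A quick way to sanity-check the detection above (a sketch; note that the result is cached module-wide, so the environment must be set before the first call in the process):

```python
import os

from databricks.sdk import useragent

# Simulate a GitHub Actions runner; GITHUB_ACTIONS=true is the marker
# listed in _PROVIDERS above.
os.environ['GITHUB_ACTIONS'] = 'true'
# -> 'github', assuming no other provider markers are set; if several
# providers match, the alphabetically first one wins.
print(useragent.cicd_provider())
```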
diff --git a/databricks/sdk/version.py b/databricks/sdk/version.py
index aae5aca67..c09c695fd 100644
--- a/databricks/sdk/version.py
+++ b/databricks/sdk/version.py
@@ -1 +1 @@
-__version__ = '0.36.0'
+__version__ = '0.44.1'
diff --git a/docs/account/billing/budget_policy.rst b/docs/account/billing/budget_policy.rst
new file mode 100644
index 000000000..6f7d7ede1
--- /dev/null
+++ b/docs/account/billing/budget_policy.rst
@@ -0,0 +1,88 @@
+``a.budget_policy``: Budget Policy
+==================================
+.. currentmodule:: databricks.sdk.service.billing
+
+.. py:class:: BudgetPolicyAPI
+
+    A service that serves the REST API for budget policies.
+
+    .. py:method:: create( [, custom_tags: Optional[List[compute.CustomPolicyTag]], policy_name: Optional[str], request_id: Optional[str]]) -> BudgetPolicy
+
+        Create a budget policy.
+        
+        Creates a new policy.
+        
+        :param custom_tags: List[:class:`CustomPolicyTag`] (optional)
+          A list of tags defined by the customer. At most 40 entries are allowed per policy.
+        :param policy_name: str (optional)
+          The name of the policy. - Must be unique among active policies. - Can contain only characters of
+          0-9, a-z, A-Z, -, =, ., :, /, @, _, +, whitespace.
+        :param request_id: str (optional)
+          A unique identifier for this request. Restricted to 36 ASCII characters. A random UUID is
+          recommended. This request is only idempotent if a `request_id` is provided.
+        
+        :returns: :class:`BudgetPolicy`
+        
+
+    .. py:method:: delete(policy_id: str)
+
+        Delete a budget policy.
+        
+        Deletes a policy
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        
+        
+
+    .. py:method:: get(policy_id: str) -> BudgetPolicy
+
+        Get a budget policy.
+        
+        Retrieves a policy by its ID.
+        
+        :param policy_id: str
+          The Id of the policy.
+        
+        :returns: :class:`BudgetPolicy`
+        
+
+    .. py:method:: list( [, filter_by: Optional[Filter], page_size: Optional[int], page_token: Optional[str], sort_spec: Optional[SortSpec]]) -> Iterator[BudgetPolicy]
+
+        List policies.
+        
+        Lists all policies. Policies are returned in the alphabetically ascending order of their names.
+        
+        :param filter_by: :class:`Filter` (optional)
+          A filter to apply to the list of policies.
+        :param page_size: int (optional)
+          The maximum number of budget policies to return. If unspecified, at most 100 budget policies will be
+          returned. The maximum value is 1000; values above 1000 will be coerced to 1000.
+        :param page_token: str (optional)
+          A page token, received from a previous `ListServerlessPolicies` call. Provide this to retrieve the
+          subsequent page. If unspecified, the first page will be returned.
+          
+          When paginating, all other parameters provided to `ListServerlessPoliciesRequest` must match the
+          call that provided the page token.
+        :param sort_spec: :class:`SortSpec` (optional)
+          The sort specification.
+        
+        :returns: Iterator over :class:`BudgetPolicy`
+        
+
+    .. py:method:: update(policy_id: str [, limit_config: Optional[LimitConfig], policy: Optional[BudgetPolicy]]) -> BudgetPolicy
+
+        Update a budget policy.
+        
+        Updates a policy
+        
+        :param policy_id: str
+          The Id of the policy. This field is generated by Databricks and globally unique.
+        :param limit_config: :class:`LimitConfig` (optional)
+          DEPRECATED. This field is redundant, as LimitConfig is part of the BudgetPolicy.
+        :param policy: :class:`BudgetPolicy` (optional)
+          Contains the BudgetPolicy details.
+        
+        :returns: :class:`BudgetPolicy`
+        
\ No newline at end of file
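A hedged usage sketch for this new service, assuming a configured `AccountClient`; the policy name is illustrative and the `policy_name` attribute is inferred from the parameters documented above:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Create a policy, then page through all policies; per the docs above,
# results come back in ascending alphabetical order of policy name.
created = a.budget_policy.create(policy_name='team-x-serverless')
for policy in a.budget_policy.list(page_size=100):
    print(policy.policy_name)
```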
diff --git a/docs/account/billing/budgets.rst b/docs/account/billing/budgets.rst
index edba0a733..43c77d00b 100644
--- a/docs/account/billing/budgets.rst
+++ b/docs/account/billing/budgets.rst
@@ -115,7 +115,7 @@
         Gets a budget configuration for an account. Both account and budget configuration are specified by ID.
         
         :param budget_id: str
-          The Databricks budget configuration ID.
+          The budget configuration ID.
         
         :returns: :class:`GetBudgetConfigurationResponse`
         
diff --git a/docs/account/billing/index.rst b/docs/account/billing/index.rst
index 0e07da594..b8b317616 100644
--- a/docs/account/billing/index.rst
+++ b/docs/account/billing/index.rst
@@ -8,6 +8,7 @@ Configure different aspects of Databricks billing and usage.
    :maxdepth: 1
 
    billable_usage
+   budget_policy
    budgets
    log_delivery
    usage_dashboards
\ No newline at end of file
diff --git a/docs/account/oauth2/custom_app_integration.rst b/docs/account/oauth2/custom_app_integration.rst
index 0dcc3d8e0..7043a343b 100644
--- a/docs/account/oauth2/custom_app_integration.rst
+++ b/docs/account/oauth2/custom_app_integration.rst
@@ -7,7 +7,7 @@
     These APIs enable administrators to manage custom OAuth app integrations, which is required for
     adding/using Custom OAuth App Integration like Tableau Cloud for Databricks in AWS cloud.
 
-    .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]]) -> CreateCustomAppIntegrationOutput
+    .. py:method:: create( [, confidential: Optional[bool], name: Optional[str], redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]]) -> CreateCustomAppIntegrationOutput
 
         Create Custom OAuth App Integration.
         
@@ -26,6 +26,9 @@
           profile, email.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user will need to consent to in order to mint the access token. If the user
+          does not authorize them, the access token will not be minted. Must be a subset of scopes.
         
         :returns: :class:`CreateCustomAppIntegrationOutput`
         
@@ -49,6 +52,7 @@
         Gets the Custom OAuth App Integration for the given integration id.
         
         :param integration_id: str
+          The OAuth app integration ID.
         
         :returns: :class:`GetCustomAppIntegrationOutput`
         
@@ -66,7 +70,7 @@
         :returns: Iterator over :class:`GetCustomAppIntegrationOutput`
         
 
-    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy]])
+    .. py:method:: update(integration_id: str [, redirect_urls: Optional[List[str]], scopes: Optional[List[str]], token_access_policy: Optional[TokenAccessPolicy], user_authorized_scopes: Optional[List[str]]])
 
         Updates Custom OAuth App Integration.
         
@@ -76,8 +80,14 @@
         :param integration_id: str
         :param redirect_urls: List[str] (optional)
           List of OAuth redirect urls to be updated in the custom OAuth app integration
+        :param scopes: List[str] (optional)
+          List of OAuth scopes to be updated in the custom OAuth app integration. As with redirect URLs,
+          this fully replaces the existing values instead of appending.
         :param token_access_policy: :class:`TokenAccessPolicy` (optional)
           Token access policy to be updated in the custom OAuth app integration
+        :param user_authorized_scopes: List[str] (optional)
+          Scopes that the end user will need to consent to in order to mint the access token. If the user
+          does not authorize them, the access token will not be minted. Must be a subset of scopes.
         
         
         
\ No newline at end of file
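Because `scopes` fully replaces the existing values (see above), an update should always pass the complete desired list. A sketch, assuming a configured `AccountClient` and an illustrative integration id; the `a.custom_app_integration` accessor name is an assumption:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Pass the full set of scopes: this replaces, rather than appends to,
# whatever the integration currently has.
a.custom_app_integration.update(
    integration_id='abc-123',
    scopes=['openid', 'profile', 'email'],
)
```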
diff --git a/docs/account/oauth2/federation_policy.rst b/docs/account/oauth2/federation_policy.rst
new file mode 100644
index 000000000..c95bf563c
--- /dev/null
+++ b/docs/account/oauth2/federation_policy.rst
@@ -0,0 +1,105 @@
+``a.federation_policy``: Account Federation Policies
+====================================================
+.. currentmodule:: databricks.sdk.service.oauth2
+
+.. py:class:: AccountFederationPolicyAPI
+
+    These APIs manage account federation policies.
+    
+    Account federation policies allow users and service principals in your Databricks account to securely
+    access Databricks APIs using tokens from your trusted identity providers (IdPs).
+    
+    With token federation, your users and service principals can exchange tokens from your IdP for Databricks
+    OAuth tokens, which can be used to access Databricks APIs. Token federation eliminates the need to manage
+    Databricks secrets, and allows you to centralize management of token issuance policies in your IdP.
+    Databricks token federation is typically used in combination with [SCIM], so users in your IdP are
+    synchronized into your Databricks account.
+    
+    Token federation is configured in your Databricks account using an account federation policy. An account
+    federation policy specifies: * which IdP, or issuer, your Databricks account should accept tokens from *
+    how to determine which Databricks user, or subject, a token is issued for
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of your tokens. The issuer is an https URL that identifies your IdP. * The allowed
+    token __audiences__, as specified in the “aud” claim of your tokens. This identifier is intended to
+    represent the recipient of the token. As long as the audience in the token matches at least one audience
+    in the policy, the token is considered a match. If unspecified, the default value is your Databricks
+    account id. * The __subject claim__, which indicates which token claim contains the Databricks username of
+    the user the token was issued for. If unspecified, the default value is “sub”. * Optionally, the
+    public keys used to validate the signature of your tokens, in JWKS format. If unspecified (recommended),
+    Databricks automatically fetches the public keys from your issuer’s well known endpoint. Databricks
+    strongly recommends relying on your issuer’s well known endpoint for discovering public keys.
+    
+    An example federation policy is: ``` issuer: "https://idp.mycompany.com/oidc" audiences: ["databricks"]
+    subject_claim: "sub" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks as user
+    `username@mycompany.com` is: ``` { "iss": "https://idp.mycompany.com/oidc", "aud": "databricks", "sub":
+    "username@mycompany.com" } ```
+    
+    You may also need to configure your IdP to generate tokens for your users to exchange with Databricks, if
+    your users do not already have the ability to generate tokens that are compatible with your federation
+    policy.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+    
+    [SCIM]: https://docs.databricks.com/admin/users-groups/scim/index.html
+
+    .. py:method:: create( [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
+
+        Create account federation policy.
+        
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase letters,
+          numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: delete(policy_id: str)
+
+        Delete account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        
+
+    .. py:method:: get(policy_id: str) -> FederationPolicy
+
+        Get account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
+
+        List account federation policies.
+        
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
+
+    .. py:method:: update(policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
+
+        Update account federation policy.
+        
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        
\ No newline at end of file
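A minimal sketch of enumerating policies with this API, assuming a configured `AccountClient`:

```python
from databricks.sdk import AccountClient

a = AccountClient()

# The iterator follows page tokens internally, so this walks every policy.
for policy in a.federation_policy.list(page_size=50):
    print(policy)
```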
diff --git a/docs/account/oauth2/index.rst b/docs/account/oauth2/index.rst
index a4663ef6b..745a3e721 100644
--- a/docs/account/oauth2/index.rst
+++ b/docs/account/oauth2/index.rst
@@ -8,6 +8,8 @@ Configure OAuth 2.0 application registrations for Databricks
    :maxdepth: 1
 
    custom_app_integration
+   federation_policy
    o_auth_published_apps
    published_app_integration
+   service_principal_federation_policy
    service_principal_secrets
\ No newline at end of file
diff --git a/docs/account/oauth2/service_principal_federation_policy.rst b/docs/account/oauth2/service_principal_federation_policy.rst
new file mode 100644
index 000000000..2e0577ba4
--- /dev/null
+++ b/docs/account/oauth2/service_principal_federation_policy.rst
@@ -0,0 +1,115 @@
+``a.service_principal_federation_policy``: Service Principal Federation Policies
+================================================================================
+.. currentmodule:: databricks.sdk.service.oauth2
+
+.. py:class:: ServicePrincipalFederationPolicyAPI
+
+    These APIs manage service principal federation policies.
+    
+    Service principal federation, also known as Workload Identity Federation, allows your automated workloads
+    running outside of Databricks to securely access Databricks APIs without the need for Databricks secrets.
+    With Workload Identity Federation, your application (or workload) authenticates to Databricks as a
+    Databricks service principal, using tokens provided by the workload runtime.
+    
+    Databricks strongly recommends using Workload Identity Federation to authenticate to Databricks from
+    automated workloads, over alternatives such as OAuth client secrets or Personal Access Tokens, whenever
+    possible. Workload Identity Federation is supported by many popular services, including GitHub Actions,
+    Azure DevOps, GitLab, Terraform Cloud, and Kubernetes clusters, among others.
+    
+    Workload identity federation is configured in your Databricks account using a service principal federation
+    policy. A service principal federation policy specifies: * which IdP, or issuer, the service principal is
+    allowed to authenticate from * which workload identity, or subject, is allowed to authenticate as the
+    Databricks service principal
+    
+    To configure a federation policy, you provide the following: * The required token __issuer__, as specified
+    in the “iss” claim of workload identity tokens. The issuer is an https URL that identifies the
+    workload identity provider. * The required token __subject__, as specified in the “sub” claim of
+    workload identity tokens. The subject uniquely identifies the workload in the workload runtime
+    environment. * The allowed token __audiences__, as specified in the “aud” claim of workload identity
+    tokens. The audience is intended to represent the recipient of the token. As long as the audience in the
+    token matches at least one audience in the policy, the token is considered a match. If unspecified, the
+    default value is your Databricks account id. * Optionally, the public keys used to validate the signature
+    of the workload identity tokens, in JWKS format. If unspecified (recommended), Databricks automatically
+    fetches the public keys from the issuer’s well known endpoint. Databricks strongly recommends relying on
+    the issuer’s well known endpoint for discovering public keys.
+    
+    An example service principal federation policy, for a GitHub Actions workload, is: ``` issuer:
+    "https://token.actions.githubusercontent.com" audiences: ["https://github.com/my-github-org"] subject:
+    "repo:my-github-org/my-repo:environment:prod" ```
+    
+    An example JWT token body that matches this policy and could be used to authenticate to Databricks is: ```
+    { "iss": "https://token.actions.githubusercontent.com", "aud": "https://github.com/my-github-org", "sub":
+    "repo:my-github-org/my-repo:environment:prod" } ```
+    
+    You may also need to configure the workload runtime to generate tokens for your workloads.
+    
+    You do not need to configure an OAuth application in Databricks to use token federation.
+
+    .. py:method:: create(service_principal_id: int [, policy: Optional[FederationPolicy], policy_id: Optional[str]]) -> FederationPolicy
+
+        Create service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param policy_id: str (optional)
+          The identifier for the federation policy. The identifier must contain only lowercase letters,
+          numbers, hyphens, and slashes. If unspecified, the id will be assigned by Databricks.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: delete(service_principal_id: int, policy_id: str)
+
+        Delete service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        
+        
+
+    .. py:method:: get(service_principal_id: int, policy_id: str) -> FederationPolicy
+
+        Get service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        
+        :returns: :class:`FederationPolicy`
+        
+
+    .. py:method:: list(service_principal_id: int [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[FederationPolicy]
+
+        List service principal federation policies.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param page_size: int (optional)
+        :param page_token: str (optional)
+        
+        :returns: Iterator over :class:`FederationPolicy`
+        
+
+    .. py:method:: update(service_principal_id: int, policy_id: str [, policy: Optional[FederationPolicy], update_mask: Optional[str]]) -> FederationPolicy
+
+        Update service principal federation policy.
+        
+        :param service_principal_id: int
+          The service principal id for the federation policy.
+        :param policy_id: str
+          The identifier for the federation policy.
+        :param policy: :class:`FederationPolicy` (optional)
+        :param update_mask: str (optional)
+          The field mask specifies which fields of the policy to update. To specify multiple fields in the
+          field mask, use comma as the separator (no space). The special value '*' indicates that all fields
+          should be updated (full replacement). If unspecified, all fields that are set in the policy provided
+          in the update request will overwrite the corresponding fields in the existing policy. Example value:
+          'description,oidc_policy.audiences'.
+        
+        :returns: :class:`FederationPolicy`
+        
\ No newline at end of file
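And the service principal variant, scoped by `service_principal_id` (ids below are illustrative):

```python
from databricks.sdk import AccountClient

a = AccountClient()

policy = a.service_principal_federation_policy.get(
    service_principal_id=12345,   # illustrative service principal id
    policy_id='my-policy',        # illustrative policy id
)
print(policy)
```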
diff --git a/docs/account/oauth2/service_principal_secrets.rst b/docs/account/oauth2/service_principal_secrets.rst
index 4249b9dea..955d6da53 100644
--- a/docs/account/oauth2/service_principal_secrets.rst
+++ b/docs/account/oauth2/service_principal_secrets.rst
@@ -42,7 +42,7 @@
         
         
 
-    .. py:method:: list(service_principal_id: int) -> Iterator[SecretInfo]
+    .. py:method:: list(service_principal_id: int [, page_token: Optional[str]]) -> Iterator[SecretInfo]
 
         List service principal secrets.
         
@@ -51,6 +51,13 @@
         
         :param service_principal_id: int
           The service principal ID.
+        :param page_token: str (optional)
+          An opaque page token which was the `next_page_token` in the response of the previous request to list
+          the secrets for this service principal. Provide this token to retrieve the next page of secret
+          entries. When providing a `page_token`, all other parameters provided to the request must match the
+          previous request. To list all of the secrets for a service principal, it is necessary to continue
+          requesting pages of entries until the response contains no `next_page_token`. Note that the number
+          of entries returned must not be used to determine when the listing is complete.
         
         :returns: Iterator over :class:`SecretInfo`
         
\ No newline at end of file
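Callers normally don't handle `page_token` themselves: the returned iterator requests subsequent pages as needed. A sketch (the service principal id is illustrative):

```python
from databricks.sdk import AccountClient

a = AccountClient()

# Pagination is handled by the iterator; it keeps requesting pages until
# a response arrives without a next_page_token.
for secret in a.service_principal_secrets.list(service_principal_id=12345):
    print(secret)
```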
diff --git a/docs/account/provisioning/workspaces.rst b/docs/account/provisioning/workspaces.rst
index 98c47cc9b..ad8a75942 100644
--- a/docs/account/provisioning/workspaces.rst
+++ b/docs/account/provisioning/workspaces.rst
@@ -11,7 +11,7 @@
     These endpoints are available if your account is on the E2 version of the platform or on a select custom
     plan that allows multiple workspaces per account.
 
-    .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
+    .. py:method:: create(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
 
         Usage:
@@ -116,6 +116,8 @@
           [calculate subnet sizes for a new workspace]: https://docs.gcp.databricks.com/administration-guide/cloud-configurations/gcp/network-sizing.html
         :param gke_config: :class:`GkeConfig` (optional)
           The configurations for the GKE cluster of a Databricks workspace.
+        :param is_no_public_ip_enabled: bool (optional)
+          Whether no public IP is enabled for the workspace.
         :param location: str (optional)
           The Google Cloud region of the workspace data plane in your Google account. For example, `us-east4`.
         :param managed_services_customer_managed_key_id: str (optional)
@@ -148,7 +150,7 @@
           See :method:wait_get_workspace_running for more details.
         
 
-    .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
+    .. py:method:: create_and_wait(workspace_name: str [, aws_region: Optional[str], cloud: Optional[str], cloud_resource_container: Optional[CloudResourceContainer], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], deployment_name: Optional[str], gcp_managed_network_config: Optional[GcpManagedNetworkConfig], gke_config: Optional[GkeConfig], is_no_public_ip_enabled: Optional[bool], location: Optional[str], managed_services_customer_managed_key_id: Optional[str], network_id: Optional[str], pricing_tier: Optional[PricingTier], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
 
     .. py:method:: delete(workspace_id: int)
@@ -227,7 +229,7 @@
         :returns: Iterator over :class:`Workspace`
         
 
-    .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
+    .. py:method:: update(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str]]) -> Wait[Workspace]
 
 
         Usage:
@@ -370,6 +372,9 @@
           The ID of the workspace's network configuration object. Used only if you already use a
           customer-managed VPC. For failed workspaces only, you can switch from a Databricks-managed VPC to a
           customer-managed VPC by updating the workspace to add a network configuration ID.
+        :param private_access_settings_id: str (optional)
+          The ID of the workspace's private access settings configuration object. This parameter is available
+          only for updating failed workspaces.
         :param storage_configuration_id: str (optional)
           The ID of the workspace's storage configuration object. This parameter is available only for
           updating failed workspaces.
@@ -382,7 +387,7 @@
           See :method:wait_get_workspace_running for more details.
         
 
-    .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
+    .. py:method:: update_and_wait(workspace_id: int [, aws_region: Optional[str], credentials_id: Optional[str], custom_tags: Optional[Dict[str, str]], managed_services_customer_managed_key_id: Optional[str], network_connectivity_config_id: Optional[str], network_id: Optional[str], private_access_settings_id: Optional[str], storage_configuration_id: Optional[str], storage_customer_managed_key_id: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> Workspace
 
 
     .. py:method:: wait_get_workspace_running(workspace_id: int, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[Workspace], None]]) -> Workspace
diff --git a/docs/account/settings/csp_enablement_account.rst b/docs/account/settings/csp_enablement_account.rst
index b6fec691c..885aae89f 100644
--- a/docs/account/settings/csp_enablement_account.rst
+++ b/docs/account/settings/csp_enablement_account.rst
@@ -37,9 +37,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`CspEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`CspEnablementAccountSetting`
         
\ No newline at end of file
diff --git a/docs/account/settings/disable_legacy_features.rst b/docs/account/settings/disable_legacy_features.rst
index d7f1db9d3..b10d7e2dc 100644
--- a/docs/account/settings/disable_legacy_features.rst
+++ b/docs/account/settings/disable_legacy_features.rst
@@ -52,9 +52,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyFeatures`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyFeatures`
         
\ No newline at end of file
diff --git a/docs/account/settings/enable_ip_access_lists.rst b/docs/account/settings/enable_ip_access_lists.rst
new file mode 100644
index 000000000..9485b7332
--- /dev/null
+++ b/docs/account/settings/enable_ip_access_lists.rst
@@ -0,0 +1,63 @@
+``a.settings.enable_ip_access_lists``: Enable Account IP Access Lists
+=====================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: EnableIpAccessListsAPI
+
+    Controls the enforcement of IP access lists for accessing the account console, allowing you to enable
+    or disable restricted access based on IP addresses.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAccountIpAccessEnableResponse
+
+        Delete the account IP access toggle setting.
+        
+        Reverts the value of the account IP access toggle setting to default (ON)
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAccountIpAccessEnableResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AccountIpAccessEnable
+
+        Get the account IP access toggle setting.
+        
+        Gets the value of the account IP access toggle setting.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AccountIpAccessEnable, field_mask: str) -> AccountIpAccessEnable
+
+        Update the account IP access toggle setting.
+        
+        Updates the value of the account IP access toggle setting.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AccountIpAccessEnable`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AccountIpAccessEnable`
+        
\ No newline at end of file
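A minimal sketch of the read -> delete pattern the etag paragraphs above describe, assuming the returned setting exposes its version on an `etag` attribute:

.. code-block:: python

    from databricks.sdk import AccountClient

    a = AccountClient()
    current = a.settings.enable_ip_access_lists.get()
    # passing the etag back makes the delete fail instead of clobbering a concurrent write
    a.settings.enable_ip_access_lists.delete(etag=current.etag)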
diff --git a/docs/account/settings/esm_enablement_account.rst b/docs/account/settings/esm_enablement_account.rst
index 59376793b..e9359d907 100644
--- a/docs/account/settings/esm_enablement_account.rst
+++ b/docs/account/settings/esm_enablement_account.rst
@@ -34,9 +34,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EsmEnablementAccountSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EsmEnablementAccountSetting`
         
\ No newline at end of file
diff --git a/docs/account/settings/index.rst b/docs/account/settings/index.rst
index abf97c6a0..9ffe7694e 100644
--- a/docs/account/settings/index.rst
+++ b/docs/account/settings/index.rst
@@ -12,5 +12,6 @@ Manage security settings for Accounts and Workspaces
    settings
    csp_enablement_account
    disable_legacy_features
+   enable_ip_access_lists
    esm_enablement_account
    personal_compute
\ No newline at end of file
diff --git a/docs/account/settings/personal_compute.rst b/docs/account/settings/personal_compute.rst
index 00ccf3012..54e958a28 100644
--- a/docs/account/settings/personal_compute.rst
+++ b/docs/account/settings/personal_compute.rst
@@ -54,9 +54,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`PersonalComputeSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`PersonalComputeSetting`
         
\ No newline at end of file
diff --git a/docs/account/settings/settings.rst b/docs/account/settings/settings.rst
index 3df647279..abf1c0e45 100644
--- a/docs/account/settings/settings.rst
+++ b/docs/account/settings/settings.rst
@@ -25,6 +25,12 @@
         provisioned. 3. Disables the use of ‘No-isolation clusters’. 4. Disables Databricks Runtime versions
         prior to 13.3LTS.
 
+    .. py:property:: enable_ip_access_lists
+        :type: EnableIpAccessListsAPI
+
+        Controls the enforcement of IP access lists for accessing the account console, allowing you to
+        enable or disable restricted access based on IP addresses.
+
     .. py:property:: esm_enablement_account
         :type: EsmEnablementAccountAPI
 
diff --git a/docs/dbdataclasses/apps.rst b/docs/dbdataclasses/apps.rst
index 2d522c625..2214e2ac9 100644
--- a/docs/dbdataclasses/apps.rst
+++ b/docs/dbdataclasses/apps.rst
@@ -190,14 +190,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateAppDeploymentRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateAppRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: GetAppPermissionLevelsResponse
    :members:
    :undoc-members:
@@ -217,7 +209,3 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. autoclass:: StopAppRequest
    :members:
    :undoc-members:
-
-.. autoclass:: UpdateAppRequest
-   :members:
-   :undoc-members:
diff --git a/docs/dbdataclasses/billing.rst b/docs/dbdataclasses/billing.rst
index 25deb0a18..590fd693e 100644
--- a/docs/dbdataclasses/billing.rst
+++ b/docs/dbdataclasses/billing.rst
@@ -57,6 +57,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: BudgetPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateBillingUsageDashboardRequest
    :members:
    :undoc-members:
@@ -85,6 +89,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateBudgetPolicyRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateLogDeliveryConfigurationParams
    :members:
    :undoc-members:
@@ -93,6 +101,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: DeliveryStatus
 
   The status string for log delivery. Possible values are: * `CREATED`: There were no log delivery attempts since the config was created. * `SUCCEEDED`: The latest attempt of log delivery has succeeded completely. * `USER_FAILURE`: The latest attempt of log delivery failed because of misconfiguration of customer provided permissions on role or storage. * `SYSTEM_FAILURE`: The latest attempt of log delivery failed because of a Databricks internal error. Contact support if it doesn't go away soon. * `NOT_FOUND`: The log delivery status as the configuration has been disabled since the release of this feature or there are no workspaces in the account.
@@ -116,6 +128,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Filter
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetBillingUsageDashboardResponse
    :members:
    :undoc-members:
@@ -124,10 +140,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: LimitConfig
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListBudgetConfigurationsResponse
    :members:
    :undoc-members:
 
+.. autoclass:: ListBudgetPoliciesResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LogDeliveryConfigStatus
 
    Status of log delivery configuration. Set to `ENABLED` (enabled) or `DISABLED` (disabled). Defaults to `ENABLED`. You can [enable or disable the configuration](#operation/patch-log-delivery-config-status) later. Deletion of a configuration is not supported, so disable a log delivery configuration that is no longer needed.
@@ -175,6 +199,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: SortSpec
+   :members:
+   :undoc-members:
+
+.. py:class:: SortSpecField
+
+   .. py:attribute:: POLICY_NAME
+      :value: "POLICY_NAME"
+
 .. autoclass:: UpdateBudgetConfigurationBudget
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/catalog.rst b/docs/dbdataclasses/catalog.rst
index cb6399348..17d23b223 100644
--- a/docs/dbdataclasses/catalog.rst
+++ b/docs/dbdataclasses/catalog.rst
@@ -69,6 +69,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AwsIamRole
+   :members:
+   :undoc-members:
+
 .. autoclass:: AwsIamRoleRequest
    :members:
    :undoc-members:
@@ -77,6 +81,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: AzureActiveDirectoryToken
+   :members:
+   :undoc-members:
+
+.. autoclass:: AzureManagedIdentity
+   :members:
+   :undoc-members:
+
 .. autoclass:: AzureManagedIdentityRequest
    :members:
    :undoc-members:
@@ -101,49 +113,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. py:class:: CatalogInfoSecurableKind
-
-   Kind of catalog securable.
-
-   .. py:attribute:: CATALOG_DELTASHARING
-      :value: "CATALOG_DELTASHARING"
-
-   .. py:attribute:: CATALOG_FOREIGN_BIGQUERY
-      :value: "CATALOG_FOREIGN_BIGQUERY"
-
-   .. py:attribute:: CATALOG_FOREIGN_DATABRICKS
-      :value: "CATALOG_FOREIGN_DATABRICKS"
-
-   .. py:attribute:: CATALOG_FOREIGN_MYSQL
-      :value: "CATALOG_FOREIGN_MYSQL"
-
-   .. py:attribute:: CATALOG_FOREIGN_POSTGRESQL
-      :value: "CATALOG_FOREIGN_POSTGRESQL"
-
-   .. py:attribute:: CATALOG_FOREIGN_REDSHIFT
-      :value: "CATALOG_FOREIGN_REDSHIFT"
-
-   .. py:attribute:: CATALOG_FOREIGN_SNOWFLAKE
-      :value: "CATALOG_FOREIGN_SNOWFLAKE"
-
-   .. py:attribute:: CATALOG_FOREIGN_SQLDW
-      :value: "CATALOG_FOREIGN_SQLDW"
-
-   .. py:attribute:: CATALOG_FOREIGN_SQLSERVER
-      :value: "CATALOG_FOREIGN_SQLSERVER"
-
-   .. py:attribute:: CATALOG_INTERNAL
-      :value: "CATALOG_INTERNAL"
-
-   .. py:attribute:: CATALOG_STANDARD
-      :value: "CATALOG_STANDARD"
-
-   .. py:attribute:: CATALOG_SYSTEM
-      :value: "CATALOG_SYSTEM"
-
-   .. py:attribute:: CATALOG_SYSTEM_DELTASHARING
-      :value: "CATALOG_SYSTEM_DELTASHARING"
-
 .. py:class:: CatalogIsolationMode
 
    Whether the current securable is accessible from all workspaces or a specific set of workspaces.
@@ -181,8 +150,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ColumnTypeName
 
-   Name of type (INT, STRUCT, MAP, etc.).
-
    .. py:attribute:: ARRAY
       :value: "ARRAY"
 
@@ -246,53 +213,13 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: USER_DEFINED_TYPE
       :value: "USER_DEFINED_TYPE"
 
+   .. py:attribute:: VARIANT
+      :value: "VARIANT"
+
 .. autoclass:: ConnectionInfo
    :members:
    :undoc-members:
 
-.. py:class:: ConnectionInfoSecurableKind
-
-   Kind of connection securable.
-
-   .. py:attribute:: CONNECTION_BIGQUERY
-      :value: "CONNECTION_BIGQUERY"
-
-   .. py:attribute:: CONNECTION_BUILTIN_HIVE_METASTORE
-      :value: "CONNECTION_BUILTIN_HIVE_METASTORE"
-
-   .. py:attribute:: CONNECTION_DATABRICKS
-      :value: "CONNECTION_DATABRICKS"
-
-   .. py:attribute:: CONNECTION_EXTERNAL_HIVE_METASTORE
-      :value: "CONNECTION_EXTERNAL_HIVE_METASTORE"
-
-   .. py:attribute:: CONNECTION_GLUE
-      :value: "CONNECTION_GLUE"
-
-   .. py:attribute:: CONNECTION_HTTP_BEARER
-      :value: "CONNECTION_HTTP_BEARER"
-
-   .. py:attribute:: CONNECTION_MYSQL
-      :value: "CONNECTION_MYSQL"
-
-   .. py:attribute:: CONNECTION_ONLINE_CATALOG
-      :value: "CONNECTION_ONLINE_CATALOG"
-
-   .. py:attribute:: CONNECTION_POSTGRESQL
-      :value: "CONNECTION_POSTGRESQL"
-
-   .. py:attribute:: CONNECTION_REDSHIFT
-      :value: "CONNECTION_REDSHIFT"
-
-   .. py:attribute:: CONNECTION_SNOWFLAKE
-      :value: "CONNECTION_SNOWFLAKE"
-
-   .. py:attribute:: CONNECTION_SQLDW
-      :value: "CONNECTION_SQLDW"
-
-   .. py:attribute:: CONNECTION_SQLSERVER
-      :value: "CONNECTION_SQLSERVER"
-
 .. py:class:: ConnectionType
 
    The type of connection.
@@ -342,6 +269,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CreateCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: CreateExternalLocation
    :members:
    :undoc-members:
@@ -373,7 +304,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: CreateFunctionSecurityType
 
-   Function security type.
+   The security type of the function.
 
    .. py:attribute:: DEFINER
       :value: "DEFINER"
@@ -403,10 +334,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: CreateOnlineTableRequest
-   :members:
-   :undoc-members:
-
 .. autoclass:: CreateRegisteredModelRequest
    :members:
    :undoc-members:
@@ -431,6 +358,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CredentialInfo
+   :members:
+   :undoc-members:
+
+.. py:class:: CredentialPurpose
+
+   .. py:attribute:: SERVICE
+      :value: "SERVICE"
+
+   .. py:attribute:: STORAGE
+      :value: "STORAGE"
+
 .. py:class:: CredentialType
 
    The type of credential.
@@ -441,6 +380,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: USERNAME_PASSWORD
       :value: "USERNAME_PASSWORD"
 
+.. autoclass:: CredentialValidationResult
+   :members:
+   :undoc-members:
+
 .. autoclass:: CurrentWorkspaceBindings
    :members:
    :undoc-members:
@@ -518,6 +461,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: WORKDAY_RAAS_FORMAT
       :value: "WORKDAY_RAAS_FORMAT"
 
+.. autoclass:: DatabricksGcpServiceAccount
+   :members:
+   :undoc-members:
+
 .. autoclass:: DatabricksGcpServiceAccountRequest
    :members:
    :undoc-members:
@@ -530,6 +477,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteCredentialResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -636,7 +587,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: FunctionInfoSecurityType
 
-   Function security type.
+   The security type of the function.
 
    .. py:attribute:: DEFINER
       :value: "DEFINER"
@@ -683,6 +634,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GenerateTemporaryServiceCredentialAzureOptions
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryServiceCredentialGcpOptions
+   :members:
+   :undoc-members:
+
+.. autoclass:: GenerateTemporaryServiceCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: GenerateTemporaryTableCredentialRequest
    :members:
    :undoc-members:
@@ -696,6 +659,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
@@ -722,8 +688,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: IsolationMode
 
-   Whether the current securable is accessible from all workspaces or a specific set of workspaces.
-
    .. py:attribute:: ISOLATION_MODE_ISOLATED
       :value: "ISOLATION_MODE_ISOLATED"
 
@@ -746,6 +710,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListCredentialsResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListExternalLocationsResponse
    :members:
    :undoc-members:
@@ -1070,6 +1038,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CREATE_FOREIGN_CATALOG
       :value: "CREATE_FOREIGN_CATALOG"
 
+   .. py:attribute:: CREATE_FOREIGN_SECURABLE
+      :value: "CREATE_FOREIGN_SECURABLE"
+
    .. py:attribute:: CREATE_FUNCTION
       :value: "CREATE_FUNCTION"
 
@@ -1185,6 +1156,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ACTIVE
       :value: "ACTIVE"
 
+   .. py:attribute:: DEGRADED
+      :value: "DEGRADED"
+
    .. py:attribute:: DELETING
       :value: "DELETING"
 
@@ -1236,9 +1210,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CLEAN_ROOM
+      :value: "CLEAN_ROOM"
+
    .. py:attribute:: CONNECTION
       :value: "CONNECTION"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
@@ -1379,6 +1359,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: VIEW
       :value: "VIEW"
 
+.. autoclass:: TemporaryCredentials
+   :members:
+   :undoc-members:
+
 .. autoclass:: TriggeredUpdateStatus
    :members:
    :undoc-members:
@@ -1396,6 +1380,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CATALOG
       :value: "CATALOG"
 
+   .. py:attribute:: CREDENTIAL
+      :value: "CREDENTIAL"
+
    .. py:attribute:: EXTERNAL_LOCATION
       :value: "EXTERNAL_LOCATION"
 
@@ -1410,6 +1397,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: UpdateCredentialRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateExternalLocation
    :members:
    :undoc-members:
@@ -1476,6 +1467,27 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ValidateCredentialRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: ValidateCredentialResponse
+   :members:
+   :undoc-members:
+
+.. py:class:: ValidateCredentialResult
+
+   An enum representing the result of the file operation.
+
+   .. py:attribute:: FAIL
+      :value: "FAIL"
+
+   .. py:attribute:: PASS
+      :value: "PASS"
+
+   .. py:attribute:: SKIP
+      :value: "SKIP"
+
 .. autoclass:: ValidateStorageCredential
    :members:
    :undoc-members:
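A hedged sketch of the temporary service-credential flow implied by the new `GenerateTemporaryServiceCredential*` and `TemporaryCredentials` classes above; the `w.credentials` accessor and the argument name are assumptions:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # 'my-service-credential' is a hypothetical credential name
    temp = w.credentials.generate_temporary_service_credential(
        credential_name="my-service-credential")
    print(temp.expiration_time)  # TemporaryCredentials carries an expiry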
diff --git a/docs/dbdataclasses/cleanrooms.rst b/docs/dbdataclasses/cleanrooms.rst
new file mode 100644
index 000000000..85ec98250
--- /dev/null
+++ b/docs/dbdataclasses/cleanrooms.rst
@@ -0,0 +1,158 @@
+Clean Rooms
+===========
+
+These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.cleanrooms`` module.
+
+.. py:currentmodule:: databricks.sdk.service.cleanrooms
+.. autoclass:: CleanRoom
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAccessRestricted
+
+   .. py:attribute:: CSP_MISMATCH
+      :value: "CSP_MISMATCH"
+
+   .. py:attribute:: NO_RESTRICTION
+      :value: "NO_RESTRICTION"
+
+.. autoclass:: CleanRoomAsset
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAssetAssetType
+
+   .. py:attribute:: FOREIGN_TABLE
+      :value: "FOREIGN_TABLE"
+
+   .. py:attribute:: NOTEBOOK_FILE
+      :value: "NOTEBOOK_FILE"
+
+   .. py:attribute:: TABLE
+      :value: "TABLE"
+
+   .. py:attribute:: VIEW
+      :value: "VIEW"
+
+   .. py:attribute:: VOLUME
+      :value: "VOLUME"
+
+.. autoclass:: CleanRoomAssetForeignTable
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetForeignTableLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetNotebook
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomAssetStatusEnum
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
+   .. py:attribute:: PERMISSION_DENIED
+      :value: "PERMISSION_DENIED"
+
+.. autoclass:: CleanRoomAssetTable
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetTableLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetView
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetViewLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomAssetVolumeLocalDetails
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomCollaborator
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomNotebookTaskRun
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomOutputCatalog
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomOutputCatalogOutputCatalogStatus
+
+   .. py:attribute:: CREATED
+      :value: "CREATED"
+
+   .. py:attribute:: NOT_CREATED
+      :value: "NOT_CREATED"
+
+   .. py:attribute:: NOT_ELIGIBLE
+      :value: "NOT_ELIGIBLE"
+
+.. autoclass:: CleanRoomRemoteDetail
+   :members:
+   :undoc-members:
+
+.. py:class:: CleanRoomStatusEnum
+
+   .. py:attribute:: ACTIVE
+      :value: "ACTIVE"
+
+   .. py:attribute:: DELETED
+      :value: "DELETED"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: PROVISIONING
+      :value: "PROVISIONING"
+
+.. autoclass:: CollaboratorJobRunInfo
+   :members:
+   :undoc-members:
+
+.. autoclass:: ComplianceSecurityProfile
+   :members:
+   :undoc-members:
+
+.. autoclass:: CreateCleanRoomOutputCatalogResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteCleanRoomAssetResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomAssetsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomNotebookTaskRunsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: ListCleanRoomsResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateCleanRoomRequest
+   :members:
+   :undoc-members:
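A hedged sketch of how the new clean-room dataclasses surface in practice, assuming the workspace client exposes the service as `w.clean_rooms`:

.. code-block:: python

    from databricks.sdk import WorkspaceClient

    w = WorkspaceClient()
    # each item is a CleanRoom; status is a CleanRoomStatusEnum value
    for room in w.clean_rooms.list():
        print(room.name, room.status)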
diff --git a/docs/dbdataclasses/compute.rst b/docs/dbdataclasses/compute.rst
index 0066f0374..b90ec99f7 100644
--- a/docs/dbdataclasses/compute.rst
+++ b/docs/dbdataclasses/compute.rst
@@ -299,6 +299,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: CustomPolicyTag
+   :members:
+   :undoc-members:
+
 .. autoclass:: DataPlaneEventDetails
    :members:
    :undoc-members:
@@ -316,10 +320,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: DataSecurityMode
 
    Data security mode decides what data governance model to use when accessing data from a cluster.
-   * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.
+   The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will choose the most appropriate access mode depending on your compute configuration. * `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias for `SINGLE_USER`.
+   The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a single user specified in `single_user_name`. Most programming languages, cluster features and data governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data and credentials. Most data governance features are supported in this mode. But programming languages and cluster features might be limited.
    The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for future Databricks Runtime versions:
    * `LEGACY_TABLE_ACL`: This mode is for users migrating from legacy Table ACL clusters. * `LEGACY_PASSTHROUGH`: This mode is for users migrating from legacy Passthrough on high concurrency clusters. * `LEGACY_SINGLE_USER`: This mode is for users migrating from legacy Passthrough on standard clusters. * `LEGACY_SINGLE_USER_STANDARD`: This mode provides a way that doesn’t have UC nor passthrough enabled.
 
+   .. py:attribute:: DATA_SECURITY_MODE_AUTO
+      :value: "DATA_SECURITY_MODE_AUTO"
+
+   .. py:attribute:: DATA_SECURITY_MODE_DEDICATED
+      :value: "DATA_SECURITY_MODE_DEDICATED"
+
+   .. py:attribute:: DATA_SECURITY_MODE_STANDARD
+      :value: "DATA_SECURITY_MODE_STANDARD"
+
    .. py:attribute:: LEGACY_PASSTHROUGH
       :value: "LEGACY_PASSTHROUGH"
 
@@ -485,6 +499,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EventType
 
+   .. py:attribute:: ADD_NODES_FAILED
+      :value: "ADD_NODES_FAILED"
+
+   .. py:attribute:: AUTOMATIC_CLUSTER_UPDATE
+      :value: "AUTOMATIC_CLUSTER_UPDATE"
+
+   .. py:attribute:: AUTOSCALING_BACKOFF
+      :value: "AUTOSCALING_BACKOFF"
+
+   .. py:attribute:: AUTOSCALING_FAILED
+      :value: "AUTOSCALING_FAILED"
+
    .. py:attribute:: AUTOSCALING_STATS_REPORT
       :value: "AUTOSCALING_STATS_REPORT"
 
@@ -782,6 +808,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: Kind
+
+   The kind of compute described by this compute specification.
+   Depending on `kind`, different validations and default values will be applied.
+   The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
+
+   .. py:attribute:: CLASSIC_PREVIEW
+      :value: "CLASSIC_PREVIEW"
+
 .. py:class:: Language
 
    .. py:attribute:: PYTHON
diff --git a/docs/dbdataclasses/dashboards.rst b/docs/dbdataclasses/dashboards.rst
index 91de6ccb2..114bd1f5b 100644
--- a/docs/dbdataclasses/dashboards.rst
+++ b/docs/dbdataclasses/dashboards.rst
@@ -4,15 +4,11 @@ Dashboards
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.dashboards`` module.
 
 .. py:currentmodule:: databricks.sdk.service.dashboards
-.. autoclass:: CreateDashboardRequest
+.. autoclass:: CancelQueryExecutionResponse
    :members:
    :undoc-members:
 
-.. autoclass:: CreateScheduleRequest
-   :members:
-   :undoc-members:
-
-.. autoclass:: CreateSubscriptionRequest
+.. autoclass:: CancelQueryExecutionResponseStatus
    :members:
    :undoc-members:
 
@@ -29,6 +25,59 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: DASHBOARD_VIEW_BASIC
       :value: "DASHBOARD_VIEW_BASIC"
 
+.. py:class:: DataType
+
+   .. py:attribute:: DATA_TYPE_ARRAY
+      :value: "DATA_TYPE_ARRAY"
+
+   .. py:attribute:: DATA_TYPE_BIG_INT
+      :value: "DATA_TYPE_BIG_INT"
+
+   .. py:attribute:: DATA_TYPE_BINARY
+      :value: "DATA_TYPE_BINARY"
+
+   .. py:attribute:: DATA_TYPE_BOOLEAN
+      :value: "DATA_TYPE_BOOLEAN"
+
+   .. py:attribute:: DATA_TYPE_DATE
+      :value: "DATA_TYPE_DATE"
+
+   .. py:attribute:: DATA_TYPE_DECIMAL
+      :value: "DATA_TYPE_DECIMAL"
+
+   .. py:attribute:: DATA_TYPE_DOUBLE
+      :value: "DATA_TYPE_DOUBLE"
+
+   .. py:attribute:: DATA_TYPE_FLOAT
+      :value: "DATA_TYPE_FLOAT"
+
+   .. py:attribute:: DATA_TYPE_INT
+      :value: "DATA_TYPE_INT"
+
+   .. py:attribute:: DATA_TYPE_INTERVAL
+      :value: "DATA_TYPE_INTERVAL"
+
+   .. py:attribute:: DATA_TYPE_MAP
+      :value: "DATA_TYPE_MAP"
+
+   .. py:attribute:: DATA_TYPE_SMALL_INT
+      :value: "DATA_TYPE_SMALL_INT"
+
+   .. py:attribute:: DATA_TYPE_STRING
+      :value: "DATA_TYPE_STRING"
+
+   .. py:attribute:: DATA_TYPE_STRUCT
+      :value: "DATA_TYPE_STRUCT"
+
+   .. py:attribute:: DATA_TYPE_TIMESTAMP
+      :value: "DATA_TYPE_TIMESTAMP"
+
+   .. py:attribute:: DATA_TYPE_TINY_INT
+      :value: "DATA_TYPE_TINY_INT"
+
+   .. py:attribute:: DATA_TYPE_VOID
+      :value: "DATA_TYPE_VOID"
+
 .. autoclass:: DeleteScheduleResponse
    :members:
    :undoc-members:
@@ -37,6 +86,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Empty
+   :members:
+   :undoc-members:
+
+.. autoclass:: ExecutePublishedDashboardQueryRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: ExecuteQueryResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: GenieAttachment
    :members:
    :undoc-members:
@@ -65,6 +126,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: GetPublishedDashboardEmbeddedResponse
+   :members:
+   :undoc-members:
+
 .. py:class:: LifecycleState
 
    .. py:attribute:: ACTIVE
@@ -166,6 +231,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION
       :value: "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
 
+   .. py:attribute:: NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE
+      :value: "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE"
+
    .. py:attribute:: NO_QUERY_TO_VISUALIZE_EXCEPTION
       :value: "NO_QUERY_TO_VISUALIZE_EXCEPTION"
 
@@ -187,6 +255,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: SQL_EXECUTION_EXCEPTION
       :value: "SQL_EXECUTION_EXCEPTION"
 
+   .. py:attribute:: STOP_PROCESS_DUE_TO_AUTO_REGENERATE
+      :value: "STOP_PROCESS_DUE_TO_AUTO_REGENERATE"
+
    .. py:attribute:: TABLES_MISSING_EXCEPTION
       :value: "TABLES_MISSING_EXCEPTION"
 
@@ -210,7 +281,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: MessageStatus
 
-   MesssageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the users question. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or the executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
+   MessageStatus. The possible values are: * `FETCHING_METADATA`: Fetching metadata from the data sources. * `FILTERING_CONTEXT`: Running smart context step to determine relevant context. * `ASKING_AI`: Waiting for the LLM to respond to the user's question. * `PENDING_WAREHOUSE`: Waiting for the warehouse before the SQL query can start executing. * `EXECUTING_QUERY`: Executing AI provided SQL query. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. **Important: The message status will stay in the `EXECUTING_QUERY` until a client calls [getMessageQueryResult](:method:genie/getMessageQueryResult)**. * `FAILED`: Generating a response or executing the query failed. Please see `error` field. * `COMPLETED`: Message processing is completed. Results are in the `attachments` field. Get the SQL query result by calling [getMessageQueryResult](:method:genie/getMessageQueryResult) API. * `SUBMITTED`: Message has been submitted. * `QUERY_RESULT_EXPIRED`: SQL result is not available anymore. The user needs to execute the query again. * `CANCELLED`: Message has been cancelled.
 
    .. py:attribute:: ASKING_AI
       :value: "ASKING_AI"
@@ -233,6 +304,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: FILTERING_CONTEXT
       :value: "FILTERING_CONTEXT"
 
+   .. py:attribute:: PENDING_WAREHOUSE
+      :value: "PENDING_WAREHOUSE"
+
    .. py:attribute:: QUERY_RESULT_EXPIRED
       :value: "QUERY_RESULT_EXPIRED"
 
@@ -243,6 +317,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PendingStatus
+   :members:
+   :undoc-members:
+
+.. autoclass:: PollQueryStatusResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: PollQueryStatusResponseData
+   :members:
+   :undoc-members:
+
 .. autoclass:: PublishRequest
    :members:
    :undoc-members:
@@ -255,6 +341,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: QueryResponseStatus
+   :members:
+   :undoc-members:
+
+.. autoclass:: QuerySchema
+   :members:
+   :undoc-members:
+
+.. autoclass:: QuerySchemaColumn
+   :members:
+   :undoc-members:
+
 .. autoclass:: Result
    :members:
    :undoc-members:
@@ -287,22 +385,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: TextAttachment
+.. autoclass:: SuccessStatus
    :members:
    :undoc-members:
 
-.. autoclass:: TrashDashboardResponse
-   :members:
-   :undoc-members:
-
-.. autoclass:: UnpublishDashboardResponse
+.. autoclass:: TextAttachment
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateDashboardRequest
+.. autoclass:: TrashDashboardResponse
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateScheduleRequest
+.. autoclass:: UnpublishDashboardResponse
    :members:
    :undoc-members:
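A minimal polling sketch for the `EXECUTING_QUERY` state called out in the `MessageStatus` docstring above; the Genie accessors are assumptions based on the method names cited there:

.. code-block:: python

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service.dashboards import MessageStatus

    w = WorkspaceClient()
    # placeholders for an existing Genie conversation
    space_id = "my-space-id"
    conversation_id = "my-conversation-id"
    message_id = "my-message-id"

    msg = w.genie.get_message(space_id, conversation_id, message_id)
    if msg.status == MessageStatus.EXECUTING_QUERY:
        # the status stays EXECUTING_QUERY until this is called
        result = w.genie.get_message_query_result(space_id, conversation_id, message_id)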
diff --git a/docs/dbdataclasses/iam.rst b/docs/dbdataclasses/iam.rst
index 643da3d47..6df58ae4e 100644
--- a/docs/dbdataclasses/iam.rst
+++ b/docs/dbdataclasses/iam.rst
@@ -12,10 +12,22 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: Actor
+   :members:
+   :undoc-members:
+
+.. autoclass:: CheckPolicyResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ComplexValue
    :members:
    :undoc-members:
 
+.. autoclass:: ConsistencyToken
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteResponse
    :members:
    :undoc-members:
@@ -242,6 +254,20 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: RequestAuthzIdentity
+
+   Defines the identity to be used for authZ of the request on the server side. See the one-pager for more information: http://go/acl/service-identity
+
+   .. py:attribute:: REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY
+      :value: "REQUEST_AUTHZ_IDENTITY_SERVICE_IDENTITY"
+
+   .. py:attribute:: REQUEST_AUTHZ_IDENTITY_USER_CONTEXT
+      :value: "REQUEST_AUTHZ_IDENTITY_USER_CONTEXT"
+
+.. autoclass:: ResourceInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: ResourceMeta
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/index.rst b/docs/dbdataclasses/index.rst
index 987bee7f5..3ecb9c13f 100644
--- a/docs/dbdataclasses/index.rst
+++ b/docs/dbdataclasses/index.rst
@@ -8,6 +8,7 @@ Dataclasses
    apps
    billing
    catalog
+   cleanrooms
    compute
    dashboards
    files
diff --git a/docs/dbdataclasses/jobs.rst b/docs/dbdataclasses/jobs.rst
index 3aa0db043..e85322a66 100644
--- a/docs/dbdataclasses/jobs.rst
+++ b/docs/dbdataclasses/jobs.rst
@@ -28,6 +28,95 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: CleanRoomTaskRunLifeCycleState
+
+   Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to remove coupling with the jobs API definition.
+
+   .. py:attribute:: BLOCKED
+      :value: "BLOCKED"
+
+   .. py:attribute:: INTERNAL_ERROR
+      :value: "INTERNAL_ERROR"
+
+   .. py:attribute:: PENDING
+      :value: "PENDING"
+
+   .. py:attribute:: QUEUED
+      :value: "QUEUED"
+
+   .. py:attribute:: RUNNING
+      :value: "RUNNING"
+
+   .. py:attribute:: RUN_LIFE_CYCLE_STATE_UNSPECIFIED
+      :value: "RUN_LIFE_CYCLE_STATE_UNSPECIFIED"
+
+   .. py:attribute:: SKIPPED
+      :value: "SKIPPED"
+
+   .. py:attribute:: TERMINATED
+      :value: "TERMINATED"
+
+   .. py:attribute:: TERMINATING
+      :value: "TERMINATING"
+
+   .. py:attribute:: WAITING_FOR_RETRY
+      :value: "WAITING_FOR_RETRY"
+
+.. py:class:: CleanRoomTaskRunResultState
+
+   Copied from elastic-spark-common/api/messages/runs.proto. Using the original definition to avoid cyclic dependency.
+
+   .. py:attribute:: CANCELED
+      :value: "CANCELED"
+
+   .. py:attribute:: DISABLED
+      :value: "DISABLED"
+
+   .. py:attribute:: EVICTED
+      :value: "EVICTED"
+
+   .. py:attribute:: EXCLUDED
+      :value: "EXCLUDED"
+
+   .. py:attribute:: FAILED
+      :value: "FAILED"
+
+   .. py:attribute:: MAXIMUM_CONCURRENT_RUNS_REACHED
+      :value: "MAXIMUM_CONCURRENT_RUNS_REACHED"
+
+   .. py:attribute:: RUN_RESULT_STATE_UNSPECIFIED
+      :value: "RUN_RESULT_STATE_UNSPECIFIED"
+
+   .. py:attribute:: SUCCESS
+      :value: "SUCCESS"
+
+   .. py:attribute:: SUCCESS_WITH_FAILURES
+      :value: "SUCCESS_WITH_FAILURES"
+
+   .. py:attribute:: TIMEDOUT
+      :value: "TIMEDOUT"
+
+   .. py:attribute:: UPSTREAM_CANCELED
+      :value: "UPSTREAM_CANCELED"
+
+   .. py:attribute:: UPSTREAM_EVICTED
+      :value: "UPSTREAM_EVICTED"
+
+   .. py:attribute:: UPSTREAM_FAILED
+      :value: "UPSTREAM_FAILED"
+
+.. autoclass:: CleanRoomTaskRunState
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomsNotebookTask
+   :members:
+   :undoc-members:
+
+.. autoclass:: CleanRoomsNotebookTaskCleanRoomsNotebookTaskOutput
+   :members:
+   :undoc-members:
+
 .. autoclass:: ClusterInstance
    :members:
    :undoc-members:
@@ -317,7 +406,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: JobsHealthMetric
 
    Specifies the health metric that is being evaluated for a particular health rule.
-   * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Private Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Private Preview.
+   * `RUN_DURATION_SECONDS`: Expected total time for a run in seconds. * `STREAMING_BACKLOG_BYTES`: An estimate of the maximum bytes of data waiting to be consumed across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_RECORDS`: An estimate of the maximum offset lag across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_SECONDS`: An estimate of the maximum consumer delay across all streams. This metric is in Public Preview. * `STREAMING_BACKLOG_FILES`: An estimate of the maximum number of outstanding files across all streams. This metric is in Public Preview.
 
    .. py:attribute:: RUN_DURATION_SECONDS
       :value: "RUN_DURATION_SECONDS"
@@ -369,6 +458,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: OutputSchemaInfo
+   :members:
+   :undoc-members:
+
 .. py:class:: PauseStatus
 
    .. py:attribute:: PAUSED
@@ -377,6 +470,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: UNPAUSED
       :value: "UNPAUSED"
 
+.. py:class:: PerformanceTarget
+
+   PerformanceTarget defines how performant (lower latency) or cost-efficient the execution of a run on serverless compute should be. The performance mode on the job or pipeline should map to a performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget).
+
+   .. py:attribute:: COST_OPTIMIZED
+      :value: "COST_OPTIMIZED"
+
+   .. py:attribute:: PERFORMANCE_OPTIMIZED
+      :value: "PERFORMANCE_OPTIMIZED"
+
 .. autoclass:: PeriodicTriggerConfiguration
    :members:
    :undoc-members:
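A hedged sketch of the new `PerformanceTarget` enum; that job settings expose it as a `performance_target` field is an assumption:

.. code-block:: python

    from databricks.sdk.service.jobs import PerformanceTarget

    # prefer lower latency over cost for serverless runs
    target = PerformanceTarget.PERFORMANCE_OPTIMIZED
    # hypothetical usage: w.jobs.create(..., performance_target=target)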
@@ -802,6 +905,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    The code indicates why the run was terminated. Additional codes might be introduced in future releases. * `SUCCESS`: The run was completed successfully. * `USER_CANCELED`: The run was successfully canceled during execution by a user. * `CANCELED`: The run was canceled during execution by the Databricks platform; for example, if the maximum run duration was exceeded. * `SKIPPED`: Run was never executed, for example, if the upstream task run failed, the dependency type condition was not met, or there were no material tasks to execute. * `INTERNAL_ERROR`: The run encountered an unexpected error. Refer to the state message for further details. * `DRIVER_ERROR`: The run encountered an error while communicating with the Spark Driver. * `CLUSTER_ERROR`: The run failed due to a cluster error. Refer to the state message for further details. * `REPOSITORY_CHECKOUT_FAILED`: Failed to complete the checkout due to an error when communicating with the third party service. * `INVALID_CLUSTER_REQUEST`: The run failed because it issued an invalid request to start the cluster. * `WORKSPACE_RUN_LIMIT_EXCEEDED`: The workspace has reached the quota for the maximum number of concurrent active runs. Consider scheduling the runs over a larger time frame. * `FEATURE_DISABLED`: The run failed because it tried to access a feature unavailable for the workspace. * `CLUSTER_REQUEST_LIMIT_EXCEEDED`: The number of cluster creation, start, and upsize requests have exceeded the allotted rate limit. Consider spreading the run execution over a larger time frame. * `STORAGE_ACCESS_ERROR`: The run failed due to an error when accessing the customer blob storage. Refer to the state message for further details. * `RUN_EXECUTION_ERROR`: The run was completed with task failures. For more details, refer to the state message or run output. * `UNAUTHORIZED_ERROR`: The run failed due to a permission issue while accessing a resource. Refer to the state message for further details. * `LIBRARY_INSTALLATION_ERROR`: The run failed while installing the user-requested library. Refer to the state message for further details. The causes might include, but are not limited to: The provided library is invalid, there are insufficient permissions to install the library, and so forth. * `MAX_CONCURRENT_RUNS_EXCEEDED`: The scheduled run exceeds the limit of maximum concurrent runs set for the job. * `MAX_SPARK_CONTEXTS_EXCEEDED`: The run is scheduled on a cluster that has already reached the maximum number of contexts it is configured to create. See: [Link]. * `RESOURCE_NOT_FOUND`: A resource necessary for run execution does not exist. Refer to the state message for further details. * `INVALID_RUN_CONFIGURATION`: The run failed due to an invalid configuration. Refer to the state message for further details. * `CLOUD_FAILURE`: The run failed due to a cloud provider issue. Refer to the state message for further details. * `MAX_JOB_QUEUE_SIZE_EXCEEDED`: The run was skipped due to reaching the job level queue size limit.
    [Link]: https://kb.databricks.com/en_US/notebooks/too-many-execution-contexts-are-open-right-now
 
+   .. py:attribute:: BUDGET_POLICY_LIMIT_EXCEEDED
+      :value: "BUDGET_POLICY_LIMIT_EXCEEDED"
+
    .. py:attribute:: CANCELED
       :value: "CANCELED"
 
@@ -900,7 +1006,7 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 .. py:class:: TriggerType
 
    The type of trigger that fired this run.
-   * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs you triggered a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update.
+   * `PERIODIC`: Schedules that periodically trigger runs, such as a cron scheduler. * `ONE_TIME`: One time triggers that fire a single run. This occurs when you trigger a single run on demand through the UI or the API. * `RETRY`: Indicates a run that is triggered as a retry of a previously failed run. This occurs when you request to re-run the job in case of failures. * `RUN_JOB_TASK`: Indicates a run that is triggered using a Run Job task. * `FILE_ARRIVAL`: Indicates a run that is triggered by a file arrival. * `TABLE`: Indicates a run that is triggered by a table update. * `CONTINUOUS_RESTART`: Indicates a run created by the user to manually restart a continuous job run.
 
    .. py:attribute:: FILE_ARRIVAL
       :value: "FILE_ARRIVAL"
diff --git a/docs/dbdataclasses/marketplace.rst b/docs/dbdataclasses/marketplace.rst
index bb48967db..c1029d842 100644
--- a/docs/dbdataclasses/marketplace.rst
+++ b/docs/dbdataclasses/marketplace.rst
@@ -29,6 +29,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ASSET_TYPE_NOTEBOOK
       :value: "ASSET_TYPE_NOTEBOOK"
 
+   .. py:attribute:: ASSET_TYPE_PARTNER_INTEGRATION
+      :value: "ASSET_TYPE_PARTNER_INTEGRATION"
+
 .. autoclass:: BatchGetListingsResponse
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/oauth2.rst b/docs/dbdataclasses/oauth2.rst
index 6265f6648..10202e55e 100644
--- a/docs/dbdataclasses/oauth2.rst
+++ b/docs/dbdataclasses/oauth2.rst
@@ -24,10 +24,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: DataPlaneInfo
-   :members:
-   :undoc-members:
-
 .. autoclass:: DeleteCustomAppIntegrationOutput
    :members:
    :undoc-members:
@@ -40,6 +36,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: FederationPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: GetCustomAppIntegrationOutput
    :members:
    :undoc-members:
@@ -60,10 +60,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: ListFederationPoliciesResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListServicePrincipalSecretsResponse
    :members:
    :undoc-members:
 
+.. autoclass:: OidcFederationPolicy
+   :members:
+   :undoc-members:
+
 .. autoclass:: PublishedAppOutput
    :members:
    :undoc-members:
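A hedged sketch of the new OIDC federation policy classes; the field names (`issuer`, `audiences`, `subject`) are assumptions inferred from the class names:

.. code-block:: python

    from databricks.sdk.service.oauth2 import FederationPolicy, OidcFederationPolicy

    policy = FederationPolicy(
        oidc_policy=OidcFederationPolicy(
            issuer="https://token.actions.githubusercontent.com",  # hypothetical issuer
            audiences=["my-audience"],
            subject="repo:org/repo:ref:refs/heads/main",
        ))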
diff --git a/docs/dbdataclasses/pipelines.rst b/docs/dbdataclasses/pipelines.rst
index 9f419f160..903cb52ff 100644
--- a/docs/dbdataclasses/pipelines.rst
+++ b/docs/dbdataclasses/pipelines.rst
@@ -20,6 +20,31 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: DayOfWeek
+
+   Days of the week on which the restart is allowed to happen (within a five-hour window starting at start_hour). If not specified, all days of the week will be used.
+
+   .. py:attribute:: FRIDAY
+      :value: "FRIDAY"
+
+   .. py:attribute:: MONDAY
+      :value: "MONDAY"
+
+   .. py:attribute:: SATURDAY
+      :value: "SATURDAY"
+
+   .. py:attribute:: SUNDAY
+      :value: "SUNDAY"
+
+   .. py:attribute:: THURSDAY
+      :value: "THURSDAY"
+
+   .. py:attribute:: TUESDAY
+      :value: "TUESDAY"
+
+   .. py:attribute:: WEDNESDAY
+      :value: "WEDNESDAY"
+
 .. autoclass:: DeletePipelineResponse
    :members:
    :undoc-members:
@@ -269,6 +294,14 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: RestartWindow
+   :members:
+   :undoc-members:
+
+.. autoclass:: RunAs
+   :members:
+   :undoc-members:
+
 .. autoclass:: SchemaSpec
    :members:
    :undoc-members:
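A hedged sketch of the restart window the `DayOfWeek` docstring above describes; whether `days_of_week` accepts a list is an assumption:

.. code-block:: python

    from databricks.sdk.service.pipelines import DayOfWeek, RestartWindow

    window = RestartWindow(
        start_hour=2,  # restarts may happen in the five-hour window from 02:00
        days_of_week=[DayOfWeek.SATURDAY, DayOfWeek.SUNDAY],
        time_zone_id="UTC",
    )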
diff --git a/docs/dbdataclasses/provisioning.rst b/docs/dbdataclasses/provisioning.rst
index 7990eae96..4c909d488 100644
--- a/docs/dbdataclasses/provisioning.rst
+++ b/docs/dbdataclasses/provisioning.rst
@@ -106,6 +106,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: VPC
       :value: "VPC"
 
+.. autoclass:: ExternalCustomerInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: GcpKeyInfo
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/serving.rst b/docs/dbdataclasses/serving.rst
index 3deefc873..abaeb5355 100644
--- a/docs/dbdataclasses/serving.rst
+++ b/docs/dbdataclasses/serving.rst
@@ -22,8 +22,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayGuardrailPiiBehaviorBehavior
 
-   Behavior for PII filter. Currently only 'BLOCK' is supported. If 'BLOCK' is set for the input guardrail and the request contains PII, the request is not sent to the model server and 400 status code is returned; if 'BLOCK' is set for the output guardrail and the model response contains PII, the PII info in the response is redacted and 400 status code is returned.
-
    .. py:attribute:: BLOCK
       :value: "BLOCK"
 
@@ -44,8 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayRateLimitKey
 
-   Key field for a rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
-
    .. py:attribute:: ENDPOINT
       :value: "ENDPOINT"
 
@@ -54,8 +50,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AiGatewayRateLimitRenewalPeriod
 
-   Renewal period field for a rate limit. Currently, only 'minute' is supported.
-
    .. py:attribute:: MINUTE
       :value: "MINUTE"
 
@@ -69,8 +63,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: AmazonBedrockConfigBedrockProvider
 
-   The underlying provider in Amazon Bedrock. Supported values (case insensitive) include: Anthropic, Cohere, AI21Labs, Amazon.
-
    .. py:attribute:: AI21LABS
       :value: "AI21LABS"
 
@@ -128,6 +120,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DataPlaneInfo
+   :members:
+   :undoc-members:
+
 .. autoclass:: DatabricksModelServingConfig
    :members:
    :undoc-members:
@@ -173,8 +169,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EndpointStateConfigUpdate
 
-   The state of an endpoint's config update. This informs the user if the pending_config is in progress, if the update failed, or if there is no update in progress. Note that if the endpoint's config_update state value is IN_PROGRESS, another update can not be made until the update completes or fails.
-
    .. py:attribute:: IN_PROGRESS
       :value: "IN_PROGRESS"
 
@@ -189,8 +183,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: EndpointStateReady
 
-   The state of an endpoint, indicating whether or not the endpoint is queryable. An endpoint is READY if all of the served entities in its active configuration are ready. If any of the actively served entities are in a non-ready state, the endpoint state will be NOT_READY.
-
    .. py:attribute:: NOT_READY
       :value: "NOT_READY"
 
@@ -201,18 +193,41 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EndpointTags
+   :members:
+   :undoc-members:
+
 .. autoclass:: ExportMetricsResponse
    :members:
    :undoc-members:
 
+.. autoclass:: ExternalFunctionRequest
+   :members:
+   :undoc-members:
+
+.. py:class:: ExternalFunctionRequestHttpMethod
+
+   .. py:attribute:: DELETE
+      :value: "DELETE"
+
+   .. py:attribute:: GET
+      :value: "GET"
+
+   .. py:attribute:: PATCH
+      :value: "PATCH"
+
+   .. py:attribute:: POST
+      :value: "POST"
+
+   .. py:attribute:: PUT
+      :value: "PUT"
+
 .. autoclass:: ExternalModel
    :members:
    :undoc-members:
 
 .. py:class:: ExternalModelProvider
 
-   The name of the provider for the external model. Currently, the supported providers are 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'google-cloud-vertex-ai', 'openai', and 'palm'.",
-
    .. py:attribute:: AI21LABS
       :value: "AI21LABS"
 
@@ -257,6 +272,10 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: HttpRequestResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: ListEndpointsResponse
    :members:
    :undoc-members:
@@ -281,10 +300,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PutAiGatewayRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutAiGatewayResponse
    :members:
    :undoc-members:
 
+.. autoclass:: PutRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: PutResponse
    :members:
    :undoc-members:
@@ -316,8 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RateLimitKey
 
-   Key field for a serving endpoint rate limit. Currently, only 'user' and 'endpoint' are supported, with 'endpoint' being the default if not specified.
-
    .. py:attribute:: ENDPOINT
       :value: "ENDPOINT"
 
@@ -326,8 +351,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: RateLimitRenewalPeriod
 
-   Renewal period field for a serving endpoint rate limit. Currently, only 'minute' is supported.
-
    .. py:attribute:: MINUTE
       :value: "MINUTE"
 
@@ -353,8 +376,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelInputWorkloadSize
 
-   The workload size of the served model. The workload size corresponds to a range of provisioned concurrency that the compute will autoscale between. A single unit of provisioned concurrency can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size will be 0.
-
    .. py:attribute:: LARGE
       :value: "LARGE"
 
@@ -366,9 +387,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelInputWorkloadType
 
-   The workload type of the served model. The workload type selects which type of compute to use in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU acceleration is available by selecting workload types like GPU_SMALL and others. See the available [GPU types].
-   [GPU types]: https://docs.databricks.com/machine-learning/model-serving/create-manage-serving-endpoints.html#gpu-workload-types
-
    .. py:attribute:: CPU
       :value: "CPU"
 
@@ -398,8 +416,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServedModelStateDeployment
 
-   The state of the served entity deployment. DEPLOYMENT_CREATING indicates that the served entity is not ready yet because the deployment is still being created (i.e container image is building, model server is deploying for the first time, etc.). DEPLOYMENT_RECOVERING indicates that the served entity was previously in a ready state but no longer is and is attempting to recover. DEPLOYMENT_READY indicates that the served entity is ready to receive traffic. DEPLOYMENT_FAILED indicates that there was an error trying to bring up the served entity (e.g container image build failed, the model server failed to start due to a model loading error, etc.) DEPLOYMENT_ABORTED indicates that the deployment was terminated likely due to a failure in bringing up another served entity under the same endpoint and config version.
-
    .. py:attribute:: ABORTED
       :value: "ABORTED"
 
@@ -437,8 +453,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
 .. py:class:: ServingEndpointDetailedPermissionLevel
 
-   The permission level of the principal making the request.
-
    .. py:attribute:: CAN_MANAGE
       :value: "CAN_MANAGE"
 
@@ -477,6 +491,23 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. py:class:: ServingModelWorkloadType
+
+   .. py:attribute:: CPU
+      :value: "CPU"
+
+   .. py:attribute:: GPU_LARGE
+      :value: "GPU_LARGE"
+
+   .. py:attribute:: GPU_MEDIUM
+      :value: "GPU_MEDIUM"
+
+   .. py:attribute:: GPU_SMALL
+      :value: "GPU_SMALL"
+
+   .. py:attribute:: MULTIGPU_MEDIUM
+      :value: "MULTIGPU_MEDIUM"
+
 .. autoclass:: TrafficConfig
    :members:
    :undoc-members:
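
For context, a minimal sketch of the new serving enums documented above (`ExternalFunctionRequestHttpMethod`, `ServingModelWorkloadType`); it assumes an SDK build that already includes this generated code:

```python
from databricks.sdk.service.serving import (ExternalFunctionRequestHttpMethod,
                                            ServingModelWorkloadType)

# Enum members carry the wire values shown in the docs above.
method = ExternalFunctionRequestHttpMethod.POST
workload = ServingModelWorkloadType.GPU_SMALL
print(method.value, workload.value)  # POST GPU_SMALL
```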
diff --git a/docs/dbdataclasses/settings.rst b/docs/dbdataclasses/settings.rst
index 12043e3c5..2325c4023 100644
--- a/docs/dbdataclasses/settings.rst
+++ b/docs/dbdataclasses/settings.rst
@@ -4,6 +4,37 @@ Settings
 These dataclasses are used in the SDK to represent API requests and responses for services in the ``databricks.sdk.service.settings`` module.
 
 .. py:currentmodule:: databricks.sdk.service.settings
+.. autoclass:: AccountIpAccessEnable
+   :members:
+   :undoc-members:
+
+.. autoclass:: AibiDashboardEmbeddingAccessPolicy
+   :members:
+   :undoc-members:
+
+.. py:class:: AibiDashboardEmbeddingAccessPolicyAccessPolicyType
+
+   .. py:attribute:: ALLOW_ALL_DOMAINS
+      :value: "ALLOW_ALL_DOMAINS"
+
+   .. py:attribute:: ALLOW_APPROVED_DOMAINS
+      :value: "ALLOW_APPROVED_DOMAINS"
+
+   .. py:attribute:: DENY_ALL_DOMAINS
+      :value: "DENY_ALL_DOMAINS"
+
+.. autoclass:: AibiDashboardEmbeddingAccessPolicySetting
+   :members:
+   :undoc-members:
+
+.. autoclass:: AibiDashboardEmbeddingApprovedDomains
+   :members:
+   :undoc-members:
+
+.. autoclass:: AibiDashboardEmbeddingApprovedDomainsSetting
+   :members:
+   :undoc-members:
+
 .. autoclass:: AutomaticClusterUpdateSetting
    :members:
    :undoc-members:
@@ -108,9 +139,15 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: HIPAA
       :value: "HIPAA"
 
+   .. py:attribute:: HITRUST
+      :value: "HITRUST"
+
    .. py:attribute:: IRAP_PROTECTED
       :value: "IRAP_PROTECTED"
 
+   .. py:attribute:: ISMAP
+      :value: "ISMAP"
+
    .. py:attribute:: ITAR_EAR
       :value: "ITAR_EAR"
 
@@ -188,6 +225,18 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: DeleteAccountIpAccessEnableResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
+   :members:
+   :undoc-members:
+
+.. autoclass:: DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
+   :members:
+   :undoc-members:
+
 .. autoclass:: DeleteDefaultNamespaceSettingResponse
    :members:
    :undoc-members:
@@ -249,6 +298,83 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: EgressNetworkPolicy
+   :members:
+   :undoc-members:
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicy
+   :members:
+   :undoc-members:
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyInternetDestination
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationFilteringProtocol
+
+   The filtering protocol used by the DP. For private and public preview, SEG will only support TCP filtering (i.e. DNS based filtering, filtering by destination IP address), so protocol will be set to TCP by default and hidden from the user. In the future, users may be able to select HTTP filtering (i.e. SNI based filtering, filtering by FQDN).
+
+   .. py:attribute:: TCP
+      :value: "TCP"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyInternetDestinationInternetDestinationType
+
+   .. py:attribute:: FQDN
+      :value: "FQDN"
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyLogOnlyMode
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeLogOnlyModeType
+
+   .. py:attribute:: ALL_SERVICES
+      :value: "ALL_SERVICES"
+
+   .. py:attribute:: SELECTED_SERVICES
+      :value: "SELECTED_SERVICES"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyLogOnlyModeWorkloadType
+
+   The values should match the list of workloads used in networkconfig.proto
+
+   .. py:attribute:: DBSQL
+      :value: "DBSQL"
+
+   .. py:attribute:: ML_SERVING
+      :value: "ML_SERVING"
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyRestrictionMode
+
+   At which level can Databricks and Databricks managed compute access Internet. FULL_ACCESS: Databricks can access Internet. No blocking rules will apply. RESTRICTED_ACCESS: Databricks can only access explicitly allowed internet and storage destinations, as well as UC connections and external locations. PRIVATE_ACCESS_ONLY (not used): Databricks can only access destinations via private link.
+
+   .. py:attribute:: FULL_ACCESS
+      :value: "FULL_ACCESS"
+
+   .. py:attribute:: PRIVATE_ACCESS_ONLY
+      :value: "PRIVATE_ACCESS_ONLY"
+
+   .. py:attribute:: RESTRICTED_ACCESS
+      :value: "RESTRICTED_ACCESS"
+
+.. autoclass:: EgressNetworkPolicyInternetAccessPolicyStorageDestination
+   :members:
+   :undoc-members:
+
+.. py:class:: EgressNetworkPolicyInternetAccessPolicyStorageDestinationStorageDestinationType
+
+   .. py:attribute:: AWS_S3
+      :value: "AWS_S3"
+
+   .. py:attribute:: AZURE_STORAGE
+      :value: "AZURE_STORAGE"
+
+   .. py:attribute:: CLOUDFLARE_R2
+      :value: "CLOUDFLARE_R2"
+
+   .. py:attribute:: GOOGLE_CLOUD_STORAGE
+      :value: "GOOGLE_CLOUD_STORAGE"
+
 .. autoclass:: EmailConfig
    :members:
    :undoc-members:
@@ -540,9 +666,24 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN
       :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN"
 
+   .. py:attribute:: ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY
+      :value: "ARCLIGHT_AZURE_EXCHANGE_TOKEN_WITH_USER_DELEGATION_KEY"
+
    .. py:attribute:: AZURE_ACTIVE_DIRECTORY_TOKEN
       :value: "AZURE_ACTIVE_DIRECTORY_TOKEN"
 
+.. autoclass:: UpdateAccountIpAccessEnableRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateAibiDashboardEmbeddingAccessPolicySettingRequest
+   :members:
+   :undoc-members:
+
+.. autoclass:: UpdateAibiDashboardEmbeddingApprovedDomainsSettingRequest
+   :members:
+   :undoc-members:
+
 .. autoclass:: UpdateAutomaticClusterUpdateSettingRequest
    :members:
    :undoc-members:
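
A hedged sketch of constructing the new AI/BI dashboard embedding access policy; note that the `access_policy_type` field name is an assumption based on the SDK's usual naming conventions and is not confirmed by this diff:

```python
from databricks.sdk.service.settings import (
    AibiDashboardEmbeddingAccessPolicy,
    AibiDashboardEmbeddingAccessPolicyAccessPolicyType)

# access_policy_type is an assumed field name derived from the enum's class name.
policy = AibiDashboardEmbeddingAccessPolicy(
    access_policy_type=AibiDashboardEmbeddingAccessPolicyAccessPolicyType.ALLOW_APPROVED_DOMAINS)
print(policy.as_dict())
```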
diff --git a/docs/dbdataclasses/sharing.rst b/docs/dbdataclasses/sharing.rst
index ded587fe5..ed4a4c006 100644
--- a/docs/dbdataclasses/sharing.rst
+++ b/docs/dbdataclasses/sharing.rst
@@ -14,117 +14,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: TOKEN
       :value: "TOKEN"
 
-.. autoclass:: CentralCleanRoomInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomAssetInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCatalog
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCatalogUpdate
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomCollaboratorInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomNotebookInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: CleanRoomTableInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: ColumnInfo
-   :members:
-   :undoc-members:
-
-.. autoclass:: ColumnMask
-   :members:
-   :undoc-members:
-
-.. py:class:: ColumnTypeName
-
-   Name of type (INT, STRUCT, MAP, etc.).
-
-   .. py:attribute:: ARRAY
-      :value: "ARRAY"
-
-   .. py:attribute:: BINARY
-      :value: "BINARY"
-
-   .. py:attribute:: BOOLEAN
-      :value: "BOOLEAN"
-
-   .. py:attribute:: BYTE
-      :value: "BYTE"
-
-   .. py:attribute:: CHAR
-      :value: "CHAR"
-
-   .. py:attribute:: DATE
-      :value: "DATE"
-
-   .. py:attribute:: DECIMAL
-      :value: "DECIMAL"
-
-   .. py:attribute:: DOUBLE
-      :value: "DOUBLE"
-
-   .. py:attribute:: FLOAT
-      :value: "FLOAT"
-
-   .. py:attribute:: INT
-      :value: "INT"
-
-   .. py:attribute:: INTERVAL
-      :value: "INTERVAL"
-
-   .. py:attribute:: LONG
-      :value: "LONG"
-
-   .. py:attribute:: MAP
-      :value: "MAP"
-
-   .. py:attribute:: NULL
-      :value: "NULL"
-
-   .. py:attribute:: SHORT
-      :value: "SHORT"
-
-   .. py:attribute:: STRING
-      :value: "STRING"
-
-   .. py:attribute:: STRUCT
-      :value: "STRUCT"
-
-   .. py:attribute:: TABLE_TYPE
-      :value: "TABLE_TYPE"
-
-   .. py:attribute:: TIMESTAMP
-      :value: "TIMESTAMP"
-
-   .. py:attribute:: TIMESTAMP_NTZ
-      :value: "TIMESTAMP_NTZ"
-
-   .. py:attribute:: USER_DEFINED_TYPE
-      :value: "USER_DEFINED_TYPE"
-
-.. autoclass:: CreateCleanRoom
-   :members:
-   :undoc-members:
-
 .. autoclass:: CreateProvider
    :members:
    :undoc-members:
@@ -153,10 +42,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: ListCleanRoomsResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: ListProviderSharesResponse
    :members:
    :undoc-members:
@@ -177,14 +62,16 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
+.. autoclass:: PartitionSpecificationPartition
+   :members:
+   :undoc-members:
+
 .. autoclass:: PartitionValue
    :members:
    :undoc-members:
 
 .. py:class:: PartitionValueOp
 
-   The operator to apply for the value.
-
    .. py:attribute:: EQUAL
       :value: "EQUAL"
 
@@ -223,6 +110,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CREATE_FOREIGN_CATALOG
       :value: "CREATE_FOREIGN_CATALOG"
 
+   .. py:attribute:: CREATE_FOREIGN_SECURABLE
+      :value: "CREATE_FOREIGN_SECURABLE"
+
    .. py:attribute:: CREATE_FUNCTION
       :value: "CREATE_FUNCTION"
 
@@ -377,6 +267,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
 
    The type of the data object.
 
+   .. py:attribute:: FEATURE_SPEC
+      :value: "FEATURE_SPEC"
+
+   .. py:attribute:: FUNCTION
+      :value: "FUNCTION"
+
    .. py:attribute:: MATERIALIZED_VIEW
       :value: "MATERIALIZED_VIEW"
 
@@ -435,10 +331,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: UPDATE
       :value: "UPDATE"
 
-.. autoclass:: UpdateCleanRoom
-   :members:
-   :undoc-members:
-
 .. autoclass:: UpdatePermissionsResponse
    :members:
    :undoc-members:
@@ -451,10 +343,6 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    :members:
    :undoc-members:
 
-.. autoclass:: UpdateResponse
-   :members:
-   :undoc-members:
-
 .. autoclass:: UpdateShare
    :members:
    :undoc-members:
diff --git a/docs/dbdataclasses/sql.rst b/docs/dbdataclasses/sql.rst
index 1657146c3..c63fe7cd2 100644
--- a/docs/dbdataclasses/sql.rst
+++ b/docs/dbdataclasses/sql.rst
@@ -114,8 +114,12 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: CHANNEL_NAME_PREVIEW
       :value: "CHANNEL_NAME_PREVIEW"
 
-   .. py:attribute:: CHANNEL_NAME_UNSPECIFIED
-      :value: "CHANNEL_NAME_UNSPECIFIED"
+   .. py:attribute:: CHANNEL_NAME_PREVIOUS
+      :value: "CHANNEL_NAME_PREVIOUS"
+
+.. autoclass:: ClientConfig
+   :members:
+   :undoc-members:
 
 .. autoclass:: ColumnInfo
    :members:
diff --git a/docs/dbdataclasses/workspace.rst b/docs/dbdataclasses/workspace.rst
index 9ff3eb66b..bd0785db4 100644
--- a/docs/dbdataclasses/workspace.rst
+++ b/docs/dbdataclasses/workspace.rst
@@ -157,6 +157,9 @@ These dataclasses are used in the SDK to represent API requests and responses fo
    .. py:attribute:: JUPYTER
       :value: "JUPYTER"
 
+   .. py:attribute:: RAW
+      :value: "RAW"
+
    .. py:attribute:: R_MARKDOWN
       :value: "R_MARKDOWN"
 
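
The new `RAW` member lands in what appears to be the `ExportFormat` enum (its siblings `JUPYTER` and `R_MARKDOWN` are shown above). A minimal sketch under that assumption, with a placeholder notebook path:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ExportFormat

w = WorkspaceClient()
# '/Users/someone@example.com/notebook' is a placeholder path.
resp = w.workspace.export('/Users/someone@example.com/notebook', format=ExportFormat.RAW)
print(resp.content[:80])  # base64-encoded payload
```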
diff --git a/docs/gen-client-docs.py b/docs/gen-client-docs.py
index 5c32beffe..6ebfa7bab 100644
--- a/docs/gen-client-docs.py
+++ b/docs/gen-client-docs.py
@@ -248,6 +248,7 @@ class Generator:
         Package("dashboards", "Dashboards", "Manage Lakeview dashboards"),
         Package("marketplace", "Marketplace", "Manage AI and analytics assets such as ML models, notebooks, applications in an open marketplace"),
         Package("apps", "Apps", "Build custom applications on Databricks"),
+        Package("cleanrooms", "Clean Rooms", "Manage clean rooms and their assets and task runs"),
     ]
 
     def __init__(self):
@@ -266,11 +267,22 @@ def _load_mapping(self) -> dict[str, Tag]:
         pkgs = {p.name: p for p in self.packages}
         spec = json.loads(self._openapi_spec())
         for tag in spec['tags']:
+            is_account = tag.get('x-databricks-is-accounts')
+            # Unique identifier for the tag. Note that the service name may not be unique
+            key = 'a' if is_account else 'w'
+            parent_service = tag.get('x-databricks-parent-service')
+            if parent_service:
+                # SDK generation removes the "account" prefix from account services
+                clean_parent_service = parent_service.lower().removeprefix("account")
+                key = f"{key}.{clean_parent_service}"
+
+            key = f"{key}.{tag['x-databricks-service']}".lower()
+
             t = Tag(name=tag['name'],
                     service=tag['x-databricks-service'],
                     is_account=tag.get('x-databricks-is-accounts', False),
                     package=pkgs[tag['x-databricks-package']])
-            mapping[tag['name']] = t
+            mapping[key] = t
         return mapping
 
     @staticmethod
@@ -359,7 +371,7 @@ def service_docs(self, client_inst, client_prefix: str) -> list[ServiceDoc]:
                            service_name=service_name,
                            class_name=class_name,
                            doc=class_doc,
-                           tag=self._get_tag_name(service_inst.__class__.__name__, service_name),
+                           tag=self._get_tag_name(service_inst.__class__.__name__, client_prefix, service_name),
                            methods=self.class_methods(service_inst),
                            property=self.class_properties(service_inst)))
         return all
@@ -375,13 +387,19 @@ def _make_folder_if_not_exists(folder):
 
     def write_dataclass_docs(self):
         self._make_folder_if_not_exists(f'{__dir__}/dbdataclasses')
+        all_packages = []
         for pkg in self.packages:
-            module = importlib.import_module(f'databricks.sdk.service.{pkg.name}')
+            try:
+                module = importlib.import_module(f'databricks.sdk.service.{pkg.name}')
+            except ModuleNotFoundError:
+                print(f'No module found for {pkg.name}, continuing')
+                continue
+            all_packages.append(pkg.name)
             all_members = [name for name, _ in inspect.getmembers(module, predicate=self._should_document)]
             doc = DataclassesDoc(package=pkg, dataclasses=sorted(all_members))
             with open(f'{__dir__}/dbdataclasses/{pkg.name}.rst', 'w') as f:
                 f.write(doc.as_rst())
-        all = "\n   ".join(sorted([p.name for p in self.packages]))
+        all = "\n   ".join(sorted(all_packages))
         with open(f'{__dir__}/dbdataclasses/index.rst', 'w') as f:
             f.write(f'''
 Dataclasses
@@ -392,13 +410,13 @@ def write_dataclass_docs(self):
    
    {all}''')
 
-    def _get_tag_name(self, class_name, service_name) -> Tag:
+    def _get_tag_name(self, class_name, client_prefix, service_name) -> Tag:
         if class_name[-3:] == 'Ext':
             # ClustersExt, DbfsExt, WorkspaceExt, but not ExternalLocations
             class_name = class_name.replace('Ext', 'API')
         class_name = class_name[:-3]
-        for tag_name, t in self.mapping.items():
-            if t.service.lower() == str(class_name).lower():
+        for key, t in self.mapping.items():
+            if key == f'{client_prefix}.{str(class_name).lower()}':
                 return t
         raise KeyError(f'Cannot find {class_name} / {service_name} tag')
 
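
To make the new tag-key scheme in `_load_mapping` easier to review, here is the same logic run on a made-up tag (requires Python 3.9+ for `str.removeprefix`); the tag values below are illustrative only, not taken from the real spec:

```python
tag = {  # hypothetical OpenAPI tag
    'name': 'AccountMetastores',
    'x-databricks-service': 'AccountMetastores',
    'x-databricks-is-accounts': True,
    'x-databricks-parent-service': 'AccountCatalog',
}

key = 'a' if tag.get('x-databricks-is-accounts') else 'w'
parent_service = tag.get('x-databricks-parent-service')
if parent_service:
    # the "account" prefix is stripped, mirroring SDK generation
    key = f"{key}.{parent_service.lower().removeprefix('account')}"
key = f"{key}.{tag['x-databricks-service']}".lower()
print(key)  # a.catalog.accountmetastores
```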
diff --git a/docs/workspace/apps/apps.rst b/docs/workspace/apps/apps.rst
index 774e75b8b..af7229f34 100644
--- a/docs/workspace/apps/apps.rst
+++ b/docs/workspace/apps/apps.rst
@@ -7,26 +7,22 @@
     Apps run directly on a customer’s Databricks instance, integrate with their data, use and extend
     Databricks services, and enable users to interact through single sign-on.
 
-    .. py:method:: create(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> Wait[App]
+    .. py:method:: create( [, app: Optional[App], no_compute: Optional[bool]]) -> Wait[App]
 
         Create an app.
         
         Creates a new app.
         
-        :param name: str
-          The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
-          must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
+        :param no_compute: bool (optional)
+          If true, the app will not be started after creation.
         
         :returns:
           Long-running operation waiter for :class:`App`.
           See :method:wait_get_app_active for more details.
         
 
-    .. py:method:: create_and_wait(name: str [, description: Optional[str], resources: Optional[List[AppResource]], timeout: datetime.timedelta = 0:20:00]) -> App
+    .. py:method:: create_and_wait( [, app: Optional[App], no_compute: Optional[bool], timeout: datetime.timedelta = 0:20:00]) -> App
 
 
     .. py:method:: delete(name: str) -> App
@@ -41,7 +37,7 @@
         :returns: :class:`App`
         
 
-    .. py:method:: deploy(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str]]) -> Wait[AppDeployment]
+    .. py:method:: deploy(app_name: str [, app_deployment: Optional[AppDeployment]]) -> Wait[AppDeployment]
 
         Create an app deployment.
         
@@ -49,23 +45,14 @@
         
         :param app_name: str
           The name of the app.
-        :param deployment_id: str (optional)
-          The unique id of the deployment.
-        :param mode: :class:`AppDeploymentMode` (optional)
-          The mode of which the deployment will manage the source code.
-        :param source_code_path: str (optional)
-          The workspace file system path of the source code used to create the app deployment. This is
-          different from `deployment_artifacts.source_code_path`, which is the path used by the deployed app.
-          The former refers to the original source code location of the app in the workspace during deployment
-          creation, whereas the latter provides a system generated stable snapshotted source code path used by
-          the deployment.
+        :param app_deployment: :class:`AppDeployment` (optional)
         
         :returns:
           Long-running operation waiter for :class:`AppDeployment`.
           See :method:wait_get_deployment_app_succeeded for more details.
         
 
-    .. py:method:: deploy_and_wait(app_name: str [, deployment_id: Optional[str], mode: Optional[AppDeploymentMode], source_code_path: Optional[str], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
+    .. py:method:: deploy_and_wait(app_name: str [, app_deployment: Optional[AppDeployment], timeout: datetime.timedelta = 0:20:00]) -> AppDeployment
 
 
     .. py:method:: get(name: str) -> App
@@ -152,7 +139,8 @@
 
         Set app permissions.
         
-        Sets permissions on an app. Apps can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param app_name: str
           The app for which to get or manage permissions.
@@ -195,7 +183,7 @@
     .. py:method:: stop_and_wait(name: str, timeout: datetime.timedelta = 0:20:00) -> App
 
 
-    .. py:method:: update(name: str [, description: Optional[str], resources: Optional[List[AppResource]]]) -> App
+    .. py:method:: update(name: str [, app: Optional[App]]) -> App
 
         Update an app.
         
@@ -204,10 +192,7 @@
         :param name: str
           The name of the app. The name must contain only lowercase alphanumeric characters and hyphens. It
           must be unique within the workspace.
-        :param description: str (optional)
-          The description of the app.
-        :param resources: List[:class:`AppResource`] (optional)
-          Resources for the app.
+        :param app: :class:`App` (optional)
         
         :returns: :class:`App`
         
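
A hedged usage sketch of the reshaped `create` signature; the app name is a placeholder and the call assumes a configured workspace:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.apps import App

w = WorkspaceClient()
waiter = w.apps.create(app=App(name='my-app'))  # 'my-app' is a placeholder
app = waiter.result()  # blocks until the app reaches ACTIVE
print(app.name)
```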
diff --git a/docs/workspace/catalog/catalogs.rst b/docs/workspace/catalog/catalogs.rst
index 200168ee6..1d6b6dc2a 100644
--- a/docs/workspace/catalog/catalogs.rst
+++ b/docs/workspace/catalog/catalogs.rst
@@ -143,7 +143,7 @@
         :returns: Iterator over :class:`CatalogInfo`
         
 
-    .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
+    .. py:method:: update(name: str [, comment: Optional[str], enable_predictive_optimization: Optional[EnablePredictiveOptimization], isolation_mode: Optional[CatalogIsolationMode], new_name: Optional[str], options: Optional[Dict[str, str]], owner: Optional[str], properties: Optional[Dict[str, str]]]) -> CatalogInfo
 
 
         Usage:
@@ -178,6 +178,8 @@
           Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the catalog.
+        :param options: Dict[str,str] (optional)
+          A map of key-value properties attached to the securable.
         :param owner: str (optional)
           Username of current owner of catalog.
         :param properties: Dict[str,str] (optional)
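
A short sketch of the new `options` argument on `w.catalogs.update`; the catalog name and option keys are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# 'main' and the option key/value are placeholders.
info = w.catalogs.update(name='main', comment='updated comment',
                         options={'example_key': 'example_value'})
print(info.name)
```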
diff --git a/docs/workspace/catalog/credentials.rst b/docs/workspace/catalog/credentials.rst
new file mode 100644
index 000000000..3927e6351
--- /dev/null
+++ b/docs/workspace/catalog/credentials.rst
@@ -0,0 +1,193 @@
+``w.credentials``: Credentials
+==============================
+.. currentmodule:: databricks.sdk.service.catalog
+
+.. py:class:: CredentialsAPI
+
+    A credential represents an authentication and authorization mechanism for accessing services on your cloud
+    tenant. Each credential is subject to Unity Catalog access-control policies that control which users and
+    groups can access the credential.
+    
+    To create credentials, you must be a Databricks account admin or have the `CREATE SERVICE CREDENTIAL`
+    privilege. The user who creates the credential can delegate ownership to another user or group to manage
+    permissions on it.
+
+    .. py:method:: create_credential(name: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], purpose: Optional[CredentialPurpose], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+
+        Create a credential.
+        
+        Creates a new credential. The type of credential to be created is determined by the **purpose** field,
+        which should be either **SERVICE** or **STORAGE**.
+        
+        The caller must be a metastore admin or have the metastore privilege **CREATE_STORAGE_CREDENTIAL** for
+        storage credentials, or **CREATE_SERVICE_CREDENTIAL** for service credentials.
+        
+        :param name: str
+          The credential name. The name must be unique among storage and service credentials within the
+          metastore.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Indicates the purpose of the credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Optional. Supplying true to this argument skips validation of the created set of credentials.
+        
+        :returns: :class:`CredentialInfo`
+        
+
+    .. py:method:: delete_credential(name_arg: str [, force: Optional[bool]])
+
+        Delete a credential.
+        
+        Deletes a service or storage credential from the metastore. The caller must be an owner of the
+        credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        
+        
+        
+
+    .. py:method:: generate_temporary_service_credential(credential_name: str [, azure_options: Optional[GenerateTemporaryServiceCredentialAzureOptions], gcp_options: Optional[GenerateTemporaryServiceCredentialGcpOptions]]) -> TemporaryCredentials
+
+        Generate a temporary service credential.
+        
+        Returns a set of temporary credentials generated using the specified service credential. The caller
+        must be a metastore admin or have the metastore privilege **ACCESS** on the service credential.
+        
+        :param credential_name: str
+          The name of the service credential used to generate a temporary credential
+        :param azure_options: :class:`GenerateTemporaryServiceCredentialAzureOptions` (optional)
+          The Azure cloud options to customize the requested temporary credential
+        :param gcp_options: :class:`GenerateTemporaryServiceCredentialGcpOptions` (optional)
+          The GCP cloud options to customize the requested temporary credential
+        
+        :returns: :class:`TemporaryCredentials`
+        
+
+    .. py:method:: get_credential(name_arg: str) -> CredentialInfo
+
+        Get a credential.
+        
+        Gets a service or storage credential from the metastore. The caller must be a metastore admin, the
+        owner of the credential, or have any permission on the credential.
+        
+        :param name_arg: str
+          Name of the credential.
+        
+        :returns: :class:`CredentialInfo`
+        
+
+    .. py:method:: list_credentials( [, max_results: Optional[int], page_token: Optional[str], purpose: Optional[CredentialPurpose]]) -> Iterator[CredentialInfo]
+
+        List credentials.
+        
+        Gets an array of credentials (as __CredentialInfo__ objects).
+        
+        The array is limited to only the credentials that the caller has permission to access. If the caller
+        is a metastore admin, retrieval of credentials is unrestricted. There is no guarantee of a specific
+        ordering of the elements in the array.
+        
+        :param max_results: int (optional)
+          Maximum number of credentials to return. - If not set, the default max page size is used. - When set
+          to a value greater than 0, the page length is the minimum of this value and a server-configured
+          value. - When set to 0, the page length is set to a server-configured value (recommended). - When
+          set to a value less than 0, an invalid parameter error is returned.
+        :param page_token: str (optional)
+          Opaque token to retrieve the next page of results.
+        :param purpose: :class:`CredentialPurpose` (optional)
+          Return only credentials for the specified purpose.
+        
+        :returns: Iterator over :class:`CredentialInfo`
+        
+
+    .. py:method:: update_credential(name_arg: str [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], azure_service_principal: Optional[AzureServicePrincipal], comment: Optional[str], databricks_gcp_service_account: Optional[DatabricksGcpServiceAccount], force: Optional[bool], isolation_mode: Optional[IsolationMode], new_name: Optional[str], owner: Optional[str], read_only: Optional[bool], skip_validation: Optional[bool]]) -> CredentialInfo
+
+        Update a credential.
+        
+        Updates a service or storage credential on the metastore.
+        
+        The caller must be the owner of the credential or a metastore admin or have the `MANAGE` permission.
+        If the caller is a metastore admin, only the __owner__ field can be changed.
+        
+        :param name_arg: str
+          Name of the credential.
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param azure_service_principal: :class:`AzureServicePrincipal` (optional)
+          The Azure service principal configuration. Only applicable when purpose is **STORAGE**.
+        :param comment: str (optional)
+          Comment associated with the credential.
+        :param databricks_gcp_service_account: :class:`DatabricksGcpServiceAccount` (optional)
+          GCP long-lived credential. Databricks-created Google Cloud Storage service account.
+        :param force: bool (optional)
+          Force an update even if there are dependent services (when purpose is **SERVICE**) or dependent
+          external locations and external tables (when purpose is **STORAGE**).
+        :param isolation_mode: :class:`IsolationMode` (optional)
+          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
+        :param new_name: str (optional)
+          New name of credential.
+        :param owner: str (optional)
+          Username of current owner of credential.
+        :param read_only: bool (optional)
+          Whether the credential is usable only for read operations. Only applicable when purpose is
+          **STORAGE**.
+        :param skip_validation: bool (optional)
+          Supply true to this argument to skip validation of the updated credential.
+        
+        :returns: :class:`CredentialInfo`
+        
+
+    .. py:method:: validate_credential( [, aws_iam_role: Optional[AwsIamRole], azure_managed_identity: Optional[AzureManagedIdentity], credential_name: Optional[str], external_location_name: Optional[str], purpose: Optional[CredentialPurpose], read_only: Optional[bool], url: Optional[str]]) -> ValidateCredentialResponse
+
+        Validate a credential.
+        
+        Validates a credential.
+        
+        For service credentials (purpose is **SERVICE**), either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        For storage credentials (purpose is **STORAGE**), at least one of __external_location_name__ and
+        __url__ needs to be provided. If only one of them is provided, it will be used for validation. If
+        both are provided, the __url__ will be used for validation, and __external_location_name__ will be
+        ignored when checking overlapping URLs. Either the __credential_name__ or the cloud-specific
+        credential must be provided.
+        
+        The caller must be a metastore admin or the credential owner or have the required permission on the
+        metastore and the credential (e.g., **CREATE_EXTERNAL_LOCATION** when purpose is **STORAGE**).
+        
+        :param aws_iam_role: :class:`AwsIamRole` (optional)
+          The AWS IAM role configuration
+        :param azure_managed_identity: :class:`AzureManagedIdentity` (optional)
+          The Azure managed identity configuration.
+        :param credential_name: str (optional)
+          Required. The name of an existing credential or long-lived cloud credential to validate.
+        :param external_location_name: str (optional)
+          The name of an existing external location to validate. Only applicable for storage credentials
+          (purpose is **STORAGE**).
+        :param purpose: :class:`CredentialPurpose` (optional)
+          The purpose of the credential. This should only be used when the credential is specified.
+        :param read_only: bool (optional)
+          Whether the credential is only usable for read operations. Only applicable for storage credentials
+          (purpose is **STORAGE**).
+        :param url: str (optional)
+          The external location url to validate. Only applicable when purpose is **STORAGE**.
+        
+        :returns: :class:`ValidateCredentialResponse`
+        
\ No newline at end of file
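
A hedged end-to-end sketch of the new `w.credentials` API documented above; the role ARN and names are placeholders, and `AwsIamRole` is assumed to take a `role_arn` field as in the storage-credential API:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import AwsIamRole, CredentialPurpose

w = WorkspaceClient()
cred = w.credentials.create_credential(
    name='my-service-credential',  # placeholder
    aws_iam_role=AwsIamRole(role_arn='arn:aws:iam::123456789012:role/example'),  # placeholder ARN
    purpose=CredentialPurpose.SERVICE,
    comment='created from the SDK')
print(cred.name)

# Temporary credentials minted from the service credential:
tmp = w.credentials.generate_temporary_service_credential(credential_name=cred.name)
```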
diff --git a/docs/workspace/catalog/external_locations.rst b/docs/workspace/catalog/external_locations.rst
index 365007b09..fc60b18f6 100644
--- a/docs/workspace/catalog/external_locations.rst
+++ b/docs/workspace/catalog/external_locations.rst
@@ -221,7 +221,6 @@
         :param force: bool (optional)
           Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the external location.
         :param owner: str (optional)
diff --git a/docs/workspace/catalog/index.rst b/docs/workspace/catalog/index.rst
index 1372ca5a1..471804098 100644
--- a/docs/workspace/catalog/index.rst
+++ b/docs/workspace/catalog/index.rst
@@ -10,6 +10,7 @@ Configure data governance with Unity Catalog for metastores, catalogs, schemas,
    artifact_allowlists
    catalogs
    connections
+   credentials
    external_locations
    functions
    grants
diff --git a/docs/workspace/catalog/online_tables.rst b/docs/workspace/catalog/online_tables.rst
index 164832b0f..d0119657f 100644
--- a/docs/workspace/catalog/online_tables.rst
+++ b/docs/workspace/catalog/online_tables.rst
@@ -6,20 +6,23 @@
 
     Online tables provide lower latency and higher QPS access to data from Delta tables.
 
-    .. py:method:: create( [, name: Optional[str], spec: Optional[OnlineTableSpec]]) -> OnlineTable
+    .. py:method:: create( [, table: Optional[OnlineTable]]) -> Wait[OnlineTable]
 
         Create an Online Table.
         
         Create a new Online Table.
         
-        :param name: str (optional)
-          Full three-part (catalog, schema, table) name of the table.
-        :param spec: :class:`OnlineTableSpec` (optional)
-          Specification of the online table.
+        :param table: :class:`OnlineTable` (optional)
+          Online Table information.
         
-        :returns: :class:`OnlineTable`
+        :returns:
+          Long-running operation waiter for :class:`OnlineTable`.
+          See :method:wait_get_online_table_active for more details.
         
 
+    .. py:method:: create_and_wait( [, table: Optional[OnlineTable], timeout: datetime.timedelta = 0:20:00]) -> OnlineTable
+
+
     .. py:method:: delete(name: str)
 
         Delete an Online Table.
@@ -44,4 +47,6 @@
           Full three-part (catalog, schema, table) name of the table.
         
         :returns: :class:`OnlineTable`
-        
\ No newline at end of file
+        
+
+    .. py:method:: wait_get_online_table_active(name: str, timeout: datetime.timedelta = 0:20:00, callback: Optional[Callable[[OnlineTable], None]]) -> OnlineTable
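
A hedged sketch of the new waiter-based `create`; `OnlineTableSpec.source_table_full_name` is an assumption about the spec shape, and the table names are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import OnlineTable, OnlineTableSpec

w = WorkspaceClient()
table = OnlineTable(
    name='main.default.my_table_online',  # placeholder three-part name
    spec=OnlineTableSpec(source_table_full_name='main.default.my_table'))  # assumed field
online = w.online_tables.create_and_wait(table=table)  # blocks until ACTIVE
print(online.name)
```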
diff --git a/docs/workspace/catalog/storage_credentials.rst b/docs/workspace/catalog/storage_credentials.rst
index 30b04654c..cac70a944 100644
--- a/docs/workspace/catalog/storage_credentials.rst
+++ b/docs/workspace/catalog/storage_credentials.rst
@@ -193,7 +193,6 @@
         :param force: bool (optional)
           Force update even if there are dependent external locations or external tables.
         :param isolation_mode: :class:`IsolationMode` (optional)
-          Whether the current securable is accessible from all workspaces or a specific set of workspaces.
         :param new_name: str (optional)
           New name for the storage credential.
         :param owner: str (optional)
diff --git a/docs/workspace/catalog/tables.rst b/docs/workspace/catalog/tables.rst
index 4cb458b46..15cfb1cac 100644
--- a/docs/workspace/catalog/tables.rst
+++ b/docs/workspace/catalog/tables.rst
@@ -100,7 +100,7 @@
         :returns: :class:`TableInfo`
         
 
-    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
+    .. py:method:: list(catalog_name: str, schema_name: str [, include_browse: Optional[bool], include_delta_metadata: Optional[bool], include_manifest_capabilities: Optional[bool], max_results: Optional[int], omit_columns: Optional[bool], omit_properties: Optional[bool], omit_username: Optional[bool], page_token: Optional[str]]) -> Iterator[TableInfo]
 
 
         Usage:
@@ -151,6 +151,9 @@
           Whether to omit the columns of the table from the response or not.
         :param omit_properties: bool (optional)
           Whether to omit the properties of the table from the response or not.
+        :param omit_username: bool (optional)
+          Whether to omit the username of the table (e.g. owner, updated_by, created_by) from the response or
+          not.
         :param page_token: str (optional)
           Opaque token to send for the next page of results (pagination).
         
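
A short sketch of the new `omit_username` flag; the catalog and schema names are placeholders:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for t in w.tables.list(catalog_name='main', schema_name='default', omit_username=True):
    print(t.full_name)  # owner/created_by/updated_by are omitted from each TableInfo
```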
diff --git a/docs/workspace/cleanrooms/clean_room_assets.rst b/docs/workspace/cleanrooms/clean_room_assets.rst
new file mode 100644
index 000000000..fe282543a
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_room_assets.rst
@@ -0,0 +1,94 @@
+``w.clean_room_assets``: Assets
+===============================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomAssetsAPI
+
+    Clean room assets are data and code objects (tables, volumes, and notebooks) that are shared with
+    the clean room.
+
+    .. py:method:: create(clean_room_name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
+
+        Create an asset.
+        
+        Create a clean room asset: share an asset like a notebook or table into the clean room. For each UC
+        asset that is added through this method, the clean room owner must also have enough privilege on the
+        asset to consume it. The privilege must be maintained indefinitely for the clean room to be able to
+        access the asset. Typically, you should use a group as the clean room owner.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
+
+    .. py:method:: delete(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str)
+
+        Delete an asset.
+        
+        Delete a clean room asset - unshare/remove the asset from the clean room
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        
+        
+
+    .. py:method:: get(clean_room_name: str, asset_type: CleanRoomAssetAssetType, asset_full_name: str) -> CleanRoomAsset
+
+        Get an asset.
+        
+        Get the details of a clean room asset by its type and full name.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param asset_full_name: str
+          The fully qualified name of the asset; it is the same as the name field in CleanRoomAsset.
+        
+        :returns: :class:`CleanRoomAsset`
+        
+
+    .. py:method:: list(clean_room_name: str [, page_token: Optional[str]]) -> Iterator[CleanRoomAsset]
+
+        List assets.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomAsset`
+        
+
+    .. py:method:: update(clean_room_name: str, asset_type: CleanRoomAssetAssetType, name: str [, asset: Optional[CleanRoomAsset]]) -> CleanRoomAsset
+
+        Update an asset.
+        
+        Update a clean room asset, for example by updating the content of a notebook or changing the shared
+        partitions of a table.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param asset_type: :class:`CleanRoomAssetAssetType`
+          The type of the asset.
+        :param name: str
+          A fully qualified name that uniquely identifies the asset within the clean room. This is also the
+          name displayed in the clean room UI.
+          
+          For UC securable assets (tables, volumes, etc.), the format is
+          *shared_catalog*.*shared_schema*.*asset_name*
+          
+          For notebooks, the name is the notebook file name.
+        :param asset: :class:`CleanRoomAsset` (optional)
+          Metadata of the clean room asset
+        
+        :returns: :class:`CleanRoomAsset`
+        
\ No newline at end of file
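
A hedged sketch of sharing a table into a clean room; `CleanRoomAsset`'s field names and the `TABLE` enum member are assumptions not shown in this diff, and all names are placeholders:

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoomAsset, CleanRoomAssetAssetType

w = WorkspaceClient()
asset = CleanRoomAsset(
    name='shared_catalog.shared_schema.my_table',  # placeholder UC name
    asset_type=CleanRoomAssetAssetType.TABLE)      # assumed enum member
created = w.clean_room_assets.create(clean_room_name='my-clean-room', asset=asset)
print(created.name)
```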
diff --git a/docs/workspace/cleanrooms/clean_room_task_runs.rst b/docs/workspace/cleanrooms/clean_room_task_runs.rst
new file mode 100644
index 000000000..dcf59037c
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_room_task_runs.rst
@@ -0,0 +1,25 @@
+``w.clean_room_task_runs``: Task Runs
+=====================================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomTaskRunsAPI
+
+    Clean room task runs are the executions of notebooks in a clean room.
+
+    .. py:method:: list(clean_room_name: str [, notebook_name: Optional[str], page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoomNotebookTaskRun]
+
+        List notebook task runs.
+        
+        List all the historical notebook task runs in a clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param notebook_name: str (optional)
+          Notebook name
+        :param page_size: int (optional)
+          The maximum number of task runs to return
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoomNotebookTaskRun`
+        
\ No newline at end of file
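
Listing historical runs is a single paginated call; a minimal sketch with a placeholder clean room name:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
for run in w.clean_room_task_runs.list('my-clean-room', page_size=25):
    print(run)  # CleanRoomNotebookTaskRun entries; pages are fetched transparently
```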
diff --git a/docs/workspace/cleanrooms/clean_rooms.rst b/docs/workspace/cleanrooms/clean_rooms.rst
new file mode 100644
index 000000000..8ef5d8827
--- /dev/null
+++ b/docs/workspace/cleanrooms/clean_rooms.rst
@@ -0,0 +1,95 @@
+``w.clean_rooms``: Clean Rooms
+==============================
+.. currentmodule:: databricks.sdk.service.cleanrooms
+
+.. py:class:: CleanRoomsAPI
+
+    A clean room uses Delta Sharing and serverless compute to provide a secure and privacy-protecting
+    environment where multiple parties can work together on sensitive enterprise data without direct access to
+    each other’s data.
+
+    .. py:method:: create( [, clean_room: Optional[CleanRoom]]) -> CleanRoom
+
+        Create a clean room.
+        
+        Create a new clean room with the specified collaborators. This method is asynchronous; the returned
+        name field inside the clean_room field can be used to poll the clean room status, using the
+        :method:cleanrooms/get method. When this method returns, the clean room will be in a PROVISIONING
+        state, with only name, owner, comment, created_at and status populated. The clean room will be usable
+        once it enters an ACTIVE state.
+        
+        The caller must be a metastore admin or have the **CREATE_CLEAN_ROOM** privilege on the metastore.
+        
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
+
+    .. py:method:: create_output_catalog(clean_room_name: str [, output_catalog: Optional[CleanRoomOutputCatalog]]) -> CreateCleanRoomOutputCatalogResponse
+
+        Create an output catalog.
+        
+        Create the output catalog of the clean room.
+        
+        :param clean_room_name: str
+          Name of the clean room.
+        :param output_catalog: :class:`CleanRoomOutputCatalog` (optional)
+        
+        :returns: :class:`CreateCleanRoomOutputCatalogResponse`
+        
+
+    .. py:method:: delete(name: str)
+
+        Delete a clean room.
+        
+        Delete a clean room. After deletion, the clean room will be removed from the metastore. If the other
+        collaborators have not deleted the clean room, they will still have the clean room in their metastore,
+        but it will be in a DELETED state and no operations other than deletion can be performed on it.
+        
+        :param name: str
+          Name of the clean room.
+        
+        
+        
+
+    .. py:method:: get(name: str) -> CleanRoom
+
+        Get a clean room.
+        
+        Get the details of a clean room given its name.
+        
+        :param name: str
+        
+        :returns: :class:`CleanRoom`
+        
+
+    .. py:method:: list( [, page_size: Optional[int], page_token: Optional[str]]) -> Iterator[CleanRoom]
+
+        List clean rooms.
+        
+        Get a list of all clean rooms of the metastore. Only clean rooms the caller has access to are
+        returned.
+        
+        :param page_size: int (optional)
+          Maximum number of clean rooms to return (i.e., the page length). Defaults to 100.
+        :param page_token: str (optional)
+          Opaque pagination token to go to next page based on previous query.
+        
+        :returns: Iterator over :class:`CleanRoom`
+        
+
+    .. py:method:: update(name: str [, clean_room: Optional[CleanRoom]]) -> CleanRoom
+
+        Update a clean room.
+        
+        Update a clean room. The caller must be the owner of the clean room, have **MODIFY_CLEAN_ROOM**
+        privilege, or be a metastore admin.
+        
+        When the caller is a metastore admin, only the __owner__ field can be updated.
+        
+        :param name: str
+          Name of the clean room.
+        :param clean_room: :class:`CleanRoom` (optional)
+        
+        :returns: :class:`CleanRoom`
+        
\ No newline at end of file
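
Because `create` is asynchronous, the documented pattern is create-then-poll via `get`; a hedged sketch (the `CleanRoom` field names beyond `name` are not shown in this diff, and the room name is a placeholder):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.cleanrooms import CleanRoom

w = WorkspaceClient()
created = w.clean_rooms.create(clean_room=CleanRoom(name='my-clean-room'))
# create() returns while the room is still PROVISIONING; poll get() until ACTIVE.
room = w.clean_rooms.get(created.name)
print(room.status)
```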
diff --git a/docs/workspace/cleanrooms/index.rst b/docs/workspace/cleanrooms/index.rst
new file mode 100644
index 000000000..a979ac201
--- /dev/null
+++ b/docs/workspace/cleanrooms/index.rst
@@ -0,0 +1,12 @@
+
+Clean Rooms
+===========
+
+Manage clean rooms and their assets and task runs
+
+.. toctree::
+   :maxdepth: 1
+
+   clean_room_assets
+   clean_room_task_runs
+   clean_rooms
\ No newline at end of file
diff --git a/docs/workspace/compute/cluster_policies.rst b/docs/workspace/compute/cluster_policies.rst
index 1cefc8ca6..65066964c 100644
--- a/docs/workspace/compute/cluster_policies.rst
+++ b/docs/workspace/compute/cluster_policies.rst
@@ -267,7 +267,8 @@
 
         Set cluster policy permissions.
         
-        Sets permissions on a cluster policy. Cluster policies can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_policy_id: str
           The cluster policy for which to get or manage permissions.
diff --git a/docs/workspace/compute/clusters.rst b/docs/workspace/compute/clusters.rst
index ac52edecb..4e97857eb 100644
--- a/docs/workspace/compute/clusters.rst
+++ b/docs/workspace/compute/clusters.rst
@@ -71,7 +71,7 @@
         
         
 
-    .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
+    .. py:method:: create(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
 
         Usage:
@@ -134,11 +134,11 @@
         :param clone_from: :class:`CloneCluster` (optional)
           When specified, this clones libraries from a source cluster during the creation of a new cluster.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -152,13 +152,19 @@
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -190,6 +196,17 @@
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single-node-related `custom_tags`, `spark_conf`,
+          and `num_workers`.
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -236,6 +253,11 @@
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -243,7 +265,7 @@
           See :method:wait_get_cluster_running for more details.
         
 
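+        A minimal sketch of the new single-node fields (illustrative values; assumes `Kind.CLASSIC_PREVIEW`
+        is available in `databricks.sdk.service.compute` and enabled for the workspace):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import compute
+
+            w = WorkspaceClient()
+            latest = w.clusters.select_spark_version(latest=True)
+            # illustrative: a single-node cluster using the new kind-based fields
+            clstr = w.clusters.create(cluster_name='single-node-sketch',
+                                      spark_version=latest,
+                                      node_type_id=w.clusters.select_node_type(local_disk=True),
+                                      autotermination_minutes=15,
+                                      kind=compute.Kind.CLASSIC_PREVIEW,
+                                      is_single_node=True).result()
+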
-    .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+    .. py:method:: create_and_wait(spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], clone_from: Optional[CloneCluster], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
 
     .. py:method:: delete(cluster_id: str) -> Wait[ClusterDetails]
@@ -292,7 +314,7 @@
     .. py:method:: delete_and_wait(cluster_id: str, timeout: datetime.timedelta = 0:20:00) -> ClusterDetails
 
 
-    .. py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
+    .. py:method:: edit(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType]]) -> Wait[ClusterDetails]
 
 
         Usage:
@@ -341,7 +363,7 @@
         Clusters created by the Databricks Jobs service cannot be edited.
         
         :param cluster_id: str
-          ID of the cluser
+          ID of the cluster
         :param spark_version: str
           The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
           retrieved by using the :method:clusters/sparkVersions API call.
@@ -362,11 +384,11 @@
           Attributes related to clusters running on Microsoft Azure. If not specified at cluster creation, a
           set of default values will be used.
         :param cluster_log_conf: :class:`ClusterLogConf` (optional)
-          The configuration for delivering spark logs to a long-term storage destination. Two kinds of
-          destinations (dbfs and s3) are supported. Only one destination can be specified for one cluster. If
-          the conf is given, the logs will be delivered to the destination every `5 mins`. The destination of
-          driver logs is `$destination/$clusterId/driver`, while the destination of executor logs is
-          `$destination/$clusterId/executor`.
+          The configuration for delivering spark logs to a long-term storage destination. Three kinds of
+          destinations (DBFS, S3 and Unity Catalog volumes) are supported. Only one destination can be
+          specified for one cluster. If the conf is given, the logs will be delivered to the destination every
+          `5 mins`. The destination of driver logs is `$destination/$clusterId/driver`, while the destination
+          of executor logs is `$destination/$clusterId/executor`.
         :param cluster_name: str (optional)
           Cluster name requested by the user. This doesn't have to be unique. If not specified at creation,
           the cluster name will be an empty string.
@@ -380,13 +402,19 @@
         :param data_security_mode: :class:`DataSecurityMode` (optional)
           Data security mode decides what data governance model to use when accessing data from a cluster.
           
-          * `NONE`: No security isolation for multiple users sharing the cluster. Data governance features are
-          not available in this mode. * `SINGLE_USER`: A secure cluster that can only be exclusively used by a
-          single user specified in `single_user_name`. Most programming languages, cluster features and data
-          governance features are available in this mode. * `USER_ISOLATION`: A secure cluster that can be
-          shared by multiple users. Cluster users are fully isolated so that they cannot see each other's data
-          and credentials. Most data governance features are supported in this mode. But programming languages
-          and cluster features might be limited.
+          The following modes can only be used with `kind`. * `DATA_SECURITY_MODE_AUTO`: Databricks will
+          choose the most appropriate access mode depending on your compute configuration. *
+          `DATA_SECURITY_MODE_STANDARD`: Alias for `USER_ISOLATION`. * `DATA_SECURITY_MODE_DEDICATED`: Alias
+          for `SINGLE_USER`.
+          
+          The following modes can be used regardless of `kind`. * `NONE`: No security isolation for multiple
+          users sharing the cluster. Data governance features are not available in this mode. * `SINGLE_USER`:
+          A secure cluster that can only be exclusively used by a single user specified in `single_user_name`.
+          Most programming languages, cluster features and data governance features are available in this
+          mode. * `USER_ISOLATION`: A secure cluster that can be shared by multiple users. Cluster users are
+          fully isolated so that they cannot see each other's data and credentials. Most data governance
+          features are supported in this mode. But programming languages and cluster features might be
+          limited.
           
           The following modes are deprecated starting with Databricks Runtime 15.0 and will be removed for
           future Databricks Runtime versions:
@@ -418,6 +446,17 @@
           logs are sent to `//init_scripts`.
         :param instance_pool_id: str (optional)
           The optional ID of the instance pool to which the cluster belongs.
+        :param is_single_node: bool (optional)
+          This field can only be used with `kind`.
+          
+          When set to true, Databricks will automatically set single-node-related `custom_tags`, `spark_conf`,
+          and `num_workers`.
+        :param kind: :class:`Kind` (optional)
+          The kind of compute described by this compute specification.
+          
+          Depending on `kind`, different validations and default values will be applied.
+          
+          The first usage of this value is for the simple cluster form where it sets `kind = CLASSIC_PREVIEW`.
         :param node_type_id: str (optional)
           This field encodes, through a single value, the resources available to each of the Spark nodes in
           this cluster. For example, the Spark nodes can be provisioned and optimized for memory or compute
@@ -464,6 +503,11 @@
           SSH public key contents that will be added to each Spark node in this cluster. The corresponding
           private keys can be used to login with the user name `ubuntu` on port `2200`. Up to 10 keys can be
           specified.
+        :param use_ml_runtime: bool (optional)
+          This field can only be used with `kind`.
+          
+          `effective_spark_version` is determined by `spark_version` (DBR release), this field
+          (`use_ml_runtime`), and whether `node_type_id` is a GPU node or not.
         :param workload_type: :class:`WorkloadType` (optional)
         
         :returns:
@@ -471,7 +515,7 @@
           See :method:wait_get_cluster_running for more details.
         
 
-    .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
+    .. py:method:: edit_and_wait(cluster_id: str, spark_version: str [, apply_policy_default_values: Optional[bool], autoscale: Optional[AutoScale], autotermination_minutes: Optional[int], aws_attributes: Optional[AwsAttributes], azure_attributes: Optional[AzureAttributes], cluster_log_conf: Optional[ClusterLogConf], cluster_name: Optional[str], custom_tags: Optional[Dict[str, str]], data_security_mode: Optional[DataSecurityMode], docker_image: Optional[DockerImage], driver_instance_pool_id: Optional[str], driver_node_type_id: Optional[str], enable_elastic_disk: Optional[bool], enable_local_disk_encryption: Optional[bool], gcp_attributes: Optional[GcpAttributes], init_scripts: Optional[List[InitScriptInfo]], instance_pool_id: Optional[str], is_single_node: Optional[bool], kind: Optional[Kind], node_type_id: Optional[str], num_workers: Optional[int], policy_id: Optional[str], runtime_engine: Optional[RuntimeEngine], single_user_name: Optional[str], spark_conf: Optional[Dict[str, str]], spark_env_vars: Optional[Dict[str, str]], ssh_public_keys: Optional[List[str]], use_ml_runtime: Optional[bool], workload_type: Optional[WorkloadType], timeout: datetime.timedelta = 0:20:00]) -> ClusterDetails
 
 
     .. py:method:: ensure_cluster_is_running(cluster_id: str)
@@ -906,7 +950,8 @@
 
         Set cluster permissions.
         
-        Sets permissions on a cluster. Clusters can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param cluster_id: str
           The cluster for which to get or manage permissions.
diff --git a/docs/workspace/compute/instance_pools.rst b/docs/workspace/compute/instance_pools.rst
index 277844170..333c44938 100644
--- a/docs/workspace/compute/instance_pools.rst
+++ b/docs/workspace/compute/instance_pools.rst
@@ -245,7 +245,8 @@
 
         Set instance pool permissions.
         
-        Sets permissions on an instance pool. Instance pools can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param instance_pool_id: str
           The instance pool for which to get or manage permissions.
diff --git a/docs/workspace/dashboards/genie.rst b/docs/workspace/dashboards/genie.rst
index 5581870b9..3908c6472 100644
--- a/docs/workspace/dashboards/genie.rst
+++ b/docs/workspace/dashboards/genie.rst
@@ -80,6 +80,25 @@
         :returns: :class:`GenieGetMessageQueryResultResponse`
         
 
+    .. py:method:: get_message_query_result_by_attachment(space_id: str, conversation_id: str, message_id: str, attachment_id: str) -> GenieGetMessageQueryResultResponse
+
+        Get conversation message SQL query result by attachment id.
+        
+        Get the result of a SQL query by attachment id. This is only available if a message has a query
+        attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        :param attachment_id: str
+          Attachment ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        
+
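+        A hypothetical usage sketch (all IDs below are placeholders):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # placeholders: substitute real Genie space/conversation/message/attachment IDs
+            result = w.genie.get_message_query_result_by_attachment(space_id='space-id',
+                                                                    conversation_id='conversation-id',
+                                                                    message_id='message-id',
+                                                                    attachment_id='attachment-id')
+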
     .. py:method:: start_conversation(space_id: str, content: str) -> Wait[GenieMessage]
 
         Start conversation.
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index 6d1565bb6..acea442bb 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -8,4 +8,6 @@ Manage Lakeview dashboards
    :maxdepth: 1
 
    genie
-   lakeview
\ No newline at end of file
+   lakeview
+   lakeview_embedded
+   query_execution
diff --git a/docs/workspace/dashboards/lakeview.rst b/docs/workspace/dashboards/lakeview.rst
index fe358063c..c37479dcb 100644
--- a/docs/workspace/dashboards/lakeview.rst
+++ b/docs/workspace/dashboards/lakeview.rst
@@ -7,47 +7,29 @@
     These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete).
 
-    .. py:method:: create(display_name: str [, parent_path: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard
+    .. py:method:: create( [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Create dashboard.
         
         Create a draft dashboard.
         
-        :param display_name: str
-          The display name of the dashboard.
-        :param parent_path: str (optional)
-          The workspace path of the folder containing the dashboard. Includes leading slash and no trailing
-          slash. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         
 
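+        A minimal sketch of the new request shape (assumes the `Dashboard` dataclass from
+        `databricks.sdk.service.dashboards`; the display name is illustrative):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.dashboards import Dashboard
+
+            w = WorkspaceClient()
+            # the request body now travels as a single Dashboard object
+            dash = w.lakeview.create(dashboard=Dashboard(display_name='sketch-dashboard'))
+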
-    .. py:method:: create_schedule(dashboard_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule
+    .. py:method:: create_schedule(dashboard_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Create dashboard schedule.
         
         :param dashboard_id: str
           UUID identifying the dashboard to which the schedule belongs.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         
 
-    .. py:method:: create_subscription(dashboard_id: str, schedule_id: str, subscriber: Subscriber) -> Subscription
+    .. py:method:: create_subscription(dashboard_id: str, schedule_id: str [, subscription: Optional[Subscription]]) -> Subscription
 
         Create schedule subscription.
         
@@ -55,8 +37,7 @@
           UUID identifying the dashboard to which the subscription belongs.
         :param schedule_id: str
           UUID identifying the schedule to which the subscription belongs.
-        :param subscriber: :class:`Subscriber`
-          Subscriber details for users and destinations to be added as subscribers to the schedule.
+        :param subscription: :class:`Subscription` (optional)
         
         :returns: :class:`Subscription`
         
@@ -112,7 +93,7 @@
         Get the current published dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         :returns: :class:`PublishedDashboard`
         
@@ -166,7 +147,7 @@
         List dashboard schedules.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the schedule belongs.
+          UUID identifying the dashboard to which the schedules belong.
         :param page_size: int (optional)
           The number of schedules to return per page.
         :param page_token: str (optional)
@@ -181,9 +162,9 @@
         List schedule subscriptions.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to which the subscription belongs.
+          UUID identifying the dashboard to which the subscriptions belong.
         :param schedule_id: str
-          UUID identifying the schedule to which the subscription belongs.
+          UUID identifying the schedule to which the subscriptions belong.
         :param page_size: int (optional)
           The number of subscriptions to return per page.
         :param page_token: str (optional)
@@ -193,7 +174,7 @@
         :returns: Iterator over :class:`Subscription`
         
 
-    .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str]]) -> Dashboard
+    .. py:method:: migrate(source_dashboard_id: str [, display_name: Optional[str], parent_path: Optional[str], update_parameter_syntax: Optional[bool]]) -> Dashboard
 
         Migrate dashboard.
         
@@ -205,6 +186,9 @@
           Display name for the new Lakeview dashboard.
         :param parent_path: str (optional)
           The workspace path of the folder to contain the migrated Lakeview dashboard.
+        :param update_parameter_syntax: bool (optional)
+          Flag to indicate if mustache parameter syntax ({{ param }}) should be auto-updated to named syntax
+          (:param) when converting datasets in the dashboard.
         
         :returns: :class:`Dashboard`
         
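+        A sketch of migrating with the new flag (the source dashboard ID is a placeholder):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # convert {{ param }} references to :param syntax while migrating
+            dash = w.lakeview.migrate(source_dashboard_id='abc-123',
+                                      update_parameter_syntax=True)
+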
@@ -245,12 +229,12 @@
         Unpublish the dashboard.
         
         :param dashboard_id: str
-          UUID identifying the dashboard to be published.
+          UUID identifying the published dashboard.
         
         
         
 
-    .. py:method:: update(dashboard_id: str [, display_name: Optional[str], etag: Optional[str], serialized_dashboard: Optional[str], warehouse_id: Optional[str]]) -> Dashboard
+    .. py:method:: update(dashboard_id: str [, dashboard: Optional[Dashboard]]) -> Dashboard
 
         Update dashboard.
         
@@ -258,25 +242,12 @@
         
         :param dashboard_id: str
           UUID identifying the dashboard.
-        :param display_name: str (optional)
-          The display name of the dashboard.
-        :param etag: str (optional)
-          The etag for the dashboard. Can be optionally provided on updates to ensure that the dashboard has
-          not been modified since the last read. This field is excluded in List Dashboards responses.
-        :param serialized_dashboard: str (optional)
-          The contents of the dashboard in serialized string form. This field is excluded in List Dashboards
-          responses. Use the [get dashboard API] to retrieve an example response, which includes the
-          `serialized_dashboard` field. This field provides the structure of the JSON string that represents
-          the dashboard's layout and components.
-          
-          [get dashboard API]: https://docs.databricks.com/api/workspace/lakeview/get
-        :param warehouse_id: str (optional)
-          The warehouse ID used to run the dashboard.
+        :param dashboard: :class:`Dashboard` (optional)
         
         :returns: :class:`Dashboard`
         
 
-    .. py:method:: update_schedule(dashboard_id: str, schedule_id: str, cron_schedule: CronSchedule [, display_name: Optional[str], etag: Optional[str], pause_status: Optional[SchedulePauseStatus]]) -> Schedule
+    .. py:method:: update_schedule(dashboard_id: str, schedule_id: str [, schedule: Optional[Schedule]]) -> Schedule
 
         Update dashboard schedule.
         
@@ -284,15 +255,7 @@
           UUID identifying the dashboard to which the schedule belongs.
         :param schedule_id: str
           UUID identifying the schedule.
-        :param cron_schedule: :class:`CronSchedule`
-          The cron expression describing the frequency of the periodic refresh for this schedule.
-        :param display_name: str (optional)
-          The display name for schedule.
-        :param etag: str (optional)
-          The etag for the schedule. Must be left empty on create, must be provided on updates to ensure that
-          the schedule has not been modified since the last read, and can be optionally provided on delete.
-        :param pause_status: :class:`SchedulePauseStatus` (optional)
-          The status indicates whether this schedule is paused or not.
+        :param schedule: :class:`Schedule` (optional)
         
         :returns: :class:`Schedule`
         
\ No newline at end of file
diff --git a/docs/workspace/dashboards/lakeview_embedded.rst b/docs/workspace/dashboards/lakeview_embedded.rst
new file mode 100644
index 000000000..4c06031f5
--- /dev/null
+++ b/docs/workspace/dashboards/lakeview_embedded.rst
@@ -0,0 +1,19 @@
+``w.lakeview_embedded``: Lakeview Embedded
+==========================================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: LakeviewEmbeddedAPI
+
+    Token-based Lakeview APIs for embedding dashboards in external applications.
+
+    .. py:method:: get_published_dashboard_embedded(dashboard_id: str)
+
+        Read a published dashboard in an embedded UI.
+        
+        Get the current published dashboard within an embedded context.
+        
+        :param dashboard_id: str
+          UUID identifying the published dashboard.
+        
+        
+        
\ No newline at end of file
diff --git a/docs/workspace/dashboards/query_execution.rst b/docs/workspace/dashboards/query_execution.rst
new file mode 100644
index 000000000..5672183d9
--- /dev/null
+++ b/docs/workspace/dashboards/query_execution.rst
@@ -0,0 +1,46 @@
+``w.query_execution``: Query Execution
+======================================
+.. currentmodule:: databricks.sdk.service.dashboards
+
+.. py:class:: QueryExecutionAPI
+
+    Query execution APIs for AI/BI Dashboards.
+
+    .. py:method:: cancel_published_query_execution(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> CancelQueryExecutionResponse
+
+        Cancel the results of a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`CancelQueryExecutionResponse`
+        
+
+    .. py:method:: execute_published_dashboard_query(dashboard_name: str, dashboard_revision_id: str [, override_warehouse_id: Optional[str]])
+
+        Execute a query for a published dashboard.
+        
+        :param dashboard_name: str
+          The dashboard name and revision_id are required to retrieve the PublishedDatasetDataModel, which
+          contains the list of datasets, warehouse_id, and embedded_credentials.
+        :param dashboard_revision_id: str
+        :param override_warehouse_id: str (optional)
+          A dashboard schedule can override the warehouse used as compute for processing the published
+          dashboard queries.
+        
+        
+        
+
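+        A hypothetical end-to-end sketch (dashboard name and revision ID are placeholders):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # kick off execution for a published dashboard, then poll for status
+            w.query_execution.execute_published_dashboard_query(dashboard_name='my-dashboard',
+                                                                dashboard_revision_id='rev-1')
+            status = w.query_execution.poll_published_query_status(dashboard_name='my-dashboard',
+                                                                   dashboard_revision_id='rev-1')
+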
+    .. py:method:: poll_published_query_status(dashboard_name: str, dashboard_revision_id: str [, tokens: Optional[List[str]]]) -> PollQueryStatusResponse
+
+        Poll the results of a query for a published, embedded dashboard.
+        
+        :param dashboard_name: str
+        :param dashboard_revision_id: str
+        :param tokens: List[str] (optional)
+          Example: EC0A..ChAB7WCEn_4Qo4vkLqEbXsxxEgh3Y2pbWw45WhoQXgZSQo9aS5q2ZvFcbvbx9CgA-PAEAQ
+        
+        :returns: :class:`PollQueryStatusResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/files/files.rst b/docs/workspace/files/files.rst
index db20b2192..0151fcce2 100644
--- a/docs/workspace/files/files.rst
+++ b/docs/workspace/files/files.rst
@@ -13,9 +13,12 @@
     /Volumes/<catalog_name>/<schema_name>/<volume_name>/<path_to_file>.
     
     The Files API has two distinct endpoints, one for working with files (`/fs/files`) and another one for
-    working with directories (`/fs/directories`). Both endpoints, use the standard HTTP methods GET, HEAD,
-    PUT, and DELETE to manage files and directories specified using their URI path. The path is always
-    absolute.
+    working with directories (`/fs/directories`). Both endpoints use the standard HTTP methods GET, HEAD, PUT,
+    and DELETE to manage files and directories specified using their URI path. The path is always absolute.
+    
+    Some Files API client features are currently experimental. To enable them, set
+    `enable_experimental_files_api_client = True` in your configuration profile or use the environment
+    variable `DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT=True`.
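+    
+    A minimal sketch of opting in from code (assumes `Config` from `databricks.sdk.core`):
+    
+    .. code-block::
+    
+        from databricks.sdk import WorkspaceClient
+        from databricks.sdk.core import Config
+        
+        # illustrative: enable the experimental Files API client for this client only
+        w = WorkspaceClient(config=Config(enable_experimental_files_api_client=True))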
     
     [Unity Catalog volumes]: https://docs.databricks.com/en/connect/unity-catalog/volumes.html
 
@@ -64,8 +67,8 @@
 
         Download a file.
         
-        Downloads a file of up to 5 GiB. The file contents are the response body. This is a standard HTTP file
-        download, not a JSON RPC.
+        Downloads a file. The file contents are the response body. This is a standard HTTP file download, not
+        a JSON RPC. It supports the Range and If-Unmodified-Since HTTP headers.
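+
+        A sketch of a partial read using the streaming response (the path is a placeholder; assumes the
+        `contents` stream on the returned :class:`DownloadResponse`):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # placeholder path: any readable file in a Unity Catalog volume
+            resp = w.files.download('/Volumes/main/default/my_volume/data.csv')
+            with resp.contents as f:
+                head = f.read(1024)  # read only the first KiB of the file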
         
         :param file_path: str
           The absolute path of the file.
diff --git a/docs/workspace/iam/access_control.rst b/docs/workspace/iam/access_control.rst
new file mode 100644
index 000000000..a5f1feeda
--- /dev/null
+++ b/docs/workspace/iam/access_control.rst
@@ -0,0 +1,23 @@
+``w.access_control``: RbacService
+=================================
+.. currentmodule:: databricks.sdk.service.iam
+
+.. py:class:: AccessControlAPI
+
+    Rule-based Access Control for Databricks Resources.
+
+    .. py:method:: check_policy(actor: Actor, permission: str, resource: str, consistency_token: ConsistencyToken, authz_identity: RequestAuthzIdentity [, resource_info: Optional[ResourceInfo]]) -> CheckPolicyResponse
+
+        Check access policy to a resource.
+        
+        :param actor: :class:`Actor`
+        :param permission: str
+        :param resource: str
+          Ex: (servicePrincipal/use, accounts//servicePrincipals/) Ex:
+          (servicePrincipal.ruleSet/update, accounts//servicePrincipals//ruleSets/default)
+        :param consistency_token: :class:`ConsistencyToken`
+        :param authz_identity: :class:`RequestAuthzIdentity`
+        :param resource_info: :class:`ResourceInfo` (optional)
+        
+        :returns: :class:`CheckPolicyResponse`
+        
\ No newline at end of file
diff --git a/docs/workspace/iam/index.rst b/docs/workspace/iam/index.rst
index 2a98cc9ae..00a7f1fe7 100644
--- a/docs/workspace/iam/index.rst
+++ b/docs/workspace/iam/index.rst
@@ -7,6 +7,7 @@ Manage users, service principals, groups and their permissions in Accounts and W
 .. toctree::
    :maxdepth: 1
 
+   access_control
    account_access_control_proxy
    current_user
    groups
diff --git a/docs/workspace/iam/permissions.rst b/docs/workspace/iam/permissions.rst
index 1f2fd2851..bf8f8e77f 100644
--- a/docs/workspace/iam/permissions.rst
+++ b/docs/workspace/iam/permissions.rst
@@ -153,7 +153,8 @@
 
         Set object permissions.
         
-        Sets permissions on an object. Objects can inherit permissions from their parent objects or root
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
         object.
         
         :param request_object_type: str
diff --git a/docs/workspace/iam/users.rst b/docs/workspace/iam/users.rst
index 2eae834a2..616ef7b86 100644
--- a/docs/workspace/iam/users.rst
+++ b/docs/workspace/iam/users.rst
@@ -239,7 +239,8 @@
 
         Set password permissions.
         
-        Sets permissions on all passwords. Passwords can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`PasswordAccessControlRequest`] (optional)
         
diff --git a/docs/workspace/index.rst b/docs/workspace/index.rst
index 1b6c5708c..dc86a0e78 100644
--- a/docs/workspace/index.rst
+++ b/docs/workspace/index.rst
@@ -9,6 +9,7 @@ These APIs are available from WorkspaceClient
 
    apps/index
    catalog/index
+   cleanrooms/index
    compute/index
    dashboards/index
    files/index
diff --git a/docs/workspace/jobs/jobs.rst b/docs/workspace/jobs/jobs.rst
index b097c94c8..36f7d7d39 100644
--- a/docs/workspace/jobs/jobs.rst
+++ b/docs/workspace/jobs/jobs.rst
@@ -1,8 +1,8 @@
-``w.jobs``: Jobs
-================
+``w.jobs``: Jobs (2.2)
+======================
 .. currentmodule:: databricks.sdk.service.jobs
 
-.. py:class:: JobsAPI
+.. py:class:: JobsExt
 
     The Jobs API allows you to create, edit, and delete jobs.
     
@@ -120,7 +120,7 @@
     .. py:method:: cancel_run_and_wait(run_id: int, timeout: datetime.timedelta = 0:20:00) -> Run
 
 
-    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
+    .. py:method:: create( [, access_control_list: Optional[List[JobAccessControlRequest]], budget_policy_id: Optional[str], continuous: Optional[Continuous], deployment: Optional[JobDeployment], description: Optional[str], edit_mode: Optional[JobEditMode], email_notifications: Optional[JobEmailNotifications], environments: Optional[List[JobEnvironment]], format: Optional[Format], git_source: Optional[GitSource], health: Optional[JobsHealthRules], job_clusters: Optional[List[JobCluster]], max_concurrent_runs: Optional[int], name: Optional[str], notification_settings: Optional[JobNotificationSettings], parameters: Optional[List[JobParameterDefinition]], performance_target: Optional[PerformanceTarget], queue: Optional[QueueSettings], run_as: Optional[JobRunAs], schedule: Optional[CronSchedule], tags: Optional[Dict[str, str]], tasks: Optional[List[Task]], timeout_seconds: Optional[int], trigger: Optional[TriggerSettings], webhook_notifications: Optional[WebhookNotifications]]) -> CreateResponse
 
 
         Usage:
@@ -199,6 +199,7 @@
         :param job_clusters: List[:class:`JobCluster`] (optional)
           A list of job cluster specifications that can be shared and reused by tasks of this job. Libraries
           cannot be declared in a shared job cluster. You must declare dependent libraries in task settings.
+          If more than 100 job clusters are available, you can paginate through them using :method:jobs/get.
         :param max_concurrent_runs: int (optional)
           An optional maximum allowed number of concurrent runs of the job. Set this value if you want to be
           able to execute multiple runs of the same job concurrently. This is useful for example if you
@@ -215,14 +216,16 @@
           `email_notifications` and `webhook_notifications` for this job.
         :param parameters: List[:class:`JobParameterDefinition`] (optional)
           Job-level parameter definitions
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be.
         :param queue: :class:`QueueSettings` (optional)
           The queue settings of the job.
         :param run_as: :class:`JobRunAs` (optional)
-          Write-only setting. Specifies the user, service principal or group that the job/pipeline runs as. If
-          not specified, the job/pipeline runs as the user who created the job/pipeline.
+          Write-only setting. Specifies the user or service principal that the job runs as. If not specified,
+          the job runs as the user who created the job.
           
-          Exactly one of `user_name`, `service_principal_name`, `group_name` should be specified. If not, an
-          error is thrown.
+          Either `user_name` or `service_principal_name` should be specified. If not, an error is thrown.
         :param schedule: :class:`CronSchedule` (optional)
           An optional periodic schedule for this job. The default behavior is that the job only runs when
           triggered by clicking “Run Now” in the Jobs UI or sending an API request to `runNow`.
@@ -231,7 +234,9 @@
           clusters, and are subject to the same limitations as cluster tags. A maximum of 25 tags can be added
           to the job.
         :param tasks: List[:class:`Task`] (optional)
-          A list of task specifications to be executed by this job.
+          A list of task specifications to be executed by this job. If more than 100 tasks are available, you
+          can paginate through them using :method:jobs/get. Use the `next_page_token` field at the object root
+          to determine if more results are available.
         :param timeout_seconds: int (optional)
           An optional timeout applied to each run of this job. A value of `0` means no timeout.
         :param trigger: :class:`TriggerSettings` (optional)
@@ -316,7 +321,7 @@
         :returns: :class:`ExportRunOutput`
         
 
-    .. py:method:: get(job_id: int) -> Job
+    .. py:method:: get(job_id: int [, page_token: Optional[str]]) -> Job
 
 
         Usage:
@@ -352,8 +357,16 @@
         
         Retrieves the details for a single job.
         
+        In Jobs API 2.2, requests for a single job support pagination of `tasks` and `job_clusters` when
+        either exceeds 100 elements. Use the `next_page_token` field to check for more results and pass its
+        value as the `page_token` in subsequent requests. Arrays with fewer than 100 elements in a page will
+        be empty on later pages.
+        
         :param job_id: int
           The canonical identifier of the job to retrieve information about. This field is required.
+        :param page_token: str (optional)
+          Use `next_page_token` returned from the previous GetJob to request the next page of the job's
+          sub-resources.
         
         :returns: :class:`Job`
         
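+        A sketch of paginating a large job's sub-resources (the job ID is a placeholder; assumes
+        `next_page_token` is exposed at the object root, as described above):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            job = w.jobs.get(job_id=1234)
+            tasks = list(job.settings.tasks or [])
+            # follow next_page_token until all task pages have been fetched
+            while job.next_page_token:
+                job = w.jobs.get(job_id=1234, page_token=job.next_page_token)
+                tasks.extend(job.settings.tasks or [])
+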
@@ -382,7 +395,7 @@
         :returns: :class:`JobPermissions`
         
 
-    .. py:method:: get_run(run_id: int [, include_history: Optional[bool], include_resolved_values: Optional[bool], page_token: Optional[str]]) -> Run
+    .. py:method:: get_run(run_id: int [, include_history: bool, include_resolved_values: bool, page_token: str]) -> Run
 
 
         Usage:
@@ -415,9 +428,9 @@
             w.jobs.delete_run(run_id=run.run_id)
 
         Get a single job run.
-        
-        Retrieve the metadata of a run.
-        
+
+        Retrieve the metadata of a run. If a run has multiple pages of tasks, it will paginate through all
+        pages of tasks, iterations, job_clusters, job_parameters, and repair history.
+
         :param run_id: int
           The canonical identifier of the run for which to retrieve the metadata. This field is required.
         :param include_history: bool (optional)
@@ -425,9 +438,9 @@
         :param include_resolved_values: bool (optional)
           Whether to include resolved parameter values in the response.
         :param page_token: str (optional)
-          To list the next page or the previous page of job tasks, set this field to the value of the
-          `next_page_token` or `prev_page_token` returned in the GetJob response.
-        
+          To list the next page of job tasks, set this field to the value of the `next_page_token` returned in
+          the GetJob response.
+
         :returns: :class:`Run`
         
 
@@ -519,7 +532,8 @@
         Retrieves a list of jobs.
         
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/get to paginate through all tasks and clusters.
         :param limit: int (optional)
           The number of jobs to return. This value must be greater than 0 and less or equal to 100. The
           default value is 20.
@@ -581,7 +595,8 @@
           If completed_only is `true`, only completed runs are included in the results; otherwise, lists both
           active and completed runs. This field cannot be `true` when active_only is `true`.
         :param expand_tasks: bool (optional)
-          Whether to include task and cluster details in the response.
+          Whether to include task and cluster details in the response. Note that in API 2.2, only the first
+          100 elements will be shown. Use :method:jobs/getrun to paginate through all tasks and clusters.
         :param job_id: int (optional)
           The job for which to list runs. If omitted, the Jobs service lists runs from all jobs.
         :param limit: int (optional)
@@ -661,8 +676,9 @@
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run. for example `"param": "overriding_val"`
         :param latest_repair_id: int (optional)
@@ -791,7 +807,7 @@
         
         
 
-    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
+    .. py:method:: run_now(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]]]) -> Wait[Run]
 
 
         Usage:
@@ -854,8 +870,9 @@
           in conjunction with notebook_params. The JSON representation of this field (for example
           `{"jar_params":["john doe","35"]}`) cannot exceed 10,000 bytes.
           
-          Use [Task parameter variables](/jobs.html"#parameter-variables") to set parameters containing
-          information about job runs.
+          Use [Task parameter variables] to set parameters containing information about job runs.
+          
+          [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
         :param job_parameters: Dict[str,str] (optional)
           Job-level parameters used in the run. for example `"param": "overriding_val"`
         :param notebook_params: Dict[str,str] (optional)
@@ -874,6 +891,13 @@
           
           [Task parameter variables]: https://docs.databricks.com/jobs.html#parameter-variables
           [dbutils.widgets.get]: https://docs.databricks.com/dev-tools/databricks-utils.html
+        :param only: List[str] (optional)
+          A list of task keys to run inside of the job. If this field is not provided, all tasks in the job
+          will be run.
+        :param performance_target: :class:`PerformanceTarget` (optional)
+          PerformanceTarget defines how performant or cost-efficient the execution of a run on serverless
+          compute should be. For a RunNow request, the run will execute with these settings instead of the
+          ones defined in the job.
         :param pipeline_params: :class:`PipelineParams` (optional)
           Controls whether the pipeline should perform a full refresh
         :param python_named_params: Dict[str,str] (optional)
@@ -919,14 +943,15 @@
           See :method:wait_get_run_job_terminated_or_skipped for more details.
         
 
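+        A sketch of the new selective-run fields (job ID and task keys are placeholders):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+            # run only two named tasks of the job instead of the whole task graph
+            run = w.jobs.run_now(job_id=1234, only=['ingest', 'transform']).result()
+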
-    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
+    .. py:method:: run_now_and_wait(job_id: int [, dbt_commands: Optional[List[str]], idempotency_token: Optional[str], jar_params: Optional[List[str]], job_parameters: Optional[Dict[str, str]], notebook_params: Optional[Dict[str, str]], only: Optional[List[str]], performance_target: Optional[PerformanceTarget], pipeline_params: Optional[PipelineParams], python_named_params: Optional[Dict[str, str]], python_params: Optional[List[str]], queue: Optional[QueueSettings], spark_submit_params: Optional[List[str]], sql_params: Optional[Dict[str, str]], timeout: datetime.timedelta = 0:20:00]) -> Run
 
 
     .. py:method:: set_permissions(job_id: str [, access_control_list: Optional[List[JobAccessControlRequest]]]) -> JobPermissions
 
         Set job permissions.
         
-        Sets permissions on a job. Jobs can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param job_id: str
           The job for which to get or manage permissions.
diff --git a/docs/workspace/ml/experiments.rst b/docs/workspace/ml/experiments.rst
index c09cfe353..44ceeef8c 100644
--- a/docs/workspace/ml/experiments.rst
+++ b/docs/workspace/ml/experiments.rst
@@ -578,7 +578,8 @@
 
         Set experiment permissions.
         
-        Sets permissions on an experiment. Experiments can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param experiment_id: str
           The experiment for which to get or manage permissions.
diff --git a/docs/workspace/ml/model_registry.rst b/docs/workspace/ml/model_registry.rst
index 8ac52916f..d08a85415 100644
--- a/docs/workspace/ml/model_registry.rst
+++ b/docs/workspace/ml/model_registry.rst
@@ -658,8 +658,8 @@
 
         Set registered model permissions.
         
-        Sets permissions on a registered model. Registered models can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param registered_model_id: str
           The registered model for which to get or manage permissions.
diff --git a/docs/workspace/pipelines/pipelines.rst b/docs/workspace/pipelines/pipelines.rst
index 9801a200e..ec31991ef 100644
--- a/docs/workspace/pipelines/pipelines.rst
+++ b/docs/workspace/pipelines/pipelines.rst
@@ -15,7 +15,7 @@
     also enforce data quality with Delta Live Tables expectations. Expectations allow you to define expected
     data quality and specify how to handle records that fail those expectations.
 
-    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
+    .. py:method:: create( [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], dry_run: Optional[bool], edition: Optional[str], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]]) -> CreatePipelineResponse
 
 
         Usage:
@@ -79,7 +79,7 @@
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -93,6 +93,14 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
@@ -324,7 +332,8 @@
 
         Set pipeline permissions.
         
-        Sets permissions on a pipeline. Pipelines can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param pipeline_id: str
           The pipeline for which to get or manage permissions.
@@ -376,7 +385,7 @@
     .. py:method:: stop_and_wait(pipeline_id: str, timeout: datetime.timedelta = 0:20:00) -> GetPipelineResponse
 
 
-    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
+    .. py:method:: update(pipeline_id: str [, allow_duplicate_names: Optional[bool], budget_policy_id: Optional[str], catalog: Optional[str], channel: Optional[str], clusters: Optional[List[PipelineCluster]], configuration: Optional[Dict[str, str]], continuous: Optional[bool], deployment: Optional[PipelineDeployment], development: Optional[bool], edition: Optional[str], expected_last_modified: Optional[int], filters: Optional[Filters], gateway_definition: Optional[IngestionGatewayPipelineDefinition], id: Optional[str], ingestion_definition: Optional[IngestionPipelineDefinition], libraries: Optional[List[PipelineLibrary]], name: Optional[str], notifications: Optional[List[Notifications]], photon: Optional[bool], restart_window: Optional[RestartWindow], run_as: Optional[RunAs], schema: Optional[str], serverless: Optional[bool], storage: Optional[str], target: Optional[str], trigger: Optional[PipelineTrigger]])
 
 
         Usage:
@@ -456,7 +465,7 @@
         :param filters: :class:`Filters` (optional)
           Filters on which Pipeline packages to include in the deployed graph.
         :param gateway_definition: :class:`IngestionGatewayPipelineDefinition` (optional)
-          The definition of a gateway pipeline to support CDC.
+          The definition of a gateway pipeline to support change data capture.
         :param id: str (optional)
           Unique identifier for this pipeline.
         :param ingestion_definition: :class:`IngestionPipelineDefinition` (optional)
@@ -470,6 +479,14 @@
           List of notification settings for this pipeline.
         :param photon: bool (optional)
           Whether Photon is enabled for this pipeline.
+        :param restart_window: :class:`RestartWindow` (optional)
+          Restart window of this pipeline.
+        :param run_as: :class:`RunAs` (optional)
+          Write-only setting, available only in Create/Update calls. Specifies the user or service principal
+          that the pipeline runs as. If not specified, the pipeline runs as the user who created the pipeline.
+          
+          Only `user_name` or `service_principal_name` can be specified. If both are specified, an error is
+          thrown.
         :param schema: str (optional)
           The default schema (database) where tables are read from or published to. The presence of this field
           implies that the pipeline is in direct publishing mode.
diff --git a/docs/workspace/provisioning/credentials.rst b/docs/workspace/provisioning/credentials.rst
new file mode 100644
index 000000000..8f38d13c4
--- /dev/null
+++ b/docs/workspace/provisioning/credentials.rst
@@ -0,0 +1,123 @@
+``w.credentials``: Credential configurations
+============================================
+.. currentmodule:: databricks.sdk.service.provisioning
+
+.. py:class:: CredentialsAPI
+
+    These APIs manage credential configurations for this workspace. Databricks needs access to a cross-account
+    service IAM role in your AWS account so that Databricks can deploy clusters in the appropriate VPC for the
+    new workspace. A credential configuration encapsulates this role information, and its ID is used when
+    creating a new workspace.
+
+    .. py:method:: create(credentials_name: str, aws_credentials: CreateCredentialAwsCredentials) -> Credential
+
+
+        Usage:
+
+        .. code-block::
+
+            import os
+            import time
+            
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service import provisioning
+            
+            a = AccountClient()
+            
+            role = a.credentials.create(
+                credentials_name=f'sdk-{time.time_ns()}',
+                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
+                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+            
+            # cleanup
+            a.credentials.delete(credentials_id=role.credentials_id)
+
+        Create credential configuration.
+        
+        Creates a Databricks credential configuration that represents cloud cross-account credentials for a
+        specified account. Databricks uses this to set up network infrastructure properly to host Databricks
+        clusters. For your AWS IAM role, you need to trust the External ID (the Databricks Account API account
+        ID) in the returned credential object, and configure the required access policy.
+        
+        Save the response's `credentials_id` field, which is the ID for your new credential configuration
+        object.
+        
+        For information about how to create a new workspace with this API, see [Create a new workspace using
+        the Account API]
+        
+        [Create a new workspace using the Account API]: http://docs.databricks.com/administration-guide/account-api/new-workspace.html
+        
+        :param credentials_name: str
+          The human-readable name of the credential configuration object.
+        :param aws_credentials: :class:`CreateCredentialAwsCredentials`
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: delete(credentials_id: str)
+
+        Delete credential configuration.
+        
+        Deletes a Databricks credential configuration object for an account, both specified by ID. You cannot
+        delete a credential that is associated with any workspace.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        
+        
+
+    .. py:method:: get(credentials_id: str) -> Credential
+
+
+        Usage:
+
+        .. code-block::
+
+            import os
+            import time
+            
+            from databricks.sdk import AccountClient
+            from databricks.sdk.service import provisioning
+            
+            a = AccountClient()
+            
+            role = a.credentials.create(
+                credentials_name=f'sdk-{time.time_ns()}',
+                aws_credentials=provisioning.CreateCredentialAwsCredentials(sts_role=provisioning.CreateCredentialStsRole(
+                    role_arn=os.environ["TEST_CROSSACCOUNT_ARN"])))
+            
+            by_id = a.credentials.get(credentials_id=role.credentials_id)
+            
+            # cleanup
+            a.credentials.delete(credentials_id=role.credentials_id)
+
+        Get credential configuration.
+        
+        Gets a Databricks credential configuration object for an account, both specified by ID.
+        
+        :param credentials_id: str
+          Databricks Account API credential configuration ID
+        
+        :returns: :class:`Credential`
+        
+
+    .. py:method:: list() -> Iterator[Credential]
+
+
+        Usage:
+
+        .. code-block::
+
+            from databricks.sdk import AccountClient
+            
+            a = AccountClient()
+            
+            configs = a.credentials.list()
+
+        Get all credential configurations.
+        
+        Gets all Databricks credential configurations associated with an account specified by ID.
+        
+        :returns: Iterator over :class:`Credential`
+        
\ No newline at end of file
diff --git a/docs/workspace/provisioning/index.rst b/docs/workspace/provisioning/index.rst
new file mode 100644
index 000000000..efe541424
--- /dev/null
+++ b/docs/workspace/provisioning/index.rst
@@ -0,0 +1,10 @@
+
+Provisioning
+============
+
+Resource management for secure Databricks Workspace deployment, cross-account IAM roles, storage, encryption, networking and private access.
+
+.. toctree::
+   :maxdepth: 1
+
+   credentials
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index cbcbca964..f6bfe82f4 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -29,14 +29,17 @@
         :returns: :class:`BuildLogsResponse`
         
 
-    .. py:method:: create(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
+    .. py:method:: create(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]]]) -> Wait[ServingEndpointDetailed]
 
         Create a new serving endpoint.
         
         :param name: str
           The name of the serving endpoint. This field is required and must be unique across a Databricks
           workspace. An endpoint name can consist of alphanumeric characters, dashes, and underscores.
-        :param config: :class:`EndpointCoreConfigInput`
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: Only external model and provisioned
+          throughput endpoints are currently supported.
+        :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
         :param ai_gateway: :class:`AiGatewayConfig` (optional)
           The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
@@ -54,7 +57,7 @@
           See :method:wait_get_serving_endpoint_not_updating for more details.
         
 
-    .. py:method:: create_and_wait(name: str, config: EndpointCoreConfigInput [, ai_gateway: Optional[AiGatewayConfig], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
+    .. py:method:: create_and_wait(name: str [, ai_gateway: Optional[AiGatewayConfig], config: Optional[EndpointCoreConfigInput], rate_limits: Optional[List[RateLimit]], route_optimized: Optional[bool], tags: Optional[List[EndpointTag]], timeout: datetime.timedelta = 0:20:00]) -> ServingEndpointDetailed
 
 
     .. py:method:: delete(name: str)
@@ -62,7 +65,6 @@
         Delete a serving endpoint.
         
         :param name: str
-          The name of the serving endpoint. This field is required.
         
         
         
@@ -98,7 +100,7 @@
     .. py:method:: get_open_ai_client()
 
 
-    .. py:method:: get_open_api(name: str)
+    .. py:method:: get_open_api(name: str) -> GetOpenApiResponse
 
         Get the schema for a serving endpoint.
         
@@ -108,7 +110,7 @@
         :param name: str
           The name of the serving endpoint that the served model belongs to. This field is required.
         
-        
+        :returns: :class:`GetOpenApiResponse`
         
 
     .. py:method:: get_permission_levels(serving_endpoint_id: str) -> GetServingEndpointPermissionLevelsResponse
@@ -136,6 +138,26 @@
         :returns: :class:`ServingEndpointPermissions`
         
 
+    .. py:method:: http_request(conn: str, method: ExternalFunctionRequestHttpMethod, path: str [, headers: typing.Dict[str, str], json: typing.Dict[str, str], params: typing.Dict[str, str]]) -> Response
+
+        Make an external service call using the credentials stored in a UC Connection.
+
+        **NOTE:** Experimental: This API may change or be removed in a future release without warning.
+
+        :param conn: str
+          The connection name to use. This is required to identify the external connection.
+        :param method: :class:`ExternalFunctionRequestHttpMethod`
+          The HTTP method to use (e.g., 'GET', 'POST'). This is required.
+        :param path: str
+          The relative path for the API endpoint. This is required.
+        :param headers: Dict[str,str] (optional)
+          Additional headers for the request. If not provided, only auth headers from connections would be
+          passed.
+        :param json: Dict[str,str] (optional)
+          JSON payload for the request.
+        :param params: Dict[str,str] (optional)
+          Query parameters for the request.
+
+        :returns: :class:`Response`
+        
+
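+        A minimal usage sketch; the connection name and path below are placeholders:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
+
+            w = WorkspaceClient()
+
+            # call the external service behind the UC Connection 'my_uc_connection'
+            response = w.serving_endpoints.http_request(conn='my_uc_connection',
+                                                        method=ExternalFunctionRequestHttpMethod.GET,
+                                                        path='/api/v1/status')
+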
     .. py:method:: list() -> Iterator[ServingEndpoint]
 
         Get all serving endpoints.
@@ -157,7 +179,7 @@
         :returns: :class:`ServerLogsResponse`
         
 
-    .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> Iterator[EndpointTag]
+    .. py:method:: patch(name: str [, add_tags: Optional[List[EndpointTag]], delete_tags: Optional[List[str]]]) -> EndpointTags
 
         Update tags of a serving endpoint.
         
@@ -170,7 +192,7 @@
         :param delete_tags: List[str] (optional)
           List of tag keys to delete
         
-        :returns: Iterator over :class:`EndpointTag`
+        :returns: :class:`EndpointTags`
         
 
     .. py:method:: put(name: str [, rate_limits: Optional[List[RateLimit]]]) -> PutResponse
@@ -192,8 +214,8 @@
 
         Update AI Gateway of a serving endpoint.
         
-        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model endpoints are currently
-        supported.
+        Used to update the AI Gateway of a serving endpoint. NOTE: Only external model and provisioned
+        throughput endpoints are currently supported.
         
         :param name: str
           The name of the serving endpoint whose AI Gateway is being updated. This field is required.
@@ -266,8 +288,8 @@
 
         Set serving endpoint permissions.
         
-        Sets permissions on a serving endpoint. Serving endpoints can inherit permissions from their root
-        object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param serving_endpoint_id: str
           The serving endpoint for which to get or manage permissions.
@@ -288,14 +310,16 @@
           The name of the serving endpoint to update. This field is required.
         :param auto_capture_config: :class:`AutoCaptureConfigInput` (optional)
           Configuration for Inference Tables which automatically logs requests and responses to Unity Catalog.
+          Note: this field is deprecated for creating new provisioned throughput endpoints, or updating
+          existing provisioned throughput endpoints that never have inference table configured; in these cases
+          please use AI Gateway to manage inference tables.
         :param served_entities: List[:class:`ServedEntityInput`] (optional)
-          A list of served entities for the endpoint to serve. A serving endpoint can have up to 15 served
-          entities.
+          The list of served entities under the serving endpoint config.
         :param served_models: List[:class:`ServedModelInput`] (optional)
-          (Deprecated, use served_entities instead) A list of served models for the endpoint to serve. A
-          serving endpoint can have up to 15 served models.
+          (Deprecated, use served_entities instead) The list of served models under the serving endpoint
+          config.
         :param traffic_config: :class:`TrafficConfig` (optional)
-          The traffic config defining how invocations to the serving endpoint should be routed.
+          The traffic configuration associated with the serving endpoint config.
         
         :returns:
           Long-running operation waiter for :class:`ServingEndpointDetailed`.
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
new file mode 100644
index 000000000..66c621997
--- /dev/null
+++ b/docs/workspace/settings/aibi_dashboard_embedding_access_policy.rst
@@ -0,0 +1,64 @@
+``w.settings.aibi_dashboard_embedding_access_policy``: AI/BI Dashboard Embedding Access Policy
+==============================================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: AibiDashboardEmbeddingAccessPolicyAPI
+
+    Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+    workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingAccessPolicySettingResponse
+
+        Delete the AI/BI dashboard embedding access policy.
+        
+        Delete the AI/BI dashboard embedding access policy, reverting to the default.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingAccessPolicySettingResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingAccessPolicySetting
+
+        Retrieve the AI/BI dashboard embedding access policy.
+        
+        Retrieves the AI/BI dashboard embedding access policy. The default setting is ALLOW_APPROVED_DOMAINS,
+        permitting AI/BI dashboards to be embedded on approved domains.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingAccessPolicySetting, field_mask: str) -> AibiDashboardEmbeddingAccessPolicySetting
+
+        Update the AI/BI dashboard embedding access policy.
+        
+        Updates the AI/BI dashboard embedding access policy at the workspace level.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingAccessPolicySetting`
+        
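+
+        A minimal usage sketch; the nested field path and enum value below follow the
+        :class:`AibiDashboardEmbeddingAccessPolicySetting` schema and are assumptions:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import settings
+
+            w = WorkspaceClient()
+
+            # read the current setting, change the policy, and write it back
+            current = w.settings.aibi_dashboard_embedding_access_policy.get()
+            current.aibi_dashboard_embedding_access_policy.access_policy_type = (
+                settings.AibiDashboardEmbeddingAccessPolicyAccessPolicyType.ALLOW_ALL_DOMAINS)
+            updated = w.settings.aibi_dashboard_embedding_access_policy.update(
+                allow_missing=True,
+                setting=current,
+                field_mask='aibi_dashboard_embedding_access_policy.access_policy_type')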
\ No newline at end of file
diff --git a/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
new file mode 100644
index 000000000..0c9294130
--- /dev/null
+++ b/docs/workspace/settings/aibi_dashboard_embedding_approved_domains.rst
@@ -0,0 +1,65 @@
+``w.settings.aibi_dashboard_embedding_approved_domains``: AI/BI Dashboard Embedding Approved Domains
+====================================================================================================
+.. currentmodule:: databricks.sdk.service.settings
+
+.. py:class:: AibiDashboardEmbeddingApprovedDomainsAPI
+
+    Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+    can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+
+    .. py:method:: delete( [, etag: Optional[str]]) -> DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse
+
+        Delete AI/BI dashboard embedding approved domains.
+        
+        Delete the list of domains approved to host embedded AI/BI dashboards, reverting to the default
+        empty list.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`DeleteAibiDashboardEmbeddingApprovedDomainsSettingResponse`
+        
+
+    .. py:method:: get( [, etag: Optional[str]]) -> AibiDashboardEmbeddingApprovedDomainsSetting
+
+        Retrieve the list of domains approved to host embedded AI/BI dashboards.
+        
+        Retrieves the list of domains approved to host embedded AI/BI dashboards.
+        
+        :param etag: str (optional)
+          etag used for versioning. The response is at least as fresh as the eTag provided. This is used for
+          optimistic concurrency control as a way to help prevent simultaneous writes of a setting overwriting
+          each other. It is strongly suggested that systems make use of the etag in the read -> delete pattern
+          to perform setting deletions in order to avoid race conditions. That is, get an etag from a GET
+          request, and pass it with the DELETE request to identify the rule set version you are deleting.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
+
+    .. py:method:: update(allow_missing: bool, setting: AibiDashboardEmbeddingApprovedDomainsSetting, field_mask: str) -> AibiDashboardEmbeddingApprovedDomainsSetting
+
+        Update the list of domains approved to host embedded AI/BI dashboards.
+        
+        Updates the list of domains approved to host embedded AI/BI dashboards. This update will fail if the
+        current workspace access policy is not ALLOW_APPROVED_DOMAINS.
+        
+        :param allow_missing: bool
+          This should always be set to true for Settings API. Added for AIP compliance.
+        :param setting: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        :param field_mask: str
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
+        
+        :returns: :class:`AibiDashboardEmbeddingApprovedDomainsSetting`
+        
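+
+        A minimal usage sketch; the setting and field names below follow the
+        :class:`AibiDashboardEmbeddingApprovedDomainsSetting` schema, and the domain is a placeholder:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import settings
+
+            w = WorkspaceClient()
+
+            # replace the approved-domains list with a single placeholder domain
+            updated = w.settings.aibi_dashboard_embedding_approved_domains.update(
+                allow_missing=True,
+                setting=settings.AibiDashboardEmbeddingApprovedDomainsSetting(
+                    aibi_dashboard_embedding_approved_domains=settings.AibiDashboardEmbeddingApprovedDomains(
+                        approved_domains=['example.com'])),
+                field_mask='aibi_dashboard_embedding_approved_domains.approved_domains')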
\ No newline at end of file
diff --git a/docs/workspace/settings/automatic_cluster_update.rst b/docs/workspace/settings/automatic_cluster_update.rst
index 2219e1130..350e0e713 100644
--- a/docs/workspace/settings/automatic_cluster_update.rst
+++ b/docs/workspace/settings/automatic_cluster_update.rst
@@ -36,9 +36,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`AutomaticClusterUpdateSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`AutomaticClusterUpdateSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/compliance_security_profile.rst b/docs/workspace/settings/compliance_security_profile.rst
index f503830bc..855451b82 100644
--- a/docs/workspace/settings/compliance_security_profile.rst
+++ b/docs/workspace/settings/compliance_security_profile.rst
@@ -38,9 +38,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`ComplianceSecurityProfileSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`ComplianceSecurityProfileSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/default_namespace.rst b/docs/workspace/settings/default_namespace.rst
index 061a0e34e..960949930 100644
--- a/docs/workspace/settings/default_namespace.rst
+++ b/docs/workspace/settings/default_namespace.rst
@@ -72,9 +72,15 @@
           restart of clusters and SQL warehouses to take effect. Additionally, the default namespace only
           applies when using Unity Catalog-enabled compute.
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DefaultNamespaceSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_access.rst b/docs/workspace/settings/disable_legacy_access.rst
index c8baba3a7..a015e777f 100644
--- a/docs/workspace/settings/disable_legacy_access.rst
+++ b/docs/workspace/settings/disable_legacy_access.rst
@@ -53,9 +53,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyAccess`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyAccess`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/disable_legacy_dbfs.rst b/docs/workspace/settings/disable_legacy_dbfs.rst
index ad11fa606..502111fe4 100644
--- a/docs/workspace/settings/disable_legacy_dbfs.rst
+++ b/docs/workspace/settings/disable_legacy_dbfs.rst
@@ -49,9 +49,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`DisableLegacyDbfs`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`DisableLegacyDbfs`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/enhanced_security_monitoring.rst b/docs/workspace/settings/enhanced_security_monitoring.rst
index fe7668973..c9dfb547d 100644
--- a/docs/workspace/settings/enhanced_security_monitoring.rst
+++ b/docs/workspace/settings/enhanced_security_monitoring.rst
@@ -40,9 +40,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`EnhancedSecurityMonitoringSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`EnhancedSecurityMonitoringSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/index.rst b/docs/workspace/settings/index.rst
index 22655853b..c9e4f335d 100644
--- a/docs/workspace/settings/index.rst
+++ b/docs/workspace/settings/index.rst
@@ -11,6 +11,8 @@ Manage security settings for Accounts and Workspaces
    ip_access_lists
    notification_destinations
    settings
+   aibi_dashboard_embedding_access_policy
+   aibi_dashboard_embedding_approved_domains
    automatic_cluster_update
    compliance_security_profile
    default_namespace
diff --git a/docs/workspace/settings/notification_destinations.rst b/docs/workspace/settings/notification_destinations.rst
index 29d947f55..8fb2d0c3c 100644
--- a/docs/workspace/settings/notification_destinations.rst
+++ b/docs/workspace/settings/notification_destinations.rst
@@ -65,6 +65,7 @@
         required in the request body.
         
         :param id: str
+          UUID identifying notification destination.
         :param config: :class:`Config` (optional)
           The configuration for the notification destination. Must wrap EXACTLY one of the nested configs.
         :param display_name: str (optional)
diff --git a/docs/workspace/settings/restrict_workspace_admins.rst b/docs/workspace/settings/restrict_workspace_admins.rst
index 47660fda4..b025112cc 100644
--- a/docs/workspace/settings/restrict_workspace_admins.rst
+++ b/docs/workspace/settings/restrict_workspace_admins.rst
@@ -62,9 +62,15 @@
           This should always be set to true for Settings API. Added for AIP compliance.
         :param setting: :class:`RestrictWorkspaceAdminsSetting`
         :param field_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         
         :returns: :class:`RestrictWorkspaceAdminsSetting`
         
\ No newline at end of file
diff --git a/docs/workspace/settings/settings.rst b/docs/workspace/settings/settings.rst
index 588031926..aa806280e 100644
--- a/docs/workspace/settings/settings.rst
+++ b/docs/workspace/settings/settings.rst
@@ -6,6 +6,18 @@
 
     Workspace Settings API allows users to manage settings at the workspace level.
 
+    .. py:property:: aibi_dashboard_embedding_access_policy
+        :type: AibiDashboardEmbeddingAccessPolicyAPI
+
+        Controls whether AI/BI published dashboard embedding is enabled, conditionally enabled, or disabled at the
+        workspace level. By default, this setting is conditionally enabled (ALLOW_APPROVED_DOMAINS).
+
+    .. py:property:: aibi_dashboard_embedding_approved_domains
+        :type: AibiDashboardEmbeddingApprovedDomainsAPI
+
+        Controls the list of domains approved to host the embedded AI/BI dashboards. The approved domains list
+        can't be mutated when the current access policy is not set to ALLOW_APPROVED_DOMAINS.
+
     .. py:property:: automatic_cluster_update
         :type: AutomaticClusterUpdateAPI
 
diff --git a/docs/workspace/settings/token_management.rst b/docs/workspace/settings/token_management.rst
index d030a432f..50dbe1328 100644
--- a/docs/workspace/settings/token_management.rst
+++ b/docs/workspace/settings/token_management.rst
@@ -53,7 +53,7 @@
         Deletes a token, specified by its ID.
         
         :param token_id: str
-          The ID of the token to get.
+          The ID of the token to revoke.
         
         
         
@@ -143,7 +143,8 @@
 
         Set token permissions.
         
-        Sets permissions on all tokens. Tokens can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param access_control_list: List[:class:`TokenAccessControlRequest`] (optional)
         
diff --git a/docs/workspace/sharing/index.rst b/docs/workspace/sharing/index.rst
index e012eb548..09452b490 100644
--- a/docs/workspace/sharing/index.rst
+++ b/docs/workspace/sharing/index.rst
@@ -7,7 +7,6 @@ Configure data sharing with Unity Catalog for providers, recipients, and shares
 .. toctree::
    :maxdepth: 1
 
-   clean_rooms
    providers
    recipient_activation
    recipients
diff --git a/docs/workspace/sharing/providers.rst b/docs/workspace/sharing/providers.rst
index 7cf398ac0..7d27acc3d 100644
--- a/docs/workspace/sharing/providers.rst
+++ b/docs/workspace/sharing/providers.rst
@@ -44,7 +44,8 @@
         :param comment: str (optional)
           Description about the provider.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         
@@ -228,7 +229,8 @@
         :param owner: str (optional)
           Username of Provider owner.
         :param recipient_profile_str: str (optional)
-          This field is required when the __authentication_type__ is **TOKEN** or not provided.
+          This field is required when the __authentication_type__ is **TOKEN**, **OAUTH_CLIENT_CREDENTIALS**
+          or not provided.
         
         :returns: :class:`ProviderInfo`
         
\ No newline at end of file
diff --git a/docs/workspace/sharing/recipients.rst b/docs/workspace/sharing/recipients.rst
index 44f2042bb..76e1da171 100644
--- a/docs/workspace/sharing/recipients.rst
+++ b/docs/workspace/sharing/recipients.rst
@@ -39,7 +39,7 @@
         Create a share recipient.
         
         Creates a new recipient with the delta sharing authentication type in the metastore. The caller must
-        be a metastore admin or has the **CREATE_RECIPIENT** privilege on the metastore.
+        be a metastore admin or have the **CREATE_RECIPIENT** privilege on the metastore.
         
         :param name: str
           Name of Recipient.
@@ -48,8 +48,8 @@
         :param comment: str (optional)
           Description about the recipient.
         :param data_recipient_global_metastore_id: str (optional)
-          The global Unity Catalog metastore id provided by the data recipient. This field is required when
-          the __authentication_type__ is **DATABRICKS**. The identifier is of format
+          The global Unity Catalog metastore id provided by the data recipient. This field is only present
+          when the __authentication_type__ is **DATABRICKS**. The identifier is of format
           __cloud__:__region__:__metastore-uuid__.
         :param expiration_time: int (optional)
           Expiration timestamp of the token, in epoch milliseconds.
@@ -58,9 +58,11 @@
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
-          Recipient properties as map of string key-value pairs.
+          Recipient properties as map of string key-value pairs. When provided in update request, the
+          specified properties will override the existing properties. To add and remove properties, one would
+          need to perform a read-modify-write.
         :param sharing_code: str (optional)
-          The one-time sharing code provided by the data recipient. This field is required when the
+          The one-time sharing code provided by the data recipient. This field is only present when the
           __authentication_type__ is **DATABRICKS**.
         
         :returns: :class:`RecipientInfo`
@@ -174,7 +176,7 @@
         The caller must be the owner of the recipient.
         
         :param name: str
-          The name of the recipient.
+          The name of the Recipient.
         :param existing_token_expire_in_seconds: int
           The expiration time of the bearer token in ISO 8601 format. This will set the expiration_time of
           existing token only to a smaller timestamp, it cannot extend the expiration_time. Use 0 to expire
@@ -224,7 +226,7 @@
         :returns: :class:`GetRecipientSharePermissionsResponse`
         
 
-    .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]])
+    .. py:method:: update(name: str [, comment: Optional[str], expiration_time: Optional[int], ip_access_list: Optional[IpAccessList], new_name: Optional[str], owner: Optional[str], properties_kvpairs: Optional[SecurablePropertiesKvPairs]]) -> RecipientInfo
 
 
         Usage:
@@ -259,7 +261,7 @@
         :param ip_access_list: :class:`IpAccessList` (optional)
           IP Access List
         :param new_name: str (optional)
-          New name for the recipient.
+          New name for the recipient.
         :param owner: str (optional)
           Username of the recipient owner.
         :param properties_kvpairs: :class:`SecurablePropertiesKvPairs` (optional)
@@ -267,5 +269,5 @@
           specified properties will override the existing properties. To add and remove properties, one would
           need to perform a read-modify-write.
         
-        
+        :returns: :class:`RecipientInfo`
         
\ No newline at end of file
diff --git a/docs/workspace/sql/alerts.rst b/docs/workspace/sql/alerts.rst
index c552d5f80..c8d9c31ab 100644
--- a/docs/workspace/sql/alerts.rst
+++ b/docs/workspace/sql/alerts.rst
@@ -182,9 +182,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param alert: :class:`UpdateAlertRequestAlert` (optional)
         
         :returns: :class:`Alert`
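+
+        A minimal update sketch; the alert ID and new name are placeholders:
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+            from databricks.sdk.service import sql
+
+            w = WorkspaceClient()
+
+            # update only the display name, as selected by the update_mask
+            alert = w.alerts.update(id='<alert-id>',
+                                    update_mask='display_name',
+                                    alert=sql.UpdateAlertRequestAlert(display_name='My renamed alert'))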
diff --git a/docs/workspace/sql/dashboards.rst b/docs/workspace/sql/dashboards.rst
index 97ea1014d..f22c7c96b 100644
--- a/docs/workspace/sql/dashboards.rst
+++ b/docs/workspace/sql/dashboards.rst
@@ -1,5 +1,5 @@
-``w.dashboards``: Dashboards
-============================
+``w.dashboards``: Dashboards (legacy)
+=====================================
 .. currentmodule:: databricks.sdk.service.sql
 
 .. py:class:: DashboardsAPI
diff --git a/docs/workspace/sql/index.rst b/docs/workspace/sql/index.rst
index 728730209..bddb6a827 100644
--- a/docs/workspace/sql/index.rst
+++ b/docs/workspace/sql/index.rst
@@ -18,5 +18,6 @@ Manage Databricks SQL assets, including warehouses, dashboards, queries and quer
    query_history
    query_visualizations
    query_visualizations_legacy
+   redash_config
    statement_execution
    warehouses
\ No newline at end of file
diff --git a/docs/workspace/sql/queries.rst b/docs/workspace/sql/queries.rst
index 1f01c2f1d..959552850 100644
--- a/docs/workspace/sql/queries.rst
+++ b/docs/workspace/sql/queries.rst
@@ -151,9 +151,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param query: :class:`UpdateQueryRequestQuery` (optional)
         
         :returns: :class:`Query`
diff --git a/docs/workspace/sql/query_visualizations.rst b/docs/workspace/sql/query_visualizations.rst
index 95095fb20..ac3d6c565 100644
--- a/docs/workspace/sql/query_visualizations.rst
+++ b/docs/workspace/sql/query_visualizations.rst
@@ -37,9 +37,15 @@
         
         :param id: str
         :param update_mask: str
-          Field mask is required to be passed into the PATCH request. Field mask specifies which fields of the
-          setting payload will be updated. The field mask needs to be supplied as single string. To specify
-          multiple fields in the field mask, use comma as the separator (no space).
+          The field mask must be a single string, with multiple fields separated by commas (no spaces). The
+          field path is relative to the resource object, using a dot (`.`) to navigate sub-fields (e.g.,
+          `author.given_name`). Specification of elements in sequence or map fields is not allowed, as only
+          the entire collection field can be specified. Field names must exactly match the resource field
+          names.
+          
+          A field mask of `*` indicates full replacement. It’s recommended to always explicitly list the
+          fields being updated and avoid using `*` wildcards, as it can lead to unintended results if the API
+          changes in the future.
         :param visualization: :class:`UpdateVisualizationRequestVisualization` (optional)
         
         :returns: :class:`Visualization`
diff --git a/docs/workspace/sql/redash_config.rst b/docs/workspace/sql/redash_config.rst
new file mode 100644
index 000000000..9b4382dd5
--- /dev/null
+++ b/docs/workspace/sql/redash_config.rst
@@ -0,0 +1,14 @@
+``w.redash_config``: Redash Config
+==================================
+.. currentmodule:: databricks.sdk.service.sql
+
+.. py:class:: RedashConfigAPI
+
+    Redash V2 service for workspace configurations (internal)
+
+    .. py:method:: get_config() -> ClientConfig
+
+        Read workspace configuration for Redash-v2.
+        
+        :returns: :class:`ClientConfig`
+        
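+
+        A minimal usage sketch (note this service is marked internal):
+
+        .. code-block::
+
+            from databricks.sdk import WorkspaceClient
+
+            w = WorkspaceClient()
+
+            config = w.redash_config.get_config()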
\ No newline at end of file
diff --git a/docs/workspace/sql/statement_execution.rst b/docs/workspace/sql/statement_execution.rst
index 716fa4fdc..44f64b512 100644
--- a/docs/workspace/sql/statement_execution.rst
+++ b/docs/workspace/sql/statement_execution.rst
@@ -80,11 +80,10 @@
     outstanding statement might have already completed execution when the cancel request arrives. Polling for
     status until a terminal state is reached is a reliable way to determine the final state. - Wait timeouts
     are approximate, occur server-side, and cannot account for things such as caller delays and network
-    latency from caller to service. - The system will auto-close a statement after one hour if the client
-    stops polling and thus you must poll at least once an hour. - The results are only available for one hour
-    after success; polling does not extend this. - The SQL Execution API must be used for the entire lifecycle
-    of the statement. For example, you cannot use the Jobs API to execute the command, and then the SQL
-    Execution API to cancel it.
+    latency from caller to service. - To guarantee that the statement is kept alive, you must poll at least
+    once every 15 minutes. - The results are only available for one hour after success; polling does not
+    extend this. - The SQL Execution API must be used for the entire lifecycle of the statement. For example,
+    you cannot use the Jobs API to execute the command, and then the SQL Execution API to cancel it.
     
     [Apache Arrow Columnar]: https://arrow.apache.org/overview/
     [Databricks SQL Statement Execution API tutorial]: https://docs.databricks.com/sql/api/sql-execution-tutorial.html
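+
+    A minimal polling sketch; the warehouse ID and statement are placeholders, and polling
+    every few seconds stays well within the 15-minute keep-alive requirement:
+
+    .. code-block::
+
+        import time
+
+        from databricks.sdk import WorkspaceClient
+        from databricks.sdk.service.sql import StatementState
+
+        w = WorkspaceClient()
+
+        # submit asynchronously, then poll until a terminal state is reached
+        resp = w.statement_execution.execute_statement(statement='SELECT 1',
+                                                       warehouse_id='<warehouse-id>',
+                                                       wait_timeout='0s')
+        while resp.status.state in (StatementState.PENDING, StatementState.RUNNING):
+            time.sleep(5)
+            resp = w.statement_execution.get_statement(resp.statement_id)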
diff --git a/docs/workspace/sql/warehouses.rst b/docs/workspace/sql/warehouses.rst
index 58b8a3fc0..fd55d5b0c 100644
--- a/docs/workspace/sql/warehouses.rst
+++ b/docs/workspace/sql/warehouses.rst
@@ -315,7 +315,8 @@
 
         Set SQL warehouse permissions.
         
-        Sets permissions on a SQL warehouse. SQL warehouses can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param warehouse_id: str
           The SQL warehouse for which to get or manage permissions.
diff --git a/docs/workspace/workspace/repos.rst b/docs/workspace/workspace/repos.rst
index 01b1c875f..5f3e3e290 100644
--- a/docs/workspace/workspace/repos.rst
+++ b/docs/workspace/workspace/repos.rst
@@ -62,7 +62,7 @@
         Deletes the specified repo.
         
         :param repo_id: int
-          ID of the Git folder (repo) object in the workspace.
+          The ID for the corresponding repo to delete.
         
         
         
@@ -157,7 +157,8 @@
 
         Set repo permissions.
         
-        Sets permissions on a repo. Repos can inherit permissions from their root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their root object.
         
         :param repo_id: str
           The repo for which to get or manage permissions.
diff --git a/docs/workspace/workspace/workspace.rst b/docs/workspace/workspace/workspace.rst
index 4aee0a2b6..595872deb 100644
--- a/docs/workspace/workspace/workspace.rst
+++ b/docs/workspace/workspace/workspace.rst
@@ -272,8 +272,9 @@
 
         Set workspace object permissions.
         
-        Sets permissions on a workspace object. Workspace objects can inherit permissions from their parent
-        objects or root object.
+        Sets permissions on an object, replacing existing permissions if they exist. Deletes all direct
+        permissions if none are specified. Objects can inherit permissions from their parent objects or root
+        object.
         
         :param workspace_object_type: str
           The workspace object type for which to get or manage permissions.
diff --git a/setup.py b/setup.py
index 021f4e430..bc7327e31 100644
--- a/setup.py
+++ b/setup.py
@@ -8,55 +8,36 @@
 with version_file.open('r') as f:
     exec(f.read(), version_data)
 
-setup(
-    name="sync-databricks-sdk",
-    version=version_data["__version__"],
-    packages=find_packages(exclude=["tests", "*tests.*", "*tests"]),
-    package_data={"databricks.sdk": ["py.typed"]},
-    python_requires=">=3.7",
-    install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"],
-    extras_require={
-        "dev": [
-            "pytest",
-            "pytest-cov",
-            "pytest-xdist",
-            "pytest-mock",
-            "yapf",
-            "pycodestyle",
-            "autoflake",
-            "isort",
-            "wheel",
-            "ipython",
-            "ipywidgets",
-            "requests-mock",
-            "pyfakefs",
-            "databricks-connect",
-            "pytest-rerunfailures",
-            "openai",
-            'langchain-openai; python_version > "3.7"',
-            "httpx",
-        ],
-        "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
-    },
-    author="Sync Computing",
-    author_email="info@synccomputing.com",
-    description="Sync Fork Databricks SDK for Python (Beta)",
-    long_description=io.open("README.md", encoding="utf-8").read(),
-    long_description_content_type="text/markdown",
-    url="https://databricks-sdk-py.readthedocs.io",
-    keywords="databricks sdk",
-    classifiers=[
-        "Development Status :: 4 - Beta",
-        "Intended Audience :: Developers",
-        "Intended Audience :: Science/Research",
-        "Intended Audience :: System Administrators",
-        "License :: OSI Approved :: Apache Software License",
-        "Programming Language :: Python :: 3.7",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Operating System :: OS Independent",
-    ],
-)
+setup(name="sync-databricks-sdk",
+      version=version_data['__version__'],
+      packages=find_packages(exclude=["tests", "*tests.*", "*tests"]),
+      package_data={"databricks.sdk": ["py.typed"]},
+      python_requires=">=3.7",
+      install_requires=["requests>=2.28.1,<3", "google-auth~=2.0"],
+      extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock",
+                              "yapf", "pycodestyle", "autoflake", "isort", "wheel",
+                              "ipython", "ipywidgets", "requests-mock", "pyfakefs",
+                              "databricks-connect", "pytest-rerunfailures", "openai", 
+                              'langchain-openai; python_version > "3.7"', "httpx"],
+                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
+                      "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]},
+      author="Sync Computing",
+      author_email="info@synccomputing.com",
+      description="Sync Fork Databricks SDK for Python (Beta)",
+      long_description=io.open("README.md", encoding="utf-8").read(),
+      long_description_content_type='text/markdown',
+      url="https://databricks-sdk-py.readthedocs.io",
+      keywords="databricks sdk",
+      classifiers=[
+          "Development Status :: 4 - Beta",
+          "Intended Audience :: Developers",
+          "Intended Audience :: Science/Research",
+          "Intended Audience :: System Administrators",
+          "License :: OSI Approved :: Apache Software License",
+          "Programming Language :: Python :: 3.7",
+          "Programming Language :: Python :: 3.8",
+          "Programming Language :: Python :: 3.9",
+          "Programming Language :: Python :: 3.10",
+          "Programming Language :: Python :: 3.11",
+          "Programming Language :: Python :: 3.12",
+          "Operating System :: OS Independent"])
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 0bf7f951d..3ee271778 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -133,15 +133,16 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib
 
     tasks = []
     for v in dbr_versions:
-        t = Task(task_key=f'test_{v.key.replace(".", "_")}',
-                 notebook_task=NotebookTask(notebook_path=notebook_path),
-                 new_cluster=ClusterSpec(
-                     spark_version=v.key,
-                     num_workers=1,
-                     instance_pool_id=instance_pool_id,
-                     # GCP uses "custom" data security mode by default, which does not support UC.
-                     data_security_mode=DataSecurityMode.SINGLE_USER),
-                 libraries=[library])
+        t = Task(
+            task_key=f'test_{v.key.replace(".", "_")}',
+            notebook_task=NotebookTask(notebook_path=notebook_path),
+            new_cluster=ClusterSpec(
+                spark_version=v.key,
+                num_workers=1,
+                instance_pool_id=instance_pool_id,
+                # GCP uses "custom" data security mode by default, which does not support UC.
+                data_security_mode=DataSecurityMode.SINGLE_USER),
+            libraries=[library])
         tasks.append(t)
 
     waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
diff --git a/tests/integration/test_clusters.py b/tests/integration/test_clusters.py
index 930989943..f3a9c6c89 100644
--- a/tests/integration/test_clusters.py
+++ b/tests/integration/test_clusters.py
@@ -41,7 +41,7 @@ def test_create_cluster(w, env_or_skip, random):
 
 def test_error_unmarshall(w, random):
     with pytest.raises(DatabricksError) as exc_info:
-        w.clusters.get('__non_existing__')
+        w.clusters.get('123__non_existing__')
     err = exc_info.value
-    assert 'Cluster __non_existing__ does not exist' in str(err)
+    assert 'Cluster 123__non_existing__ does not exist' in str(err)
     assert 'INVALID_PARAMETER_VALUE' == err.error_code
diff --git a/tests/integration/test_dbutils.py b/tests/integration/test_dbutils.py
index e6e2a8668..e486f2282 100644
--- a/tests/integration/test_dbutils.py
+++ b/tests/integration/test_dbutils.py
@@ -192,20 +192,21 @@ def test_secrets(w, random):
 
     from databricks.sdk.runtime import dbutils
 
+    all_scopes = dbutils.secrets.listScopes()
+    assert random_scope in [scope.getName() for scope in all_scopes]
+
     all_secrets = {}
-    for secret_scope in dbutils.secrets.listScopes():
-        scope = secret_scope.name
-        for secret_metadata in dbutils.secrets.list(scope):
-            key = secret_metadata.key
-            try:
-                all_secrets[f'{scope}.{key}'] = dbutils.secrets.get(scope, key)
-            except DatabricksError as e:
-                if e.error_code == 'BAD_REQUEST':
-                    pytest.skip('dbconnect is not enabled on this workspace')
-                raise e
+    for secret_metadata in dbutils.secrets.list(random_scope):
+        key = secret_metadata.key
+        try:
+            all_secrets[key] = dbutils.secrets.get(random_scope, key)
+        except DatabricksError as e:
+            if e.error_code == 'BAD_REQUEST':
+                pytest.skip('dbconnect is not enabled on this workspace')
+            raise e
 
     logger.info(f'After loading secret: {random_value}')
     logging.getLogger('databricks.sdk').info(f'After loading secret: {random_value}')
 
-    assert all_secrets[f'{random_scope}.{key_for_string}'] == random_value
-    assert all_secrets[f'{random_scope}.{key_for_bytes}'] == random_value
+    assert all_secrets[key_for_string] == random_value
+    assert all_secrets[key_for_bytes] == random_value
diff --git a/tests/integration/test_jobs.py b/tests/integration/test_jobs.py
index 8fd5f8820..768752a75 100644
--- a/tests/integration/test_jobs.py
+++ b/tests/integration/test_jobs.py
@@ -17,18 +17,19 @@ def test_submitting_jobs(w, random, env_or_skip):
     with w.dbfs.open(py_on_dbfs, write=True, overwrite=True) as f:
         f.write(b'import time; time.sleep(10); print("Hello, World!")')
 
-    waiter = w.jobs.submit(run_name=f'py-sdk-{random(8)}',
-                           tasks=[
-                               jobs.SubmitTask(
-                                   task_key='pi',
-                                   new_cluster=compute.ClusterSpec(
-                                       spark_version=w.clusters.select_spark_version(long_term_support=True),
-                                       # node_type_id=w.clusters.select_node_type(local_disk=True),
-                                       instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
-                                       num_workers=1),
-                                   spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'),
-                               )
-                           ])
+    waiter = w.jobs.submit(
+        run_name=f'py-sdk-{random(8)}',
+        tasks=[
+            jobs.SubmitTask(
+                task_key='pi',
+                new_cluster=compute.ClusterSpec(
+                    spark_version=w.clusters.select_spark_version(long_term_support=True),
+                    # node_type_id=w.clusters.select_node_type(local_disk=True),
+                    instance_pool_id=env_or_skip('TEST_INSTANCE_POOL_ID'),
+                    num_workers=1),
+                spark_python_task=jobs.SparkPythonTask(python_file=f'dbfs:{py_on_dbfs}'),
+            )
+        ])
 
     logging.info(f'starting to poll: {waiter.run_id}')
 
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index e9e7324a9..16a8ecfc4 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,18 +1,22 @@
+import io
+import random
 from http.server import BaseHTTPRequestHandler
-from typing import Iterator, List
+from typing import Callable, Iterator, List, Optional, Tuple, Type
+from unittest.mock import Mock
 
 import pytest
-import requests
+from requests import PreparedRequest, Response, Timeout
 
 from databricks.sdk import errors, useragent
-from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk._base_client import (_BaseClient, _RawResponse,
+                                         _StreamingResponse)
 from databricks.sdk.core import DatabricksError
 
 from .clock import FakeClock
 from .fixture_server import http_fixture_server
 
 
-class DummyResponse(requests.Response):
+class DummyResponse(_RawResponse):
     _content: Iterator[bytes]
     _closed: bool = False
 
@@ -276,3 +280,215 @@ def inner(h: BaseHTTPRequestHandler):
         assert 'foo' in res
 
     assert len(requests) == 2
+
+
+@pytest.mark.parametrize(
+    'chunk_size,expected_chunks,data_size',
+    [
+        (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
+        (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
+        (200, 1, 100), # all 100 bytes fit into a single 200-byte chunk
+    ])
+def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
+    rng = random.Random(42)
+    test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
+
+    content_chunks = []
+    mock_response = Mock(spec=_RawResponse)
+
+    def mock_iter_content(chunk_size: int, decode_unicode: bool):
+        # Simulate how requests would chunk the data.
+        for i in range(0, len(test_data), chunk_size):
+            chunk = test_data[i:i + chunk_size]
+            content_chunks.append(chunk) # track chunks for verification
+            yield chunk
+
+    mock_response.iter_content = mock_iter_content
+    stream = _StreamingResponse(mock_response)
+    stream.set_chunk_size(chunk_size)
+
+    # Read all data one byte at a time.
+    received_data = b""
+    while True:
+        chunk = stream.read(1)
+        if not chunk:
+            break
+        received_data += chunk
+
+    assert received_data == test_data # all data was received correctly
+    assert len(content_chunks) == expected_chunks # correct number of chunks
+    assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
+
+
+def test_is_seekable_stream():
+    client = _BaseClient()
+
+    # Test various input types that are not streams.
+    assert not client._is_seekable_stream(None) # None
+    assert not client._is_seekable_stream("string data") # str
+    assert not client._is_seekable_stream(b"binary data") # bytes
+    assert not client._is_seekable_stream(["list", "data"]) # list
+    assert not client._is_seekable_stream(42) # int
+
+    # Test non-seekable stream.
+    non_seekable = io.BytesIO(b"test data")
+    non_seekable.seekable = lambda: False
+    assert not client._is_seekable_stream(non_seekable)
+
+    # Test seekable streams.
+    assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO
+    assert client._is_seekable_stream(io.StringIO("test data")) # StringIO
+
+    # Test file objects.
+    with open(__file__, 'rb') as f:
+        assert client._is_seekable_stream(f) # File object
+
+    # Test custom seekable stream.
+    class CustomSeekableStream(io.IOBase):
+
+        def seekable(self):
+            return True
+
+        def seek(self, offset, whence=0):
+            return 0
+
+        def tell(self):
+            return 0
+
+    assert client._is_seekable_stream(CustomSeekableStream())
+
+
+class RetryTestCase:
+
+    def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool,
+                 expected_result: bytes):
+        self._data_provider = data_provider
+        self._offset = offset
+        self._expected_result = expected_result
+        self._expected_failure = expected_failure
+
+    def get_data(self):
+        data = self._data_provider()
+        if self._offset is not None:
+            data.seek(self._offset)
+        return data
+
+    @classmethod
+    def create_non_seekable_stream(cls, data: bytes):
+        result = io.BytesIO(data)
+        result.seekable = lambda: False # makes the stream appear non-seekable
+        return result
+
+
+class MockSession:
+
+    def __init__(self, failure_count: int, failure_provider: Callable[[], Response]):
+        self._failure_count = failure_count
+        self._received_requests: List[bytes] = []
+        self._failure_provider = failure_provider
+
+    @classmethod
+    def raise_timeout_exception(cls):
+        raise Timeout("Fake timeout")
+
+    @classmethod
+    def return_retryable_response(cls):
+        # fill response fields so that logging does not fail
+        response = Response()
+        response._content = b''
+        response.status_code = 429
+        response.headers = {'Retry-After': '1'}
+        response.url = 'http://test.com/'
+
+        response.request = PreparedRequest()
+        response.request.url = response.url
+        response.request.method = 'POST'
+        response.request.headers = None
+        response.request.body = b''
+        return response
+
+    # following the signature of Session.request()
+    def request(self,
+                method,
+                url,
+                params=None,
+                data=None,
+                headers=None,
+                cookies=None,
+                files=None,
+                auth=None,
+                timeout=None,
+                allow_redirects=True,
+                proxies=None,
+                hooks=None,
+                stream=None,
+                verify=None,
+                cert=None,
+                json=None):
+        request_body = data.read()
+
+        if isinstance(request_body, str):
+            request_body = request_body.encode('utf-8') # to be able to compare with expected bytes
+
+        self._received_requests.append(request_body)
+        if self._failure_count > 0:
+            self._failure_count -= 1
+            return self._failure_provider()
+        else:
+            # fill response fields so that logging does not fail
+            response = Response()
+            response._content = b''
+            response.status_code = 200
+            response.reason = 'OK'
+            response.url = url
+
+            response.request = PreparedRequest()
+            response.request.url = url
+            response.request.method = method
+            response.request.headers = headers
+            response.request.body = data
+            return response
+
+
+@pytest.mark.parametrize(
+    'test_case',
+    [
+        # bytes -> BytesIO
+        RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"),
+        # str -> BytesIO
+        RetryTestCase(lambda: "0123456789", None, False, b"0123456789"),
+        # BytesIO directly
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), None, False, b"0123456789"),
+        # BytesIO directly with offset
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), 4, False, b"456789"),
+        # StringIO
+        RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"),
+        # Non-seekable
+        RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True,
+                      b"0123456789")
+    ])
+@pytest.mark.parametrize('failure', [[MockSession.raise_timeout_exception, Timeout],
+                                     [MockSession.return_retryable_response, errors.TooManyRequests]])
+def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]):
+    failure_count = 2
+
+    data = test_case.get_data()
+
+    session = MockSession(failure_count, failure[0])
+    client = _BaseClient()
+    client._session = session
+
+    def do():
+        client.do('POST', 'test.com/foo', data=data)
+
+    if test_case._expected_failure:
+        expected_attempts_made = 1
+        exception_class = failure[1]
+        with pytest.raises(exception_class):
+            do()
+    else:
+        expected_attempts_made = failure_count + 1
+        do()
+
+    assert session._received_requests == [test_case._expected_result for _ in range(expected_attempts_made)]
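The `test_rewind_seekable_stream` cases assert that the full request body is re-sent on every retry, which only works if the client captures the stream position up front and seeks back to it before each attempt. A minimal sketch of that rewind logic, using illustrative names rather than the SDK's internals:

```python
import io
from typing import Optional

def capture_position(data) -> Optional[int]:
    # Remember where a seekable stream starts so a retry can replay from here.
    if isinstance(data, io.IOBase) and data.seekable():
        return data.tell()
    return None  # non-seekable bodies cannot be replayed, so do not retry

def rewind(data, position: Optional[int]) -> None:
    # Called before each retry attempt.
    if position is not None:
        data.seek(position)
```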
diff --git a/tests/test_config.py b/tests/test_config.py
index 2eac6d2f8..ebc8d683a 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -42,6 +42,11 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
+    # Clear all environment variables and cached CICD provider.
+    for k in os.environ:
+        monkeypatch.delenv(k, raising=False)
+    useragent._cicd_provider = None
+
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
diff --git a/tests/test_core.py b/tests/test_core.py
index 16a4c2ad6..32431172b 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -8,7 +8,7 @@
 
 import pytest
 
-from databricks.sdk import WorkspaceClient, errors
+from databricks.sdk import WorkspaceClient, errors, useragent
 from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
@@ -178,6 +178,11 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
+    # Clear all environment variables and cached CICD provider.
+    for k in os.environ:
+        monkeypatch.delenv(k, raising=False)
+    useragent._cicd_provider = None
+
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
@@ -370,14 +375,20 @@ def inner(h: BaseHTTPRequestHandler):
         assert {'Authorization': 'Taker this-is-it'} == headers
 
 
-@pytest.mark.parametrize(['azure_environment', 'expected'],
-                         [('PUBLIC', ENVIRONMENTS['PUBLIC']), ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']),
-                          ('CHINA', ENVIRONMENTS['CHINA']), ('public', ENVIRONMENTS['PUBLIC']),
-                          ('usgovernment', ENVIRONMENTS['USGOVERNMENT']), ('china', ENVIRONMENTS['CHINA']),
-                          # Kept for historical compatibility
-                          ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']),
-                          ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']),
-                          ('AzureChinaCloud', ENVIRONMENTS['CHINA']), ])
+@pytest.mark.parametrize(
+    ['azure_environment', 'expected'],
+    [
+        ('PUBLIC', ENVIRONMENTS['PUBLIC']),
+        ('USGOVERNMENT', ENVIRONMENTS['USGOVERNMENT']),
+        ('CHINA', ENVIRONMENTS['CHINA']),
+        ('public', ENVIRONMENTS['PUBLIC']),
+        ('usgovernment', ENVIRONMENTS['USGOVERNMENT']),
+        ('china', ENVIRONMENTS['CHINA']),
+        # Kept for historical compatibility
+        ('AzurePublicCloud', ENVIRONMENTS['PUBLIC']),
+        ('AzureUSGovernment', ENVIRONMENTS['USGOVERNMENT']),
+        ('AzureChinaCloud', ENVIRONMENTS['CHINA']),
+    ])
 def test_azure_environment(azure_environment, expected):
     c = Config(credentials_strategy=noop_credentials,
                azure_workspace_resource_id='...',
diff --git a/tests/test_credentials_provider.py b/tests/test_credentials_provider.py
new file mode 100644
index 000000000..67e6f5b35
--- /dev/null
+++ b/tests/test_credentials_provider.py
@@ -0,0 +1,145 @@
+from unittest.mock import Mock
+
+from databricks.sdk.credentials_provider import external_browser
+
+
+def test_external_browser_refresh_success(mocker):
+    """Tests successful refresh of existing credentials."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id' # Or use azure_client_id
+    mock_cfg.client_secret = 'test-client-secret' # Or use azure_client_secret
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_session_credentials = Mock()
+    mock_session_credentials.token.return_value = "valid_token" # Simulate successful refresh
+    mock_token_cache.load.return_value = mock_session_credentials
+
+    # Mock SessionCredentials.
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_session_credentials.token.assert_called_once() # Verify token refresh was attempted
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_refresh_failure_new_oauth_flow(mocker):
+    """Tests failed refresh, triggering a new OAuth flow."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_session_credentials = Mock()
+    mock_session_credentials.token.side_effect = Exception(
+        "Simulated refresh error") # Simulate a failed refresh
+    mock_token_cache.load.return_value = mock_session_credentials
+
+    # Mock SessionCredentials.
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Mock OAuthClient.
+    mock_oauth_client = Mock()
+    mock_consent = Mock()
+    mock_consent.launch_external_browser.return_value = mock_session_credentials
+    mock_oauth_client.initiate_consent.return_value = mock_consent
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_session_credentials.token.assert_called_once() # Refresh attempt
+    mock_oauth_client.initiate_consent.assert_called_once()
+    mock_consent.launch_external_browser.assert_called_once()
+    mock_token_cache.save.assert_called_once_with(mock_session_credentials)
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_no_cached_credentials(mocker):
+    """Tests the case where there are no cached credentials, initiating a new OAuth flow."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_token_cache.load.return_value = None # No cached credentials
+
+    # Mock SessionCredentials.
+    mock_session_credentials = Mock()
+    want_credentials_provider = lambda c: "new_credentials"
+    mock_session_credentials.return_value = want_credentials_provider
+
+    # Mock OAuthClient.
+    mock_consent = Mock()
+    mock_consent.launch_external_browser.return_value = mock_session_credentials
+    mock_oauth_client = Mock()
+    mock_oauth_client.initiate_consent.return_value = mock_consent
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_oauth_client.initiate_consent.assert_called_once()
+    mock_consent.launch_external_browser.assert_called_once()
+    mock_token_cache.save.assert_called_once_with(mock_session_credentials)
+    assert got_credentials_provider == want_credentials_provider
+
+
+def test_external_browser_consent_fails(mocker):
+    """Tests the case where OAuth consent initiation fails."""
+
+    # Mock Config.
+    mock_cfg = Mock()
+    mock_cfg.auth_type = 'external-browser'
+    mock_cfg.host = 'test-host'
+    mock_cfg.oidc_endpoints = {'token_endpoint': 'test-token-endpoint'}
+    mock_cfg.client_id = 'test-client-id'
+    mock_cfg.client_secret = 'test-client-secret'
+
+    # Mock TokenCache.
+    mock_token_cache = Mock()
+    mock_token_cache.load.return_value = None # No cached credentials
+
+    # Mock OAuthClient.
+    mock_oauth_client = Mock()
+    mock_oauth_client.initiate_consent.return_value = None # Simulate consent failure
+
+    # Inject the mock implementations.
+    mocker.patch('databricks.sdk.credentials_provider.TokenCache', return_value=mock_token_cache)
+    mocker.patch('databricks.sdk.credentials_provider.OAuthClient', return_value=mock_oauth_client)
+
+    got_credentials_provider = external_browser(mock_cfg)
+
+    mock_token_cache.load.assert_called_once()
+    mock_oauth_client.initiate_consent.assert_called_once()
+    assert got_credentials_provider is None
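Together these four tests pin down the control flow of `external_browser`: load cached credentials, attempt a silent token refresh, and fall back to a fresh consent flow only when the cache is empty or the refresh raises. A rough sketch of that flow under those assumptions (the real function also resolves client IDs and OIDC endpoints):

```python
def external_browser_flow(cfg, token_cache, oauth_client):
    credentials = token_cache.load()
    if credentials is not None:
        try:
            credentials.token()  # raises if the cached refresh token is stale
            return credentials(cfg)  # session credentials act as a provider factory
        except Exception:
            pass  # stale cache: fall through to an interactive flow
    consent = oauth_client.initiate_consent()
    if not consent:
        return None  # consent initiation failed
    credentials = consent.launch_external_browser()
    token_cache.save(credentials)
    return credentials(cfg)
```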
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
index a74658964..1eac92382 100644
--- a/tests/test_data_plane.py
+++ b/tests/test_data_plane.py
@@ -2,7 +2,7 @@
 
 from databricks.sdk.data_plane import DataPlaneService
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
+from databricks.sdk.service.serving import DataPlaneInfo
 
 info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
 
diff --git a/tests/test_files.py b/tests/test_files.py
new file mode 100644
index 000000000..f4d916f6f
--- /dev/null
+++ b/tests/test_files.py
@@ -0,0 +1,340 @@
+import logging
+import os
+import re
+from dataclasses import dataclass
+from typing import List, Union
+
+import pytest
+from requests import RequestException
+
+from databricks.sdk import WorkspaceClient
+from databricks.sdk.core import Config
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class RequestData:
+
+    def __init__(self, offset: int):
+        self._offset: int = offset
+
+
+class DownloadTestCase:
+
+    def __init__(self, name: str, enable_new_client: bool, file_size: int,
+                 failure_at_absolute_offset: List[int], max_recovers_total: Union[int, None],
+                 max_recovers_without_progressing: Union[int, None], expected_success: bool,
+                 expected_requested_offsets: List[int]):
+        self.name = name
+        self.enable_new_client = enable_new_client
+        self.file_size = file_size
+        self.failure_at_absolute_offset = failure_at_absolute_offset
+        self.max_recovers_total = max_recovers_total
+        self.max_recovers_without_progressing = max_recovers_without_progressing
+        self.expected_success = expected_success
+        self.expected_requested_offsets = expected_requested_offsets
+
+    @staticmethod
+    def to_string(test_case):
+        return test_case.name
+
+    def run(self, config: Config):
+        config = config.copy()
+        config.enable_experimental_files_api_client = self.enable_new_client
+        config.files_api_client_download_max_total_recovers = self.max_recovers_total
+        config.files_api_client_download_max_total_recovers_without_progressing = self.max_recovers_without_progressing
+
+        w = WorkspaceClient(config=config)
+
+        session = MockSession(self)
+        w.files._api._api_client._session = session
+
+        response = w.files.download("/test").contents
+        if self.expected_success:
+            actual_content = response.read()
+            assert (len(actual_content) == len(session.content))
+            assert (actual_content == session.content)
+        else:
+            with pytest.raises(RequestException):
+                response.read()
+
+        received_requests = session.received_requests
+
+        assert (len(self.expected_requested_offsets) == len(received_requests))
+        for idx, requested_offset in enumerate(self.expected_requested_offsets):
+            assert (requested_offset == received_requests[idx]._offset)
+
+
+class MockSession:
+
+    def __init__(self, test_case: DownloadTestCase):
+        self.test_case: DownloadTestCase = test_case
+        self.received_requests: List[RequestData] = []
+        self.content: bytes = os.urandom(self.test_case.file_size)
+        self.failure_pointer = 0
+        self.last_modified = 'Thu, 28 Nov 2024 16:39:14 GMT'
+
+    # following the signature of Session.request()
+    def request(self,
+                method,
+                url,
+                params=None,
+                data=None,
+                headers=None,
+                cookies=None,
+                files=None,
+                auth=None,
+                timeout=None,
+                allow_redirects=True,
+                proxies=None,
+                hooks=None,
+                stream=None,
+                verify=None,
+                cert=None,
+                json=None):
+        assert method == 'GET'
+        assert stream is True
+
+        offset = 0
+        if "Range" in headers:
+            range = headers["Range"]
+            match = re.search("^bytes=(\\d+)-$", range)
+            if match:
+                offset = int(match.group(1))
+            else:
+                raise Exception("Unexpected range header: " + range)
+
+            if "If-Unmodified-Since" in headers:
+                assert (headers["If-Unmodified-Since"] == self.last_modified)
+            else:
+                raise Exception("If-Unmodified-Since header should be passed along with Range")
+
+        logger.info("Client requested offset: %s", offset)
+
+        if offset > len(self.content):
+            raise Exception("Offset %s exceeds file length %s", offset, len(self.content))
+
+        self.received_requests.append(RequestData(offset))
+        return MockResponse(self, offset, MockRequest(url))
+
+
+# required only for correct logging
+class MockRequest:
+
+    def __init__(self, url: str):
+        self.url = url
+        self.method = 'GET'
+        self.headers = dict()
+        self.body = None
+
+
+class MockResponse:
+
+    def __init__(self, session: MockSession, offset: int, request: MockRequest):
+        self.session = session
+        self.offset = offset
+        self.request = request
+        self.status_code = 200
+        self.reason = 'OK'
+        self.headers = dict()
+        self.headers['Content-Length'] = len(session.content) - offset
+        self.headers['Content-Type'] = 'application/octet-stream'
+        self.headers['Last-Modified'] = session.last_modified
+        self.ok = True
+        self.url = request.url
+
+    def iter_content(self, chunk_size: int, decode_unicode: bool):
+        assert decode_unicode is False
+        return MockIterator(self, chunk_size)
+
+
+class MockIterator:
+
+    def __init__(self, response: MockResponse, chunk_size: int):
+        self.response = response
+        self.chunk_size = chunk_size
+        self.offset = 0
+
+    def __next__(self):
+        start_offset = self.response.offset + self.offset
+        if start_offset == len(self.response.session.content):
+            raise StopIteration
+
+        end_offset = start_offset + self.chunk_size # exclusive, might be out of range
+
+        if self.response.session.failure_pointer < len(
+                self.response.session.test_case.failure_at_absolute_offset):
+            failure_after_byte = self.response.session.test_case.failure_at_absolute_offset[
+                self.response.session.failure_pointer]
+            if failure_after_byte < end_offset:
+                self.response.session.failure_pointer += 1
+                raise RequestException("Fake error")
+
+        result = self.response.session.content[start_offset:end_offset]
+        self.offset += len(result)
+        return result
+
+    def close(self):
+        pass
+
+
+class _Constants:
+    underlying_chunk_size = 1024 * 1024 # see ticket #832
+
+
+@pytest.mark.parametrize(
+    "test_case",
+    [
+        DownloadTestCase(name="Old client: no failures, file of 5 bytes",
+                         enable_new_client=False,
+                         file_size=5,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="Old client: no failures, file of 1.5 chunks",
+                         enable_new_client=False,
+                         file_size=int(1.5 * _Constants.underlying_chunk_size),
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(
+            name="Old client: failure",
+            enable_new_client=False,
+            file_size=1024,
+            failure_at_absolute_offset=[100],
+            max_recovers_total=None, # unlimited but ignored
+            max_recovers_without_progressing=None, # unlimited but ignored
+            expected_success=False,
+            expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 5 bytes",
+                         enable_new_client=True,
+                         file_size=5,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 1 Kb",
+                         enable_new_client=True,
+                         file_size=1024,
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=None,
+                         failure_at_absolute_offset=[],
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 1.5 chunks",
+                         enable_new_client=True,
+                         file_size=int(1.5 * _Constants.underlying_chunk_size),
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: no failures, file of 10 chunks",
+                         enable_new_client=True,
+                         file_size=10 * _Constants.underlying_chunk_size,
+                         failure_at_absolute_offset=[],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=True,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(name="New client: recovers are disabled, first failure leads to download abort",
+                         enable_new_client=True,
+                         file_size=10000,
+                         failure_at_absolute_offset=[5],
+                         max_recovers_total=0,
+                         max_recovers_without_progressing=0,
+                         expected_success=False,
+                         expected_requested_offsets=[0]),
+        DownloadTestCase(
+            name="New client: unlimited recovers allowed",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 5,
+            # causes errors on requesting the third chunk
+            failure_at_absolute_offset=[
+                _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1,
+                _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size + 1,
+                _Constants.underlying_chunk_size * 3,
+            ],
+            max_recovers_total=None,
+            max_recovers_without_progressing=None,
+            expected_success=True,
+            expected_requested_offsets=[
+                0, 0, 0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 3
+            ]),
+        DownloadTestCase(
+            name="New client: we respect limit on total recovers when progressing",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 10,
+            failure_at_absolute_offset=[
+                1,
+                _Constants.underlying_chunk_size + 1, # progressing
+                _Constants.underlying_chunk_size * 2 + 1, # progressing
+                _Constants.underlying_chunk_size * 3 + 1 # progressing
+            ],
+            max_recovers_total=3,
+            max_recovers_without_progressing=None,
+            expected_success=False,
+            expected_requested_offsets=[
+                0, 0, _Constants.underlying_chunk_size * 1, _Constants.underlying_chunk_size * 2
+            ]),
+        DownloadTestCase(name="New client: we respect limit on total recovers when not progressing",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 10,
+                         failure_at_absolute_offset=[1, 1, 1, 1],
+                         max_recovers_total=3,
+                         max_recovers_without_progressing=None,
+                         expected_success=False,
+                         expected_requested_offsets=[0, 0, 0, 0]),
+        DownloadTestCase(name="New client: we respect limit on non-progressing recovers",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 2,
+                         failure_at_absolute_offset=[
+                             _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1,
+                             _Constants.underlying_chunk_size - 1, _Constants.underlying_chunk_size - 1
+                         ],
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=3,
+                         expected_success=False,
+                         expected_requested_offsets=[0, 0, 0, 0]),
+        DownloadTestCase(
+            name="New client: non-progressing recovers count is reset when progressing",
+            enable_new_client=True,
+            file_size=_Constants.underlying_chunk_size * 10,
+            failure_at_absolute_offset=[
+                _Constants.underlying_chunk_size + 1, # this recover is after progressing
+                _Constants.underlying_chunk_size + 1, # this is not
+                _Constants.underlying_chunk_size * 2 + 1, # this recover is after progressing
+                _Constants.underlying_chunk_size * 2 + 1, # this is not
+                _Constants.underlying_chunk_size * 2 + 1, # this is not, we abort here
+            ],
+            max_recovers_total=None,
+            max_recovers_without_progressing=2,
+            expected_success=False,
+            expected_requested_offsets=[
+                0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size,
+                _Constants.underlying_chunk_size * 2, _Constants.underlying_chunk_size * 2
+            ]),
+        DownloadTestCase(name="New client: non-progressing recovers count is reset when progressing - 2",
+                         enable_new_client=True,
+                         file_size=_Constants.underlying_chunk_size * 10,
+                         failure_at_absolute_offset=[
+                             1, _Constants.underlying_chunk_size + 1, _Constants.underlying_chunk_size * 2 +
+                             1, _Constants.underlying_chunk_size * 3 + 1
+                         ],
+                         max_recovers_total=None,
+                         max_recovers_without_progressing=1,
+                         expected_success=True,
+                         expected_requested_offsets=[
+                             0, 0, _Constants.underlying_chunk_size, _Constants.underlying_chunk_size * 2,
+                             _Constants.underlying_chunk_size * 3
+                         ]),
+    ],
+    ids=DownloadTestCase.to_string)
+def test_download_recover(config: Config, test_case: DownloadTestCase):
+    test_case.run(config)
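Every recovery case above hinges on two request headers: `Range` resumes from the first byte not yet delivered, and `If-Unmodified-Since` aborts if the file changed after the initial response. A compressed sketch of that resume loop, with the recover budgets and bounds checks of the real client omitted:

```python
def resumable_download(session, url, chunk_size=1024 * 1024):
    delivered, last_modified = 0, None
    while True:
        headers = {}
        if delivered > 0:
            # Resume where we left off; fail fast if the file has changed.
            headers['Range'] = f'bytes={delivered}-'
            headers['If-Unmodified-Since'] = last_modified
        response = session.request('GET', url, headers=headers, stream=True)
        last_modified = last_modified or response.headers.get('Last-Modified')
        try:
            for chunk in response.iter_content(chunk_size=chunk_size, decode_unicode=False):
                yield chunk
                delivered += len(chunk)
            return
        except Exception:
            continue  # recover by re-requesting from the current offset
```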
diff --git a/tests/test_jobs_mixin.py b/tests/test_jobs_mixin.py
new file mode 100644
index 000000000..2c39d41d9
--- /dev/null
+++ b/tests/test_jobs_mixin.py
@@ -0,0 +1,263 @@
+import json
+import re
+from typing import Pattern
+
+from databricks.sdk import WorkspaceClient
+
+
+def make_getrun_path_pattern(run_id: int, page_token: str) -> Pattern[str]:
+    return re.compile(
+        rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/runs/get?page_token={page_token}&run_id={run_id}")}'
+    )
+
+
+def make_getjob_path_pattern(job_id: int, page_token: str) -> Pattern[str]:
+    return re.compile(
+        rf'{re.escape("http://localhost/api/")}2.\d{re.escape(f"/jobs/get?job_id={job_id}&page_token={page_token}")}'
+    )
+
+
+def test_get_run_with_no_pagination(config, requests_mock):
+    run1 = {"tasks": [{"run_id": 0}, {"run_id": 1}], }
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {"tasks": [{'run_id': 0}, {'run_id': 1}], }
+
+
+def test_get_run_pagination_with_tasks(config, requests_mock):
+    from databricks.sdk.service import compute, jobs
+    cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12",
+                                       custom_tags={"ResourceClass": "SingleNode"},
+                                       num_workers=0,
+                                       node_type_id="Standard_DS3_v2",
+                                       )
+    cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec)
+    cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec)
+    cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec)
+    cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec)
+    run1 = {
+        "tasks": [{
+            "run_id": 0
+        }, {
+            "run_id": 1
+        }],
+        "job_clusters": [cluster1.as_dict(), cluster2.as_dict(), ],
+        "job_parameters": [{
+            "name": "param1",
+            "value": "value1"
+        }],
+        "next_page_token": "tokenToSecondPage",
+    }
+    run2 = {
+        "tasks": [{
+            "run_id": 2
+        }, {
+            "run_id": 3
+        }],
+        "job_clusters": [cluster3.as_dict(), cluster4.as_dict(), ],
+        "job_parameters": [{
+            "name": "param2",
+            "value": "value2"
+        }],
+        "next_page_token": "tokenToThirdPage",
+    }
+    run3 = {"tasks": [{"run_id": 4}]}
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {
+        "tasks": [{
+            'run_id': 0
+        }, {
+            'run_id': 1
+        }, {
+            'run_id': 2
+        }, {
+            'run_id': 3
+        }, {
+            'run_id': 4
+        }],
+        "job_clusters": [cluster1.as_dict(),
+                         cluster2.as_dict(),
+                         cluster3.as_dict(),
+                         cluster4.as_dict()],
+        "job_parameters": [{
+            "name": "param1",
+            "value": "value1"
+        }, {
+            "name": "param2",
+            "value": "value2"
+        }],
+    }
+
+
+def test_get_run_pagination_with_iterations(config, requests_mock):
+    run1 = {
+        "tasks": [{
+            "run_id": 1337
+        }],
+        "iterations": [{
+            "run_id": 0
+        }, {
+            "run_id": 1
+        }],
+        "next_page_token": "tokenToSecondPage",
+    }
+    run2 = {
+        "tasks": [{
+            "run_id": 1337
+        }],
+        "iterations": [{
+            "run_id": 2
+        }, {
+            "run_id": 3
+        }],
+        "next_page_token": "tokenToThirdPage",
+    }
+    run3 = {"tasks": [{"run_id": 1337}], "iterations": [{"run_id": 4}], }
+    requests_mock.get(make_getrun_path_pattern(1337, "initialToken"), text=json.dumps(run1))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(run2))
+    requests_mock.get(make_getrun_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(run3))
+    w = WorkspaceClient(config=config)
+
+    run = w.jobs.get_run(1337, page_token="initialToken")
+
+    assert run.as_dict() == {
+        "tasks": [{
+            'run_id': 1337
+        }],
+        "iterations": [{
+            'run_id': 0
+        }, {
+            'run_id': 1
+        }, {
+            'run_id': 2
+        }, {
+            'run_id': 3
+        }, {
+            'run_id': 4
+        }],
+    }
+
+
+def test_get_job_with_no_pagination(config, requests_mock):
+    job1 = {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }}
+    requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1))
+    w = WorkspaceClient(config=config)
+
+    job = w.jobs.get(1337, page_token="initialToken")
+
+    assert job.as_dict() == {"settings": {"tasks": [{"task_key": "taskKey1"}, {"task_key": "taskKey2"}], }}
+
+
+def test_get_job_pagination_with_tasks(config, requests_mock):
+    from databricks.sdk.service import compute, jobs
+    cluster_spec = compute.ClusterSpec(spark_version="11.3.x-scala2.12",
+                                       custom_tags={"ResourceClass": "SingleNode"},
+                                       num_workers=0,
+                                       node_type_id="Standard_DS3_v2",
+                                       )
+    cluster1 = jobs.JobCluster(job_cluster_key="cluster1", new_cluster=cluster_spec)
+    cluster2 = jobs.JobCluster(job_cluster_key="cluster2", new_cluster=cluster_spec)
+    cluster3 = jobs.JobCluster(job_cluster_key="cluster3", new_cluster=cluster_spec)
+    cluster4 = jobs.JobCluster(job_cluster_key="cluster4", new_cluster=cluster_spec)
+    job1 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey1"
+            }, {
+                "task_key": "taskKey2"
+            }],
+            "job_clusters": [cluster1.as_dict(), cluster2.as_dict()],
+            "parameters": [{
+                "name": "param1",
+                "default": "default1"
+            }],
+            "environments": [{
+                "environment_key": "key1"
+            }, {
+                "environment_key": "key2"
+            }]
+        },
+        "next_page_token": "tokenToSecondPage"
+    }
+    job2 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey3"
+            }, {
+                "task_key": "taskKey4"
+            }],
+            "job_clusters": [cluster3.as_dict(), cluster4.as_dict()],
+            "parameters": [{
+                "name": "param2",
+                "default": "default2"
+            }],
+            "environments": [{
+                "environment_key": "key3"
+            }]
+        },
+        "next_page_token": "tokenToThirdPage"
+    }
+    job3 = {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey5"
+            }],
+            "parameters": [{
+                "name": "param3",
+                "default": "default3"
+            }]
+        },
+    }
+
+    requests_mock.get(make_getjob_path_pattern(1337, "initialToken"), text=json.dumps(job1))
+    requests_mock.get(make_getjob_path_pattern(1337, "tokenToSecondPage"), text=json.dumps(job2))
+    requests_mock.get(make_getjob_path_pattern(1337, "tokenToThirdPage"), text=json.dumps(job3))
+    w = WorkspaceClient(config=config)
+
+    job = w.jobs.get(1337, page_token="initialToken")
+
+    assert job.as_dict() == {
+        "settings": {
+            "tasks": [{
+                "task_key": "taskKey1"
+            }, {
+                "task_key": "taskKey2"
+            }, {
+                "task_key": "taskKey3"
+            }, {
+                "task_key": "taskKey4"
+            }, {
+                "task_key": "taskKey5"
+            }],
+            "job_clusters": [cluster1.as_dict(),
+                             cluster2.as_dict(),
+                             cluster3.as_dict(),
+                             cluster4.as_dict()],
+            "parameters": [{
+                "name": "param1",
+                "default": "default1"
+            }, {
+                "name": "param2",
+                "default": "default2"
+            }, {
+                "name": "param3",
+                "default": "default3"
+            }],
+            "environments": [{
+                "environment_key": "key1"
+            }, {
+                "environment_key": "key2"
+            }, {
+                "environment_key": "key3"
+            }]
+        }
+    }
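These fixtures encode the merge rule the paginated `get_run`/`get` overrides must follow: chase `next_page_token`, concatenate the array fields, and keep everything else from the first page (for `get`, the same rule applies inside the nested `settings` object). A minimal sketch of that rule over the fields exercised here:

```python
def fetch_all_pages(fetch_page, first_token):
    # fetch_page(token) returns one page as a dict, like the mocked responses.
    merged = fetch_page(first_token)
    token = merged.pop('next_page_token', None)
    while token:
        page = fetch_page(token)
        token = page.pop('next_page_token', None)
        for field in ('tasks', 'iterations', 'job_clusters',
                      'job_parameters', 'parameters', 'environments'):
            if field in page:
                merged.setdefault(field, []).extend(page[field])
    return merged
```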
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index 092a3bf16..49aed33a5 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -1,8 +1,10 @@
+import threading
 import time
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.credentials_provider import ModelServingUserCredentials
 
 from .conftest import raises
 
@@ -39,7 +41,6 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     mocker.patch('databricks.sdk.config.Config._known_file_config_loader')
 
     cfg = Config()
-
     assert cfg.auth_type == 'model-serving'
     headers = cfg.authenticate()
     assert (cfg.host == 'x')
@@ -47,15 +48,24 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
 
 
-@pytest.mark.parametrize("env_values, oauth_file_name", [
-    ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
-    ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
-      ], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
-])
+@pytest.mark.parametrize(
+    "env_values, oauth_file_name",
+    [
+        ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
+        ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
+    ])
 @raises(default_auth_base_error_message)
 def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
+    # Guarantee that the test defaults to env variables rather than the config file.
+    #
+    # TODO: this is hacky and we should find a better way to tell the config
+    # that it should not read from the config file.
+    monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x')
+
     for (env_name, env_value) in env_values:
         monkeypatch.setenv(env_name, env_value)
     monkeypatch.setattr(
@@ -84,7 +94,6 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     assert headers.get(
         "Authorization") == 'Bearer databricks_sdk_unit_test_token' # Token defined in the test file
-
     # Simulate refreshing the token by patching to a new file
     monkeypatch.setattr(
         "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
@@ -104,3 +113,49 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     # Read V2 now
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2'
+
+
+def test_agent_user_credentials(monkeypatch, mocker):
+    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        "tests/testdata/model-serving-test-token")
+
+    invokers_token_val = "databricks_invokers_token"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'model_serving_user_credentials'
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+    # Test updates of invokers token
+    invokers_token_val = "databricks_invokers_token_v2"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+
+# If this credential strategy is used in a non-model-serving environment, fall back to the default credential strategy instead.
+def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch):
+
+    monkeypatch.setenv('DATABRICKS_HOST', 'x')
+    monkeypatch.setenv('DATABRICKS_TOKEN', 'token')
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'https://x')
+    assert headers.get("Authorization") == 'Bearer token'
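`test_agent_user_credentials` reads the caller's token from thread-local state: the serving harness stores `invokers_token` on the handling thread's `__dict__`, so concurrent requests each authenticate as their own invoker. A sketch of the lookup side, with the attribute name taken from the test rather than from the provider's implementation:

```python
import threading

def current_invokers_token() -> str:
    # Each serving request thread carries its own caller token.
    token = threading.current_thread().__dict__.get('invokers_token')
    if token is None:
        raise ValueError('no invokers_token set on the current thread')
    return token
```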
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index 1858c66cb..72e1e9a60 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -1,8 +1,10 @@
 import sys
+from io import BytesIO
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
 
 
 def test_open_ai_client(monkeypatch):
@@ -28,3 +30,22 @@ def test_langchain_open_ai_client(monkeypatch):
 
     assert client.openai_api_base == "https://test_host/serving-endpoints"
     assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
+
+
+def test_http_request(w, requests_mock):
+    headers = {"Accept": "text/plain", "Content-Type": "application/json", }
+    mocked_url = "http://localhost/api/2.0/external-function"
+    blob_response = BytesIO(b"The request was successful")
+
+    requests_mock.post(mocked_url,
+                       request_headers=headers,
+                       content=blob_response.getvalue(),
+                       status_code=200,
+                       )
+    response = w.serving_endpoints.http_request(conn="test_conn",
+                                                method=ExternalFunctionRequestHttpMethod.GET,
+                                                path="test_path")
+    assert requests_mock.call_count == 1
+    assert requests_mock.called
+    assert response.status_code == 200 # Verify the response status
+    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
index 5083d9908..ba6f694f5 100644
--- a/tests/test_user_agent.py
+++ b/tests/test_user_agent.py
@@ -1,3 +1,5 @@
+import os
+
 import pytest
 
 from databricks.sdk.version import __version__
@@ -40,3 +42,45 @@ def test_user_agent_with_partner(user_agent):
     user_agent.with_partner('differenttest')
     assert 'partner/test' in user_agent.to_string()
     assert 'partner/differenttest' in user_agent.to_string()
+
+
+@pytest.fixture(scope="function")
+def clear_cicd():
+    # Save and clear env vars.
+    original_env = os.environ.copy()
+    os.environ.clear()
+
+    # Clear cached CICD provider.
+    from databricks.sdk import useragent
+    useragent._cicd_provider = None
+
+    yield
+
+    # Restore env vars.
+    os.environ.clear()
+    os.environ.update(original_env)
+
+
+def test_user_agent_cicd_no_provider(clear_cicd):
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd' not in user_agent
+
+
+def test_user_agent_cicd_one_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent
+
+
+def test_user_agent_cicd_two_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+    os.environ['GITLAB_CI'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent # GitHub is still the provider reported when both are set

From 2814adf1aa999f69838db9a44449dd969a646604 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 16:11:58 -0700
Subject: [PATCH 102/136] fix files

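Resolve leftover merge conflict markers in `databricks/sdk/config.py` and
`databricks/sdk/credentials_provider.py`, keeping the upstream versions of
`Config.__init__`, `external_browser`, and `ModelServingAuthProvider`
(including the thread-local invokers token used for user credentials).

A minimal sketch of the thread-local lookup that the kept
`_get_invokers_token` path relies on; `read_invokers_token` is a
hypothetical helper for illustration, not SDK code:

```python
import threading

def read_invokers_token() -> str:
    # Model Serving stores the caller's token in the handling thread's
    # __dict__ under "invokers_token"; fail loudly if it is missing.
    thread_data = threading.current_thread().__dict__
    if "invokers_token" not in thread_data:
        raise ValueError("Missing invokers_token in Model Serving environment")
    return thread_data["invokers_token"]

# The unit tests above simulate the serving runtime the same way:
threading.current_thread().__dict__["invokers_token"] = "test-token"
assert read_invokers_token() == "test-token"
```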
---
 databricks/sdk/config.py               | 12 ----
 databricks/sdk/credentials_provider.py | 82 --------------------------
 2 files changed, 94 deletions(-)

diff --git a/databricks/sdk/config.py b/databricks/sdk/config.py
index a556b5988..490c6ba4e 100644
--- a/databricks/sdk/config.py
+++ b/databricks/sdk/config.py
@@ -92,17 +92,6 @@ class Config:
     max_connections_per_pool: int = ConfigAttribute()
     databricks_environment: Optional[DatabricksEnvironment] = None
 
-<<<<<<< HEAD
-    def __init__(self,
-                 *,
-                 # Deprecated. Use credentials_strategy instead.
-                 credentials_provider: Optional[CredentialsStrategy] = None,
-                 credentials_strategy: Optional[CredentialsStrategy] = None,
-                 product=None,
-                 product_version=None,
-                 clock: Optional[Clock] = None,
-                 **kwargs):
-=======
     enable_experimental_files_api_client: bool = ConfigAttribute(
         env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
     files_api_client_download_max_total_recovers = None
@@ -118,7 +107,6 @@ def __init__(
             product_version=None,
             clock: Optional[Clock] = None,
             **kwargs):
->>>>>>> upstream/main
         self._header_factory = None
         self._inner = {}
         self._user_agent_other_info = []
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 9a5b0748f..07fb48c5a 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -9,10 +9,7 @@
 import platform
 import subprocess
 import sys
-<<<<<<< HEAD
-=======
 import threading
->>>>>>> upstream/main
 import time
 from datetime import datetime
 from typing import Callable, Dict, List, Optional, Tuple, Union
@@ -192,10 +189,6 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
-<<<<<<< HEAD
-=======
-
->>>>>>> upstream/main
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
@@ -203,20 +196,11 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     elif cfg.azure_client_id:
         client_id = cfg.azure_client
         client_secret = cfg.azure_client_secret
-<<<<<<< HEAD
-
-    if not client_id:
-        client_id = 'databricks-cli'
-
-    # Load cached credentials from disk if they exist.
-    # Note that these are local to the Python SDK and not reused by other SDKs.
-=======
     if not client_id:
         client_id = 'databricks-cli'
 
     # Load cached credentials from disk if they exist. Note that these are
     # local to the Python SDK and not reused by other SDKs.
->>>>>>> upstream/main
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -226,19 +210,6 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
                              redirect_url=redirect_url)
     credentials = token_cache.load()
     if credentials:
-<<<<<<< HEAD
-        # Force a refresh in case the loaded credentials are expired.
-        credentials.token()
-    else:
-        oauth_client = OAuthClient(oidc_endpoints=oidc_endpoints,
-                                   client_id=client_id,
-                                   redirect_url=redirect_url,
-                                   client_secret=client_secret)
-        consent = oauth_client.initiate_consent()
-        if not consent:
-            return None
-        credentials = consent.launch_external_browser()
-=======
         try:
             # Pro-actively refresh the loaded credentials. This is done
             # to detect if the token is expired and needs to be refreshed
@@ -258,7 +229,6 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
         return None
 
     credentials = consent.launch_external_browser()
->>>>>>> upstream/main
     token_cache.save(credentials)
     return credentials(cfg)
 
@@ -753,16 +723,6 @@ def inner() -> Dict[str, str]:
 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
 class ModelServingAuthProvider():
-<<<<<<< HEAD
-    _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
-
-    def __init__(self):
-        self.expiry_time = -1
-        self.current_token = None
-        self.refresh_duration = 300 # 300 Seconds
-
-    def should_fetch_model_serving_environment_oauth(self) -> bool:
-=======
     USER_CREDENTIALS = "user_credentials"
 
     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
@@ -774,7 +734,6 @@ def __init__(self, credential_type: Optional[str]):
         self.credential_type = credential_type
 
     def should_fetch_model_serving_environment_oauth() -> bool:
->>>>>>> upstream/main
         """
         Check whether this is the model serving environment
         Additionally check if the oauth token file path exists
@@ -783,25 +742,15 @@ def should_fetch_model_serving_environment_oauth() -> bool:
         is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
                                    or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
         return (is_in_model_serving_env == "true"
-<<<<<<< HEAD
-                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
-
-    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
-=======
                 and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
 
     def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
->>>>>>> upstream/main
         # Use Cached value if it is valid
         if self.current_token is not None and self.expiry_time > time.time():
             return self.current_token
 
         try:
-<<<<<<< HEAD
-            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
-=======
             with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
->>>>>>> upstream/main
                 oauth_dict = json.load(f)
                 self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
                 self.expiry_time = time.time() + self.refresh_duration
@@ -811,21 +760,13 @@ def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
                 logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
                                exc_info=e)
                 time.sleep(0.5)
-<<<<<<< HEAD
-                return self.get_model_dependency_oauth_token(should_retry=False)
-=======
                 return self._get_model_dependency_oauth_token(should_retry=False)
->>>>>>> upstream/main
             else:
                 raise RuntimeError(
                     "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
                 ) from e
         return self.current_token
 
-<<<<<<< HEAD
-    def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
-        if not self.should_fetch_model_serving_environment_oauth():
-=======
     def _get_invokers_token(self):
         current_thread = threading.current_thread()
         thread_data = current_thread.__dict__
@@ -840,26 +781,11 @@ def _get_invokers_token(self):
 
     def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
         if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
->>>>>>> upstream/main
             return None
 
         # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR
         host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
             "DB_MODEL_SERVING_HOST_URL")
-<<<<<<< HEAD
-        token = self.get_model_dependency_oauth_token()
-
-        return (host, token)
-
-
-@credentials_strategy('model-serving', [])
-def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
-    try:
-        model_serving_auth_provider = ModelServingAuthProvider()
-        if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth():
-            logger.debug("model-serving: Not in Databricks Model Serving, skipping")
-            return None
-=======
 
         if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
             return (host, self._get_invokers_token())
@@ -871,7 +797,6 @@ def model_serving_auth_visitor(cfg: 'Config',
                                credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
     try:
         model_serving_auth_provider = ModelServingAuthProvider(credential_type)
->>>>>>> upstream/main
         host, token = model_serving_auth_provider.get_databricks_host_token()
         if token is None:
             raise ValueError(
@@ -882,10 +807,6 @@ def model_serving_auth_visitor(cfg: 'Config',
     except Exception as e:
         logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
         return None
-<<<<<<< HEAD
-
-=======
->>>>>>> upstream/main
     logger.info("Using Databricks Model Serving Authentication")
 
     def inner() -> Dict[str, str]:
@@ -896,8 +817,6 @@ def inner() -> Dict[str, str]:
     return inner
 
 
-<<<<<<< HEAD
-=======
 @credentials_strategy('model-serving', [])
 def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
     if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
@@ -907,7 +826,6 @@ def model_serving_auth(cfg: 'Config') -> Optional[CredentialsProvider]:
     return model_serving_auth_visitor(cfg)
 
 
->>>>>>> upstream/main
 class DefaultCredentials:
     """ Select the first applicable credential provider from the chain """
 

From f0e962c64bd097aa8bfca7bbc0af8b77d901a596 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 17:08:14 -0700
Subject: [PATCH 103/136] missing termination reason code

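The API returns the cluster termination reason
`INVALID_INSTANCE_PLACEMENT_PROTOCOL`, but the value was missing from the
`TerminationReasonCode` enum in `compute.py`.

A quick round-trip check (assumes this patched SDK build is installed):

```python
from databricks.sdk.service.compute import TerminationReasonCode

# Enum lookup by value now succeeds for the added member.
code = TerminationReasonCode('INVALID_INSTANCE_PLACEMENT_PROTOCOL')
assert code is TerminationReasonCode.INVALID_INSTANCE_PLACEMENT_PROTOCOL
```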
---
 databricks/sdk/service/compute.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 63a971b73..d4596e63e 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -7616,6 +7616,8 @@ class TerminationReasonCode(Enum):
     INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE'
     INTERNAL_ERROR = 'INTERNAL_ERROR'
     INVALID_ARGUMENT = 'INVALID_ARGUMENT'
+    # [PROD-2800] Add missing termination reason code
+    INVALID_INSTANCE_PLACEMENT_PROTOCOL = 'INVALID_INSTANCE_PLACEMENT_PROTOCOL'
     INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE'
     IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE'
     JOB_FINISHED = 'JOB_FINISHED'

From ee90a155ec674ac56a27cdad25b7f6255743a34f Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 17:18:53 -0700
Subject: [PATCH 104/136] delete tests

---
 .github/workflows/integration-tests.yml | 90 -------------------------
 .github/workflows/message.yml           | 32 ---------
 2 files changed, 122 deletions(-)
 delete mode 100644 .github/workflows/integration-tests.yml
 delete mode 100644 .github/workflows/message.yml

diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
deleted file mode 100644
index c308cc03c..000000000
--- a/.github/workflows/integration-tests.yml
+++ /dev/null
@@ -1,90 +0,0 @@
-name: Integration Tests
-
-on:
-
-  pull_request:
-    types: [opened, synchronize]
-
-  merge_group:
-
-
-jobs:
-  check-token:
-    name: Check secrets access
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    environment: "test-trigger-is"
-    outputs:
-      has_token: ${{ steps.set-token-status.outputs.has_token }}
-    steps:
-      - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
-        id: set-token-status
-        run: |
-            if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
-              echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
-              echo "::set-output name=has_token::false"
-            else
-              echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
-              echo "::set-output name=has_token::true"
-            fi
-
-  trigger-tests:
-    name: Trigger Tests
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    needs: check-token
-    if: github.event_name == 'pull_request'  && needs.check-token.outputs.has_token == 'true'
-    environment: "test-trigger-is"
-
-    steps:
-    - uses: actions/checkout@v3
-
-    - name: Generate GitHub App Token
-      id: generate-token
-      uses: actions/create-github-app-token@v1
-      with:
-        app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
-        private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
-        owner: ${{ secrets.ORG_NAME }}
-        repositories: ${{secrets.REPO_NAME}}
-
-    - name: Trigger Workflow in Another Repo
-      env:
-        GH_TOKEN: ${{ steps.generate-token.outputs.token }}
-      run: |
-        gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
-        --ref main \
-        -f pull_request_number=${{ github.event.pull_request.number }} \
-        -f commit_sha=${{ github.event.pull_request.head.sha }}
-
-  # Statuses and checks apply to specific commits (by hash).
-  # Enforcement of required checks is done both at the PR level and the merge queue level.
-  # In case of multiple commits in a single PR, the hash of the squashed commit
-  # will not match the one for the latest (approved) commit in the PR.
-  # We auto approve the check for the merge queue for two reasons:
-  # * Queue times out due to duration of tests.
-  # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
-  auto-approve:
-    if: github.event_name == 'merge_group'
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    steps:
-      - name: Mark Check
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        shell: bash
-        run: |
-            gh api -X POST -H "Accept: application/vnd.github+json" \
-              -H "X-GitHub-Api-Version: 2022-11-28" \
-              /repos/${{ github.repository }}/statuses/${{ github.sha }} \
-              -f 'state=success' \
-              -f 'context=Integration Tests Check'
diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml
deleted file mode 100644
index 057556895..000000000
--- a/.github/workflows/message.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: Validate Commit Message
-
-on:
-  pull_request:
-    types: [opened, synchronize, edited]
-  merge_group:
-    types: [checks_requested]
-
-jobs:
-  validate:
-    runs-on: ubuntu-latest
-    # GitHub required checks are shared between PRs and the Merge Queue.
-    # Since there is no PR title on Merge Queue, we need to trigger and
-    # skip this test for Merge Queue to succeed.
-    if: github.event_name == 'pull_request'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Validate Tag
-        env:
-          TITLE: ${{ github.event.pull_request.title }}
-        run: |
-          TAG=$(echo "$TITLE" | sed -ne 's/\[\(.*\)\].*/\1/p')
-          if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then 
-            echo "Valid tag found: [$TAG]"
-          else 
-            echo "Invalid or missing tag in commit message: [$TAG]" 
-            exit 1
-          fi
\ No newline at end of file

From cb5925c489a12e59c1c8e40e380c6b3da6434e51 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 18:13:18 -0700
Subject: [PATCH 105/136] fix merge issues

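Clean up remnants from the merge: drop the duplicated `GenieAPI` class in
`dashboards.py`, restore the legacy field set on `LegacyQuery` in `sql.py`,
fix the copy-pasted `DeleteResponse` docstrings and return types in
`billing.py`, and remove fields that no longer exist (`prev_page_token` on
`Run`, duplicated `ServedModelInput` throughput fields, and `ai_gateway` on
`CreateServingEndpoint`).

A minimal sketch of the generated `as_dict`/`from_dict` contract that the
fixed `DeleteResponse` serializers follow (a standalone stand-in, not the
SDK class itself):

```python
from dataclasses import dataclass
from typing import Dict

@dataclass
class DeleteResponse:

    def as_dict(self) -> dict:
        # An empty-body response serializes to an empty JSON object.
        return {}

    @classmethod
    def from_dict(cls, d: Dict[str, any]) -> 'DeleteResponse':
        # Nothing to read back; deserialization yields a fresh instance.
        return cls()

assert DeleteResponse.from_dict(DeleteResponse().as_dict()) == DeleteResponse()
```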
---
 databricks/sdk/_base_client.py               |   1 +
 databricks/sdk/credentials_provider.py       |   1 +
 databricks/sdk/service/billing.py            |   6 +-
 databricks/sdk/service/dashboards.py         | 187 -------------------
 databricks/sdk/service/iam.py                |   4 +-
 databricks/sdk/service/jobs.py               |   5 -
 databricks/sdk/service/serving.py            |  21 ---
 databricks/sdk/service/sql.py                | 138 +++++++++-----
 docs/account/iam/workspace_assignment.rst    |   2 +-
 docs/workspace/dashboards/index.rst          |   2 +-
 docs/workspace/serving/serving_endpoints.rst |   3 -
 tests/test_open_ai_mixin.py                  |   2 +-
 12 files changed, 97 insertions(+), 275 deletions(-)

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 58fcb10a5..f0950f656 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -276,6 +276,7 @@ def _perform(self,
         error = self._error_parser.get_api_error(response)
         if error is not None:
             raise error from None
+
         return response
 
     def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 07fb48c5a..24d01f678 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -189,6 +189,7 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
diff --git a/databricks/sdk/service/billing.py b/databricks/sdk/service/billing.py
index e23e676fe..dd2579921 100755
--- a/databricks/sdk/service/billing.py
+++ b/databricks/sdk/service/billing.py
@@ -753,7 +753,7 @@ def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
 class DeleteResponse:
 
     def as_dict(self) -> dict:
-        """Serializes the DeleteBudgetConfigurationResponse into a dictionary suitable for use as a JSON request body."""
+        """Serializes the DeleteResponse into a dictionary suitable for use as a JSON request body."""
         body = {}
         return body
 
@@ -763,8 +763,8 @@ def as_shallow_dict(self) -> dict:
         return body
 
     @classmethod
-    def from_dict(cls, d: Dict[str, any]) -> DeleteBudgetConfigurationResponse:
-        """Deserializes the DeleteBudgetConfigurationResponse from a dictionary."""
+    def from_dict(cls, d: Dict[str, any]) -> DeleteResponse:
+        """Deserializes the DeleteResponse from a dictionary."""
         return cls()
 
 
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index c81159cca..ba01ba41d 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -1804,193 +1804,6 @@ def start_conversation_and_wait(self, space_id: str, content: str,
         return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
 
 
-class GenieAPI:
-    """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
-    business users can use to ask questions using natural language. Genie uses data registered to Unity
-    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
-    Assistant must be enabled."""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def wait_get_message_genie_completed(
-            self,
-            conversation_id: str,
-            message_id: str,
-            space_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (MessageStatus.COMPLETED, )
-        failure_states = (MessageStatus.FAILED, )
-        status_message = 'polling...'
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
-            status = poll.status
-            status_message = f'current status: {status}'
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
-                raise OperationFailed(msg)
-            prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
-        """Create conversation message.
-        
-        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
-        previously created messages in the conversation to respond.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the conversation is started.
-        :param conversation_id: str
-          The ID associated with the conversation.
-        :param content: str
-          User message content.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        """
-        body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
-            body=body,
-            headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieMessage.from_dict(op_response),
-                    conversation_id=conversation_id,
-                    message_id=op_response['id'],
-                    space_id=space_id)
-
-    def create_message_and_wait(self,
-                                space_id: str,
-                                conversation_id: str,
-                                content: str,
-                                timeout=timedelta(minutes=20)) -> GenieMessage:
-        return self.create_message(content=content, conversation_id=conversation_id,
-                                   space_id=space_id).result(timeout=timeout)
-
-    def execute_message_query(self, space_id: str, conversation_id: str,
-                              message_id: str) -> GenieGetMessageQueryResultResponse:
-        """Execute SQL query in a conversation message.
-        
-        Execute the SQL query in the message.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
-            headers=headers)
-        return GenieGetMessageQueryResultResponse.from_dict(res)
-
-    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
-        """Get conversation message.
-        
-        Get message from conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the target conversation is located.
-        :param conversation_id: str
-          The ID associated with the target conversation.
-        :param message_id: str
-          The ID associated with the target message from the identified conversation.
-        
-        :returns: :class:`GenieMessage`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
-            headers=headers)
-        return GenieMessage.from_dict(res)
-
-    def get_message_query_result(self, space_id: str, conversation_id: str,
-                                 message_id: str) -> GenieGetMessageQueryResultResponse:
-        """Get conversation message SQL query result.
-        
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
-        has a query attachment and the message status is `EXECUTING_QUERY`.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
-            headers=headers)
-        return GenieGetMessageQueryResultResponse.from_dict(res)
-
-    def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
-        """Start conversation.
-        
-        Start a new conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where you want to start a conversation.
-        :param content: str
-          The text of the message that starts the conversation.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        """
-        body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieStartConversationResponse.from_dict(op_response),
-                    conversation_id=op_response['conversation_id'],
-                    message_id=op_response['message_id'],
-                    space_id=space_id)
-
-    def start_conversation_and_wait(self, space_id: str, content: str,
-                                    timeout=timedelta(minutes=20)) -> GenieMessage:
-        return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
-
-
 class LakeviewAPI:
     """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete)."""
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index b841bec8b..2f752d06c 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1588,7 +1588,7 @@ class UpdateWorkspaceAssignments:
     """The ID of the user, service principal, or group."""
 
     workspace_id: Optional[int] = None
-    """The workspace ID for the account."""
+    """The workspace ID."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -3894,7 +3894,7 @@ def update(self,
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 8220a0715..c0d4240bf 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -3861,9 +3861,6 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
-    prev_page_token: Optional[str] = None
-    """A token that can be used to list the previous page of sub-resources."""
-
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -3956,7 +3953,6 @@ def as_dict(self) -> dict:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -4039,7 +4035,6 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index c10e43572..938445863 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -657,10 +657,6 @@ class CreateServingEndpoint:
     config: Optional[EndpointCoreConfigInput] = None
     """The core config of the serving endpoint."""
 
-    ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-    supported as of now."""
-
     rate_limits: Optional[List[RateLimit]] = None
     """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
     Gateway to manage rate limits."""
@@ -2554,12 +2550,6 @@ class ServedModelInput:
     min_provisioned_throughput: Optional[int] = None
     """The minimum tokens per second that the endpoint can scale down to."""
 
-    max_provisioned_throughput: Optional[int] = None
-    """The maximum tokens per second that the endpoint can scale up to."""
-
-    min_provisioned_throughput: Optional[int] = None
-    """The minimum tokens per second that the endpoint can scale down to."""
-
     name: Optional[str] = None
     """The name of a served entity. It must be unique across an endpoint. A served entity name can
     consist of alphanumeric characters, dashes, and underscores. If not specified for an external
@@ -2574,14 +2564,6 @@ class ServedModelInput:
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
     is 0."""
 
-    workload_size: Optional[ServedModelInputWorkloadSize] = None
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
-
     workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
@@ -3431,9 +3413,6 @@ def create(self,
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
-        :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 059b744ef..bc3c03d31 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -3078,49 +3078,74 @@ class LegacyQuery:
     can_edit: Optional[bool] = None
     """Describes whether the authenticated user is allowed to edit the definition of this query."""
 
-    catalog: Optional[str] = None
-    """Name of the catalog where this query will be executed."""
+    created_at: Optional[str] = None
+    """The timestamp when this query was created."""
 
-    create_time: Optional[str] = None
-    """Timestamp when this query was created."""
+    data_source_id: Optional[str] = None
+    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
+    warehouse ID. [Learn more]
+    
+    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
 
-    display_name: Optional[str] = None
-    """Display name of the query that appears in list views, widget headings, and on the query page."""
-
     id: Optional[str] = None
-    """UUID identifying the query."""
+    """Query ID."""
 
-    last_modifier_user_name: Optional[str] = None
-    """Username of the user who last saved changes to this query."""
+    is_archived: Optional[bool] = None
+    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
+    in search results. If this boolean is `true`, the `options` property for this query includes a
+    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
 
-    lifecycle_state: Optional[LifecycleState] = None
-    """Indicates whether the query is trashed."""
+    is_draft: Optional[bool] = None
+    """Whether the query is a draft. Draft queries only appear in list views for their owners.
+    Visualizations from draft queries cannot appear on dashboards."""
 
-    owner_user_name: Optional[str] = None
-    """Username of the user that owns the query."""
+    is_favorite: Optional[bool] = None
+    """Whether this query object appears in the current user's favorites list. This flag determines
+    whether the star icon for favorites is selected."""
 
-    parameters: Optional[List[QueryParameter]] = None
-    """List of query parameter definitions."""
+    is_safe: Optional[bool] = None
+    """Text parameter types are not safe from SQL injection for all types of data source. Set this
+    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
+    data source type where text type parameters are handled safely."""
 
-    parent_path: Optional[str] = None
-    """Workspace path of the workspace folder containing the object."""
+    last_modified_by: Optional[User] = None
 
-    query_text: Optional[str] = None
-    """Text of the query to be run."""
+    last_modified_by_id: Optional[int] = None
+    """The ID of the user who last saved changes to this query."""
 
-    run_as_mode: Optional[RunAsMode] = None
-    """Sets the "Run as" role for the object."""
+    latest_query_data_id: Optional[str] = None
+    """If there is a cached result for this query and user, this field includes the query result ID. If
+    this query uses parameters, this field is always null."""
 
-    schema: Optional[str] = None
-    """Name of the schema where this query will be executed."""
+    name: Optional[str] = None
+    """The title of this query that appears in list views, widget headings, and on the query page."""
+
+    options: Optional[QueryOptions] = None
+
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    permission_tier: Optional[PermissionLevel] = None
+    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
+    * `CAN_MANAGE`: Can manage the query"""
+
+    query: Optional[str] = None
+    """The text of the query to be run."""
+
+    query_hash: Optional[str] = None
+    """A SHA-256 hash of the query text along with the authenticated user ID."""
+
+    run_as_role: Optional[RunAsRole] = None
+    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
 
     tags: Optional[List[str]] = None
 
-    update_time: Optional[str] = None
-    """Timestamp when this query was last updated."""
+    updated_at: Optional[str] = None
+    """The timestamp at which this query was last updated."""
 
     user: Optional[User] = None
 
@@ -3132,24 +3157,30 @@ class LegacyQuery:
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
         if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
+        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
+        if self.query is not None: body['query'] = self.query
+        if self.query_hash is not None: body['query_hash'] = self.query_hash
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
         if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -3188,16 +3219,21 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
                    created_at=d.get('created_at', None),
                    data_source_id=d.get('data_source_id', None),
                    description=d.get('description', None),
-                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   last_modifier_user_name=d.get('last_modifier_user_name', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   parent_path=d.get('parent_path', None),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
+                   is_archived=d.get('is_archived', None),
+                   is_draft=d.get('is_draft', None),
+                   is_favorite=d.get('is_favorite', None),
+                   is_safe=d.get('is_safe', None),
+                   last_modified_by=_from_dict(d, 'last_modified_by', User),
+                   last_modified_by_id=d.get('last_modified_by_id', None),
+                   latest_query_data_id=d.get('latest_query_data_id', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', QueryOptions),
+                   parent=d.get('parent', None),
+                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
+                   query=d.get('query', None),
+                   query_hash=d.get('query_hash', None),
+                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
                    tags=d.get('tags', None),
                    updated_at=d.get('updated_at', None),
                    user=_from_dict(d, 'user', User),
@@ -8666,4 +8702,4 @@ def update_permissions(self,
                            f'/api/2.0/permissions/warehouses/{warehouse_id}',
                            body=body,
                            headers=headers)
-        return WarehousePermissions.from_dict(res)
+        return WarehousePermissions.from_dict(res)
\ No newline at end of file
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 6230b8199..697f0a5da 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -92,7 +92,7 @@
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index acea442bb..940efa5dd 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -10,4 +10,4 @@ Manage Lakeview dashboards
    genie
    lakeview
    lakeview_embedded
-   query_execution
+   query_execution
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index f6bfe82f4..687976f5d 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -41,9 +41,6 @@
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
-        :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index 72e1e9a60..e503da073 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -48,4 +48,4 @@ def test_http_request(w, requests_mock):
     assert requests_mock.call_count == 1
     assert requests_mock.called
     assert response.status_code == 200 # Verify the response status
-    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
+    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
\ No newline at end of file

From f268fa33276c15a9d94333e257643823fd592a39 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 18:17:25 -0700
Subject: [PATCH 106/136] more diff

---
 .codegen/_openapi_sha         | 2 +-
 databricks/sdk/service/sql.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2a9a021e0..562b72fcc 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-99f644e72261ef5ecf8d74db20f4b7a1e09723cc
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc
\ No newline at end of file
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index bc3c03d31..cfa94aaa7 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -8702,4 +8702,4 @@ def update_permissions(self,
                            f'/api/2.0/permissions/warehouses/{warehouse_id}',
                            body=body,
                            headers=headers)
-        return WarehousePermissions.from_dict(res)
\ No newline at end of file
+        return WarehousePermissions.from_dict(res)

From c19c4a5c60c40cdf2eeeee30ce755fc70d0095c7 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:01:31 -0700
Subject: [PATCH 107/136] update push workflow

---
 .github/workflows/push.yml | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index a839096c0..4f8881465 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -10,7 +10,7 @@ on:
       - main
 
 jobs:
-  tests-ubuntu:
+  tests:
     uses: ./.github/workflows/test.yml
     strategy:
       fail-fast: false
@@ -19,16 +19,6 @@ jobs:
     with:
       os: ubuntu-latest
       pyVersion: ${{ matrix.pyVersion }}
-
-  tests-windows:
-      uses: ./.github/workflows/test.yml
-      strategy:
-        fail-fast: false
-        matrix:
-          pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
-      with:
-        os: windows-latest
-        pyVersion: ${{ matrix.pyVersion }}
           
   fmt:
     runs-on: ubuntu-latest

From 2402560cc3708944b23b3af5a4b3f8a8c1f6295c Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:03:00 -0700
Subject: [PATCH 108/136] update push workflow again

---
 .github/workflows/push.yml | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 4f8881465..80dc449a1 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -16,9 +16,20 @@ jobs:
       fail-fast: false
       matrix:
         pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    with:
-      os: ubuntu-latest
-      pyVersion: ${{ matrix.pyVersion }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Unshallow
+        run: git fetch --prune --unshallow
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.pyVersion }}
+
+      - name: Run tests
+        run: make dev install test
           
   fmt:
     runs-on: ubuntu-latest

From 2a6e44b3849c391d3fb33c4e976a0f86b0a4e9b3 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:04:55 -0700
Subject: [PATCH 109/136] test change

---
 .github/workflows/push.yml | 15 +--------------
 1 file changed, 1 insertion(+), 14 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 80dc449a1..f524bdcd3 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -15,7 +15,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
@@ -30,16 +30,3 @@ jobs:
 
       - name: Run tests
         run: make dev install test
-          
-  fmt:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Format all files
-        run: make dev fmt
-
-      - name: Fail on differences
-        run: git diff --exit-code

From d36c2de38867f19f6850db8c8d9c8f528dad2e85 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:06:03 -0700
Subject: [PATCH 110/136] fix tests

---
 .github/workflows/push.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index f524bdcd3..1c71fcd9e 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -11,7 +11,6 @@ on:
 
 jobs:
   tests:
-    uses: ./.github/workflows/test.yml
     strategy:
       fail-fast: false
       matrix:

From 5fe847c3e166a0cc87f1338a1e1f4fc0d984cf30 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:07:04 -0700
Subject: [PATCH 111/136] remove test version

---
 .github/workflows/push.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 1c71fcd9e..f76e696f2 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -14,7 +14,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout

From e10818e163529fa6e737a544a23305b42f110407 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:11:09 -0700
Subject: [PATCH 112/136] retry

---
 .github/workflows/push.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index f76e696f2..bef41718f 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -14,8 +14,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    runs-on: ubuntu-latest
+        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout
         uses: actions/checkout@v4

From 93b4ef1aa7be8bc1b489087a531fe400376f9f7b Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:22:29 -0700
Subject: [PATCH 113/136] revert release action

---
 .github/workflows/release.yml | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ae242c1d8..892bbc5c6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,16 +7,11 @@ on:
 
 jobs:
   publish:
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
+    runs-on: ubuntu-latest
     environment: release
-
     permissions:
       contents: write
       id-token: write
-
     steps:
       - uses: actions/checkout@v3
 

From b1e5803856d5069fb7cf336c64a7be0640e68ae7 Mon Sep 17 00:00:00 2001
From: Vinoo Ganesh 
Date: Mon, 3 Jun 2024 11:17:29 -0400
Subject: [PATCH 114/136] remove publish to pypi + setup changes

---
 .github/workflows/release.yml |  5 +----
 setup.py                      | 14 ++++++--------
 2 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 32890bde6..ae242c1d8 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -31,7 +31,4 @@ jobs:
           draft: true
           files: |
             dist/databricks-*.whl
-            dist/databricks-*.tar.gz
-
-      - uses: pypa/gh-action-pypi-publish@release/v1
-        name: Publish package distributions to PyPI
+            dist/databricks-*.tar.gz
\ No newline at end of file
diff --git a/setup.py b/setup.py
index b756e6d0d..812682684 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
 with version_file.open('r') as f:
     exec(f.read(), version_data)
 
-setup(name="databricks-sdk",
+setup(name="sync-databricks-sdk",
       version=version_data['__version__'],
       packages=find_packages(exclude=["tests", "*tests.*", "*tests"]),
       package_data = {"databricks.sdk": ["py.typed"]},
@@ -17,13 +17,11 @@
       extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock",
                               "yapf", "pycodestyle", "autoflake", "isort", "wheel",
                               "ipython", "ipywidgets", "requests-mock", "pyfakefs",
-                              "databricks-connect", "pytest-rerunfailures", "openai", 
-                              'langchain-openai; python_version > "3.7"', "httpx"],
-                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
-                      "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]},
-      author="Serge Smertin",
-      author_email="serge.smertin@databricks.com",
-      description="Databricks SDK for Python (Beta)",
+                              "databricks-connect"],
+                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"]},
+      author="Sync Computing",
+      author_email="info@synccomputing.com",
+      description="Sync Fork Databricks SDK for Python (Beta)",
       long_description=io.open("README.md", encoding="utf-8").read(),
       long_description_content_type='text/markdown',
       url="https://databricks-sdk-py.readthedocs.io",

From 687ecc9130f60b76f1bbc178e1de3f72a4ad4d83 Mon Sep 17 00:00:00 2001
From: Vinoo Ganesh 
Date: Mon, 3 Jun 2024 12:02:37 -0400
Subject: [PATCH 115/136] cleaning up publication

---
 .github/workflows/downstreams.yml | 54 -------------------------------
 .github/workflows/push.yml        | 40 ++++++++++++-----------
 2 files changed, 22 insertions(+), 72 deletions(-)
 delete mode 100644 .github/workflows/downstreams.yml

diff --git a/.github/workflows/downstreams.yml b/.github/workflows/downstreams.yml
deleted file mode 100644
index 6f57457cd..000000000
--- a/.github/workflows/downstreams.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: downstreams
-
-on:
-  pull_request:
-    types: [opened, synchronize]
-  merge_group:
-    types: [checks_requested]
-  push:
-    # Always run on push to main. The build cache can only be reused
-    # if it was saved by a run from the repository's default branch.
-    # The run result will be identical to that from the merge queue
-    # because the commit is identical, yet we need to perform it to
-    # seed the build cache.
-    branches:
-      - main
-
-permissions:
-  id-token: write
-  contents: read
-  pull-requests: write
-
-jobs:
-  compatibility:
-    strategy:
-      fail-fast: false
-      matrix:
-        downstream:
-          - name: ucx
-            org: databrickslabs
-          - name: blueprint
-            org: databrickslabs
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Install Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: '3.10'
-
-      - name: Install toolchain
-        run: |
-          pip install hatch==1.9.4
-
-      - name: Acceptance
-        uses: databrickslabs/sandbox/downstreams@downstreams/v0.0.1
-        with:
-          repo: ${{ matrix.downstream.name }}
-          org: ${{ matrix.downstream.org }}
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 70f094c18..f639f51eb 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -2,9 +2,12 @@ name: build
 
 on:
   pull_request:
-    types: [opened, synchronize]
+    types: [ opened, synchronize ]
   merge_group:
-    types: [checks_requested]
+    types: [ checks_requested ]
+  push:
+    branches:
+      - main
 
 jobs:
   tests-ubuntu:
@@ -12,27 +15,28 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    with:
-      os: ubuntu-latest
-      pyVersion: ${{ matrix.pyVersion }}
-
-  tests-windows:
-      uses: ./.github/workflows/test.yml
-      strategy:
-        fail-fast: false
-        matrix:
-          pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
-      with:
-        os: windows-latest
-        pyVersion: ${{ matrix.pyVersion }}
-          
+        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Unshallow
+        run: git fetch --prune --unshallow
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.pyVersion }}
+
+      - name: Run tests
+        run: make dev install test
+
   fmt:
     runs-on: ubuntu-latest
 
     steps:
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
 
       - name: Format all files
         run: make dev fmt

From 5380ae0cbdb088f6f5afc11f7023606aa8cbfd9d Mon Sep 17 00:00:00 2001
From: Cayman Williams 
Date: Tue, 2 Jul 2024 09:36:50 -0600
Subject: [PATCH 116/136] PROD 2198 Databricks Datatype Replacement in
 sync_backend (#1)

* small model changes
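
For context, a minimal sketch (not part of this patch) of the failure mode the `task_key` default avoids: a dataclass field without a default makes no-argument construction raise, which processing legacy cluster reports can trigger.

```python
# Hypothetical illustration: with `task_key: str = None`, a RunTask can be
# built without a task_key; without the default, this raises TypeError.
from databricks.sdk.service.jobs import RunTask

task = RunTask()
assert task.task_key is None
```
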
---
 databricks/sdk/service/compute.py | 28 ++++++++++++++++++++++++++++
 databricks/sdk/service/jobs.py    |  3 ++-
 2 files changed, 30 insertions(+), 1 deletion(-)

diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index c16f699bb..9e0958a9d 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -1019,12 +1019,24 @@ class ClusterDetails:
     
     - Name: """
 
+    disk_spec: Optional[dict] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     docker_image: Optional[DockerImage] = None
 
     driver: Optional[SparkNode] = None
     """Node on which the Spark driver resides. The driver node contains the Spark master and the
     Databricks application that manages the per-notebook Spark REPLs."""
 
+    driver_healthy: Optional[bool] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
+    driver_instance_source: Optional[dict] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     driver_instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool for the driver of the cluster belongs. The pool cluster
     uses the instance pool with id (instance_pool_id) if the driver pool is not assigned."""
@@ -1033,6 +1045,10 @@ class ClusterDetails:
     """The node type of the Spark driver. Note that this field is optional; if unset, the driver node
     type will be set as the same value as `node_type_id` defined above."""
 
+    effective_spark_version: Optional[str] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     enable_elastic_disk: Optional[bool] = None
     """Autoscaling Local Storage: when enabled, this cluster will dynamically acquire additional disk
     space when its Spark workers are running low on disk space. This feature requires specific AWS
@@ -1053,9 +1069,17 @@ class ClusterDetails:
     scripts are executed sequentially in the order provided. If `cluster_log_conf` is specified,
     init script logs are sent to `//init_scripts`."""
 
+    init_scripts_safe_mode: Optional[int] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     instance_pool_id: Optional[str] = None
     """The optional ID of the instance pool to which the cluster belongs."""
 
+    instance_source: Optional[dict] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     is_single_node: Optional[bool] = None
     """This field can only be used with `kind`.
     
@@ -1074,6 +1098,10 @@ class ClusterDetails:
     The first usage of this value is for the simple cluster form where it sets `kind =
     CLASSIC_PREVIEW`."""
 
+    last_activity_time: Optional[int] = None
+    """[PROD-2198] An APC attribute only. This field is missing in the API docs and the unforked databricks
+    sdk so it needed to be added here"""
+
     last_restarted_time: Optional[int] = None
     """the timestamp that the cluster was started/restarted"""
 
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 6cc2e4213..8feef6140 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -4871,7 +4871,8 @@ def from_dict(cls, d: Dict[str, any]) -> RunStatus:
 class RunTask:
     """Used when outputting a child run, in GetRun or ListRuns."""
 
-    task_key: str
+    # [PROD-2198] Adding the default here is necessary to process legacy cluster reports.
+    task_key: str = None
     """A unique name for the task. This field is used to refer to this task from other tasks. This
     field is required and must be unique within its parent job. On Update or Reset, this field is
     used to reference the tasks to be updated or reset."""

From 2dd15600455d033f7824660dac38e9ad6dd277dc Mon Sep 17 00:00:00 2001
From: Cayman Williams 
Date: Mon, 5 Aug 2024 09:17:32 -0600
Subject: [PATCH 117/136] make GitProvider enum case-insensitive (#2)
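
With the `_missing_` hook, enum lookups fall back to a case-insensitive match
instead of raising; if nothing matches, `_missing_` returns None and the usual
ValueError still surfaces. An illustrative sketch (not part of this patch):

```python
from databricks.sdk.service.jobs import GitProvider

# The exact member value is 'gitLab'; the hook makes any casing resolve to it.
assert GitProvider('GITLAB') is GitProvider.GIT_LAB
assert GitProvider('gitlab') is GitProvider.GIT_LAB
```
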

---
 databricks/sdk/service/jobs.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 8feef6140..bd7fdb1a6 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -1640,6 +1640,13 @@ class GitProvider(Enum):
     GIT_LAB = 'gitLab'
     GIT_LAB_ENTERPRISE_EDITION = 'gitLabEnterpriseEdition'
 
+    # [PROD-2302] The API treats this enum as case-insensitive, and the strictness here was causing failures.
+    @classmethod
+    def _missing_(cls, value):
+        for member in cls:
+            if member.value.lower() == value.lower():
+                return member
+
 
 @dataclass
 class GitSnapshot:

From 7c2d68d4f74f182aab9976a93ea78c9ae3de9fcc Mon Sep 17 00:00:00 2001
From: Vinoo Ganesh 
Date: Mon, 5 Aug 2024 15:25:51 -0400
Subject: [PATCH 118/136] update readme with release notes (#3)

---
 README.md | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/README.md b/README.md
index 9991c9cd0..f17435aef 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,18 @@
+# INFO
+
+This repo is Sync's fork of https://github.com/databricks/databricks-sdk-py/. It allows Sync to pull in upstream commits and features, and to contribute changes upstream that enhance both Sync's product offering and the Databricks ecosystem.
+
+This repo is *public*.
+
+To release this repo:
+
+1. Locally (in your terminal), tag the commit you want to release with a version, for example: `git tag v0.0.29-sync.0`
+2. Push the tag: `git push origin v0.0.29-sync.0`
+3. Pushing the tag triggers an automated GitHub Action that looks for tags starting with `v`: https://github.com/synccomputingcode/databricks-sdk-py/blob/main/.github/workflows/release.yml#L6
+4. The action runs in GitHub and creates a draft release with the release artifacts attached: https://github.com/synccomputingcode/databricks-sdk-py/releases/tag/untagged-327af053f51d1f4da444
+5. To make the release "real", edit it and publish it.
+6. Then bump the dependency inside Sync's codebase.
+
 # Databricks SDK for Python (Beta)
 
 [![PyPI - Downloads](https://img.shields.io/pypi/dw/databricks-sdk)](https://pypistats.org/packages/databricks-sdk)

From ae337ab9eb910e222f83539c90710beb0601b4ff Mon Sep 17 00:00:00 2001
From: gorskysd <71267847+gorskysd@users.noreply.github.com>
Date: Mon, 12 Aug 2024 15:35:47 -0400
Subject: [PATCH 119/136] add AWS_RESOURCE_QUOTA_EXCEEDED to
 TerminationReasonCode (#4)
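
A hedged usage sketch (the helper below is illustrative, not part of the SDK):
the new member lets callers match AWS quota terminations against the enum
rather than a raw string.

```python
from databricks.sdk.service.compute import ClusterDetails, TerminationReasonCode

def hit_aws_quota(details: ClusterDetails) -> bool:
    # termination_reason is only populated on terminated clusters
    reason = details.termination_reason
    return reason is not None and reason.code == TerminationReasonCode.AWS_RESOURCE_QUOTA_EXCEEDED
```
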

---
 databricks/sdk/service/compute.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 9e0958a9d..63a971b73 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -7578,6 +7578,7 @@ class TerminationReasonCode(Enum):
     AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE = 'AWS_INSUFFICIENT_INSTANCE_CAPACITY_FAILURE'
     AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE = 'AWS_MAX_SPOT_INSTANCE_COUNT_EXCEEDED_FAILURE'
     AWS_REQUEST_LIMIT_EXCEEDED = 'AWS_REQUEST_LIMIT_EXCEEDED'
+    AWS_RESOURCE_QUOTA_EXCEEDED = 'AWS_RESOURCE_QUOTA_EXCEEDED'
     AWS_UNSUPPORTED_FAILURE = 'AWS_UNSUPPORTED_FAILURE'
     AZURE_BYOK_KEY_PERMISSION_FAILURE = 'AZURE_BYOK_KEY_PERMISSION_FAILURE'
     AZURE_EPHEMERAL_DISK_FAILURE = 'AZURE_EPHEMERAL_DISK_FAILURE'

From 96791b066725f91b44e09f5597a5204518d71d84 Mon Sep 17 00:00:00 2001
From: Cayman Williams 
Date: Thu, 22 Aug 2024 12:19:47 -0600
Subject: [PATCH 120/136] add continuous trigger type (#5)
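
With the member added, runs triggered by continuous jobs deserialize to the
enum. A hedged sketch using a hypothetical payload:

```python
from databricks.sdk.service.jobs import Run, TriggerType

run = Run.from_dict({'run_id': 42, 'trigger': 'CONTINUOUS'})  # hypothetical payload
assert run.trigger is TriggerType.CONTINUOUS
```
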

---
 databricks/sdk/service/jobs.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index bd7fdb1a6..c0d4240bf 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -6912,6 +6912,8 @@ class TriggerType(Enum):
     RETRY = 'RETRY'
     RUN_JOB_TASK = 'RUN_JOB_TASK'
     TABLE = 'TABLE'
+    # [PROD-2364] This trigger type is missing from the API docs and SDK.
+    CONTINUOUS = 'CONTINUOUS'
 
 
 @dataclass

From 1940f45357727bec5d6b3f3149541154e35badb5 Mon Sep 17 00:00:00 2001
From: Cayman Williams 
Date: Wed, 23 Oct 2024 12:34:41 -0600
Subject: [PATCH 121/136] [Release] Release v0.0.36-sync.0 (#9)

* update to v0.36
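
The headline addition is the GenieAPI, whose waiters poll `get_message` with a
linear backoff capped at ~10s plus jitter until the message reaches COMPLETED
(raising on FAILED or timeout). A hedged usage sketch, assuming the workspace
client exposes the API as `w.genie` and using a placeholder space ID:

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# Blocks (default timeout: 20 minutes) while the waiter polls message status.
message = w.genie.start_conversation_and_wait(
    space_id='0123456789abcdef',  # placeholder
    content='What were sales last quarter?')
result = w.genie.get_message_query_result(space_id='0123456789abcdef',
                                          conversation_id=message.conversation_id,
                                          message_id=message.id)
```
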
---
 .codegen/_openapi_sha                        |   2 +-
 .github/workflows/test.yml                   |   2 +-
 databricks/sdk/_base_client.py               |   1 -
 databricks/sdk/credentials_provider.py       |   1 -
 databricks/sdk/service/dashboards.py         | 187 ++++++++++++++++
 databricks/sdk/service/iam.py                |   4 +-
 databricks/sdk/service/jobs.py               |   5 +
 databricks/sdk/service/serving.py            |  21 ++
 databricks/sdk/service/sql.py                | 136 +++++------
 docs/account/iam/workspace_assignment.rst    |   2 +-
 docs/workspace/dashboards/index.rst          |   2 +-
 docs/workspace/serving/serving_endpoints.rst |   3 +
 tests/integration/test_auth.py               |  19 +-
 tests/test_base_client.py                    | 224 +------------------
 tests/test_core.py                           |   7 +-
 tests/test_data_plane.py                     |   2 +-
 tests/test_model_serving_auth.py             |  71 +-----
 tests/test_open_ai_mixin.py                  |  21 --
 tests/test_user_agent.py                     |  44 ----
 19 files changed, 294 insertions(+), 460 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 562b72fcc..2a9a021e0 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-99f644e72261ef5ecf8d74db20f4b7a1e09723cc
\ No newline at end of file
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index bb86e38a3..eeaf35c20 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -32,4 +32,4 @@ jobs:
       - name: Publish test coverage
         uses: codecov/codecov-action@v4
         env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
\ No newline at end of file
+          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index f0950f656..58fcb10a5 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -276,7 +276,6 @@ def _perform(self,
         error = self._error_parser.get_api_error(response)
         if error is not None:
             raise error from None
-
         return response
 
     def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 24d01f678..07fb48c5a 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -189,7 +189,6 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
-
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index ba01ba41d..c81159cca 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -1804,6 +1804,193 @@ def start_conversation_and_wait(self, space_id: str, content: str,
         return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
 
 
+class GenieAPI:
+    """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
+    business users can use to ask questions using natural language. Genie uses data registered to Unity
+    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
+    Assistant must be enabled."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def wait_get_message_genie_completed(
+            self,
+            conversation_id: str,
+            message_id: str,
+            space_id: str,
+            timeout=timedelta(minutes=20),
+            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
+        deadline = time.time() + timeout.total_seconds()
+        target_states = (MessageStatus.COMPLETED, )
+        failure_states = (MessageStatus.FAILED, )
+        status_message = 'polling...'
+        attempt = 1
+        while time.time() < deadline:
+            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
+            status = poll.status
+            status_message = f'current status: {status}'
+            if status in target_states:
+                return poll
+            if callback:
+                callback(poll)
+            if status in failure_states:
+                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
+                raise OperationFailed(msg)
+            prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
+            sleep = attempt
+            if sleep > 10:
+                # sleep 10s max per attempt
+                sleep = 10
+            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
+            time.sleep(sleep + random.random())
+            attempt += 1
+        raise TimeoutError(f'timed out after {timeout}: {status_message}')
+
+    def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
+        """Create conversation message.
+        
+        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
+        previously created messages in the conversation to respond.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the conversation is started.
+        :param conversation_id: str
+          The ID associated with the conversation.
+        :param content: str
+          User message content.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
+        body = {}
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
+            body=body,
+            headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieMessage.from_dict(op_response),
+                    conversation_id=conversation_id,
+                    message_id=op_response['id'],
+                    space_id=space_id)
+
+    def create_message_and_wait(self,
+                                space_id: str,
+                                conversation_id: str,
+                                content: str,
+                                timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.create_message(content=content, conversation_id=conversation_id,
+                                   space_id=space_id).result(timeout=timeout)
+
+    def execute_message_query(self, space_id: str, conversation_id: str,
+                              message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Execute SQL query in a conversation message.
+        
+        Execute the SQL query in the message.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'POST',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
+    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
+        """Get conversation message.
+        
+        Get message from conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where the target conversation is located.
+        :param conversation_id: str
+          The ID associated with the target conversation.
+        :param message_id: str
+          The ID associated with the target message from the identified conversation.
+        
+        :returns: :class:`GenieMessage`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
+            headers=headers)
+        return GenieMessage.from_dict(res)
+
+    def get_message_query_result(self, space_id: str, conversation_id: str,
+                                 message_id: str) -> GenieGetMessageQueryResultResponse:
+        """Get conversation message SQL query result.
+        
+        Get the result of SQL query if the message has a query attachment. This is only available if a message
+        has a query attachment and the message status is `EXECUTING_QUERY`.
+        
+        :param space_id: str
+          Genie space ID
+        :param conversation_id: str
+          Conversation ID
+        :param message_id: str
+          Message ID
+        
+        :returns: :class:`GenieGetMessageQueryResultResponse`
+        """
+
+        headers = {'Accept': 'application/json', }
+
+        res = self._api.do(
+            'GET',
+            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
+            headers=headers)
+        return GenieGetMessageQueryResultResponse.from_dict(res)
+
+    def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
+        """Start conversation.
+        
+        Start a new conversation.
+        
+        :param space_id: str
+          The ID associated with the Genie space where you want to start a conversation.
+        :param content: str
+          The text of the message that starts the conversation.
+        
+        :returns:
+          Long-running operation waiter for :class:`GenieMessage`.
+          See :method:wait_get_message_genie_completed for more details.
+        """
+        body = {}
+        if content is not None: body['content'] = content
+        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
+
+        op_response = self._api.do('POST',
+                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
+                                   body=body,
+                                   headers=headers)
+        return Wait(self.wait_get_message_genie_completed,
+                    response=GenieStartConversationResponse.from_dict(op_response),
+                    conversation_id=op_response['conversation_id'],
+                    message_id=op_response['message_id'],
+                    space_id=space_id)
+
+    def start_conversation_and_wait(self, space_id: str, content: str,
+                                    timeout=timedelta(minutes=20)) -> GenieMessage:
+        return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
+
+
 class LakeviewAPI:
     """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete)."""
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index 2f752d06c..b841bec8b 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1588,7 +1588,7 @@ class UpdateWorkspaceAssignments:
     """The ID of the user, service principal, or group."""
 
     workspace_id: Optional[int] = None
-    """The workspace ID."""
+    """The workspace ID for the account."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -3894,7 +3894,7 @@ def update(self,
         specified principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index c0d4240bf..8220a0715 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -3861,6 +3861,9 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
+    prev_page_token: Optional[str] = None
+    """A token that can be used to list the previous page of sub-resources."""
+
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -3953,6 +3956,7 @@ def as_dict(self) -> dict:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
+        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -4035,6 +4039,7 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
+                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index 938445863..c10e43572 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -657,6 +657,10 @@ class CreateServingEndpoint:
     config: Optional[EndpointCoreConfigInput] = None
     """The core config of the serving endpoint."""
 
+    ai_gateway: Optional[AiGatewayConfig] = None
+    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+    supported as of now."""
+
     rate_limits: Optional[List[RateLimit]] = None
     """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
     Gateway to manage rate limits."""
@@ -2550,6 +2554,12 @@ class ServedModelInput:
     min_provisioned_throughput: Optional[int] = None
     """The minimum tokens per second that the endpoint can scale down to."""
 
+    max_provisioned_throughput: Optional[int] = None
+    """The maximum tokens per second that the endpoint can scale up to."""
+
+    min_provisioned_throughput: Optional[int] = None
+    """The minimum tokens per second that the endpoint can scale down to."""
+
     name: Optional[str] = None
     """The name of a served entity. It must be unique across an endpoint. A served entity name can
     consist of alphanumeric characters, dashes, and underscores. If not specified for an external
@@ -2564,6 +2574,14 @@ class ServedModelInput:
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
     is 0."""
 
+    workload_size: Optional[ServedModelInputWorkloadSize] = None
+    """The workload size of the served model. The workload size corresponds to a range of provisioned
+    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
+    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
+    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
+    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
+    each workload size will be 0."""
+
     workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
@@ -3413,6 +3431,9 @@ def create(self,
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index cfa94aaa7..059b744ef 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -3078,74 +3078,49 @@ class LegacyQuery:
     can_edit: Optional[bool] = None
     """Describes whether the authenticated user is allowed to edit the definition of this query."""
 
-    created_at: Optional[str] = None
-    """The timestamp when this query was created."""
+    catalog: Optional[str] = None
+    """Name of the catalog where this query will be executed."""
 
-    data_source_id: Optional[str] = None
-    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
-    warehouse ID. [Learn more]
-    
-    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
+    create_time: Optional[str] = None
+    """Timestamp when this query was created."""
 
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
 
-    id: Optional[str] = None
-    """Query ID."""
-
-    is_archived: Optional[bool] = None
-    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
-    in search results. If this boolean is `true`, the `options` property for this query includes a
-    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
-
-    is_draft: Optional[bool] = None
-    """Whether the query is a draft. Draft queries only appear in list views for their owners.
-    Visualizations from draft queries cannot appear on dashboards."""
-
-    is_favorite: Optional[bool] = None
-    """Whether this query object appears in the current user's favorites list. This flag determines
-    whether the star icon for favorites is selected."""
-
-    is_safe: Optional[bool] = None
-    """Text parameter types are not safe from SQL injection for all types of data source. Set this
-    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
-    data source type where text type parameters are handled safely."""
-
-    last_modified_by: Optional[User] = None
+    display_name: Optional[str] = None
+    """Display name of the query that appears in list views, widget headings, and on the query page."""
 
-    last_modified_by_id: Optional[int] = None
-    """The ID of the user who last saved changes to this query."""
+    id: Optional[str] = None
+    """UUID identifying the query."""
 
-    latest_query_data_id: Optional[str] = None
-    """If there is a cached result for this query and user, this field includes the query result ID. If
-    this query uses parameters, this field is always null."""
+    last_modifier_user_name: Optional[str] = None
+    """Username of the user who last saved changes to this query."""
 
-    name: Optional[str] = None
-    """The title of this query that appears in list views, widget headings, and on the query page."""
+    lifecycle_state: Optional[LifecycleState] = None
+    """Indicates whether the query is trashed."""
 
-    options: Optional[QueryOptions] = None
+    owner_user_name: Optional[str] = None
+    """Username of the user that owns the query."""
 
-    parent: Optional[str] = None
-    """The identifier of the workspace folder containing the object."""
+    parameters: Optional[List[QueryParameter]] = None
+    """List of query parameter definitions."""
 
-    permission_tier: Optional[PermissionLevel] = None
-    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
-    * `CAN_MANAGE`: Can manage the query"""
+    parent_path: Optional[str] = None
+    """Workspace path of the workspace folder containing the object."""
 
-    query: Optional[str] = None
-    """The text of the query to be run."""
+    query_text: Optional[str] = None
+    """Text of the query to be run."""
 
-    query_hash: Optional[str] = None
-    """A SHA-256 hash of the query text along with the authenticated user ID."""
+    run_as_mode: Optional[RunAsMode] = None
+    """Sets the "Run as" role for the object."""
 
-    run_as_role: Optional[RunAsRole] = None
-    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
-    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
+    schema: Optional[str] = None
+    """Name of the schema where this query will be executed."""
 
     tags: Optional[List[str]] = None
 
-    updated_at: Optional[str] = None
-    """The timestamp at which this query was last updated."""
+    update_time: Optional[str] = None
+    """Timestamp when this query was last updated."""
 
     user: Optional[User] = None
 
@@ -3157,30 +3132,24 @@ class LegacyQuery:
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.can_edit is not None: body['can_edit'] = self.can_edit
-        if self.created_at is not None: body['created_at'] = self.created_at
-        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
+        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
+        if self.catalog is not None: body['catalog'] = self.catalog
+        if self.create_time is not None: body['create_time'] = self.create_time
         if self.description is not None: body['description'] = self.description
+        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.is_archived is not None: body['is_archived'] = self.is_archived
-        if self.is_draft is not None: body['is_draft'] = self.is_draft
-        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
-        if self.is_safe is not None: body['is_safe'] = self.is_safe
-        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
-        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
-        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
-        if self.name is not None: body['name'] = self.name
-        if self.options: body['options'] = self.options.as_dict()
-        if self.parent is not None: body['parent'] = self.parent
-        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
-        if self.query is not None: body['query'] = self.query
-        if self.query_hash is not None: body['query_hash'] = self.query_hash
-        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
+        if self.last_modifier_user_name is not None:
+            body['last_modifier_user_name'] = self.last_modifier_user_name
+        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
+        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
+        if self.parent_path is not None: body['parent_path'] = self.parent_path
+        if self.query_text is not None: body['query_text'] = self.query_text
+        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
+        if self.schema is not None: body['schema'] = self.schema
         if self.tags: body['tags'] = [v for v in self.tags]
-        if self.updated_at is not None: body['updated_at'] = self.updated_at
-        if self.user: body['user'] = self.user.as_dict()
-        if self.user_id is not None: body['user_id'] = self.user_id
-        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
+        if self.update_time is not None: body['update_time'] = self.update_time
+        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -3219,21 +3188,16 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
                    created_at=d.get('created_at', None),
                    data_source_id=d.get('data_source_id', None),
                    description=d.get('description', None),
+                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   is_archived=d.get('is_archived', None),
-                   is_draft=d.get('is_draft', None),
-                   is_favorite=d.get('is_favorite', None),
-                   is_safe=d.get('is_safe', None),
-                   last_modified_by=_from_dict(d, 'last_modified_by', User),
-                   last_modified_by_id=d.get('last_modified_by_id', None),
-                   latest_query_data_id=d.get('latest_query_data_id', None),
-                   name=d.get('name', None),
-                   options=_from_dict(d, 'options', QueryOptions),
-                   parent=d.get('parent', None),
-                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
-                   query=d.get('query', None),
-                   query_hash=d.get('query_hash', None),
-                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
+                   last_modifier_user_name=d.get('last_modifier_user_name', None),
+                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   owner_user_name=d.get('owner_user_name', None),
+                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
+                   parent_path=d.get('parent_path', None),
+                   query_text=d.get('query_text', None),
+                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
+                   schema=d.get('schema', None),
                    tags=d.get('tags', None),
                    updated_at=d.get('updated_at', None),
                    user=_from_dict(d, 'user', User),
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 697f0a5da..6230b8199 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -92,7 +92,7 @@
         specified principal.
         
         :param workspace_id: int
-          The workspace ID.
+          The workspace ID for the account.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index 940efa5dd..acea442bb 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -10,4 +10,4 @@ Manage Lakeview dashboards
    genie
    lakeview
    lakeview_embedded
-   query_execution
\ No newline at end of file
+   query_execution
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index 687976f5d..f6bfe82f4 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -41,6 +41,9 @@
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
+        :param ai_gateway: :class:`AiGatewayConfig` (optional)
+          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
+          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 3ee271778..0bf7f951d 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -133,16 +133,15 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib
 
     tasks = []
     for v in dbr_versions:
-        t = Task(
-            task_key=f'test_{v.key.replace(".", "_")}',
-            notebook_task=NotebookTask(notebook_path=notebook_path),
-            new_cluster=ClusterSpec(
-                spark_version=v.key,
-                num_workers=1,
-                instance_pool_id=instance_pool_id,
-                # GCP uses "custom" data security mode by default, which does not support UC.
-                data_security_mode=DataSecurityMode.SINGLE_USER),
-            libraries=[library])
+        t = Task(task_key=f'test_{v.key.replace(".", "_")}',
+                 notebook_task=NotebookTask(notebook_path=notebook_path),
+                 new_cluster=ClusterSpec(
+                     spark_version=v.key,
+                     num_workers=1,
+                     instance_pool_id=instance_pool_id,
+                     # GCP uses "custom" data security mode by default, which does not support UC.
+                     data_security_mode=DataSecurityMode.SINGLE_USER),
+                 libraries=[library])
         tasks.append(t)
 
     waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index 16a8ecfc4..e9e7324a9 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,22 +1,18 @@
-import io
-import random
 from http.server import BaseHTTPRequestHandler
-from typing import Callable, Iterator, List, Optional, Tuple, Type
-from unittest.mock import Mock
+from typing import Iterator, List
 
 import pytest
-from requests import PreparedRequest, Response, Timeout
+import requests
 
 from databricks.sdk import errors, useragent
-from databricks.sdk._base_client import (_BaseClient, _RawResponse,
-                                         _StreamingResponse)
+from databricks.sdk._base_client import _BaseClient, _StreamingResponse
 from databricks.sdk.core import DatabricksError
 
 from .clock import FakeClock
 from .fixture_server import http_fixture_server
 
 
-class DummyResponse(_RawResponse):
+class DummyResponse(requests.Response):
     _content: Iterator[bytes]
     _closed: bool = False
 
@@ -280,215 +276,3 @@ def inner(h: BaseHTTPRequestHandler):
         assert 'foo' in res
 
     assert len(requests) == 2
-
-
-@pytest.mark.parametrize(
-    'chunk_size,expected_chunks,data_size',
-    [
-        (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
-        (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
-        (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
-    ])
-def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
-    rng = random.Random(42)
-    test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
-
-    content_chunks = []
-    mock_response = Mock(spec=_RawResponse)
-
-    def mock_iter_content(chunk_size: int, decode_unicode: bool):
-        # Simulate how requests would chunk the data.
-        for i in range(0, len(test_data), chunk_size):
-            chunk = test_data[i:i + chunk_size]
-            content_chunks.append(chunk) # track chunks for verification
-            yield chunk
-
-    mock_response.iter_content = mock_iter_content
-    stream = _StreamingResponse(mock_response)
-    stream.set_chunk_size(chunk_size)
-
-    # Read all data one byte at a time.
-    received_data = b""
-    while True:
-        chunk = stream.read(1)
-        if not chunk:
-            break
-        received_data += chunk
-
-    assert received_data == test_data # all data was received correctly
-    assert len(content_chunks) == expected_chunks # correct number of chunks
-    assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
-
-
-def test_is_seekable_stream():
-    client = _BaseClient()
-
-    # Test various input types that are not streams.
-    assert not client._is_seekable_stream(None) # None
-    assert not client._is_seekable_stream("string data") # str
-    assert not client._is_seekable_stream(b"binary data") # bytes
-    assert not client._is_seekable_stream(["list", "data"]) # list
-    assert not client._is_seekable_stream(42) # int
-
-    # Test non-seekable stream.
-    non_seekable = io.BytesIO(b"test data")
-    non_seekable.seekable = lambda: False
-    assert not client._is_seekable_stream(non_seekable)
-
-    # Test seekable streams.
-    assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO
-    assert client._is_seekable_stream(io.StringIO("test data")) # StringIO
-
-    # Test file objects.
-    with open(__file__, 'rb') as f:
-        assert client._is_seekable_stream(f) # File object
-
-    # Test custom seekable stream.
-    class CustomSeekableStream(io.IOBase):
-
-        def seekable(self):
-            return True
-
-        def seek(self, offset, whence=0):
-            return 0
-
-        def tell(self):
-            return 0
-
-    assert client._is_seekable_stream(CustomSeekableStream())
-
-
-class RetryTestCase:
-
-    def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool,
-                 expected_result: bytes):
-        self._data_provider = data_provider
-        self._offset = offset
-        self._expected_result = expected_result
-        self._expected_failure = expected_failure
-
-    def get_data(self):
-        data = self._data_provider()
-        if self._offset is not None:
-            data.seek(self._offset)
-        return data
-
-    @classmethod
-    def create_non_seekable_stream(cls, data: bytes):
-        result = io.BytesIO(data)
-        result.seekable = lambda: False # makes the stream appear non-seekable
-        return result
-
-
-class MockSession:
-
-    def __init__(self, failure_count: int, failure_provider: Callable[[], Response]):
-        self._failure_count = failure_count
-        self._received_requests: List[bytes] = []
-        self._failure_provider = failure_provider
-
-    @classmethod
-    def raise_timeout_exception(cls):
-        raise Timeout("Fake timeout")
-
-    @classmethod
-    def return_retryable_response(cls):
-        # fill response fields so that logging does not fail
-        response = Response()
-        response._content = b''
-        response.status_code = 429
-        response.headers = {'Retry-After': '1'}
-        response.url = 'http://test.com/'
-
-        response.request = PreparedRequest()
-        response.request.url = response.url
-        response.request.method = 'POST'
-        response.request.headers = None
-        response.request.body = b''
-        return response
-
-    # following the signature of Session.request()
-    def request(self,
-                method,
-                url,
-                params=None,
-                data=None,
-                headers=None,
-                cookies=None,
-                files=None,
-                auth=None,
-                timeout=None,
-                allow_redirects=True,
-                proxies=None,
-                hooks=None,
-                stream=None,
-                verify=None,
-                cert=None,
-                json=None):
-        request_body = data.read()
-
-        if isinstance(request_body, str):
-            request_body = request_body.encode('utf-8') # to be able to compare with expected bytes
-
-        self._received_requests.append(request_body)
-        if self._failure_count > 0:
-            self._failure_count -= 1
-            return self._failure_provider()
-            #
-        else:
-            # fill response fields so that logging does not fail
-            response = Response()
-            response._content = b''
-            response.status_code = 200
-            response.reason = 'OK'
-            response.url = url
-
-            response.request = PreparedRequest()
-            response.request.url = url
-            response.request.method = method
-            response.request.headers = headers
-            response.request.body = data
-            return response
-
-
-@pytest.mark.parametrize(
-    'test_case',
-    [
-        # bytes -> BytesIO
-        RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"),
-        # str -> BytesIO
-        RetryTestCase(lambda: "0123456789", None, False, b"0123456789"),
-        # BytesIO directly
-        RetryTestCase(lambda: io.BytesIO(b"0123456789"), None, False, b"0123456789"),
-        # BytesIO directly with offset
-        RetryTestCase(lambda: io.BytesIO(b"0123456789"), 4, False, b"456789"),
-        # StringIO
-        RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"),
-        # Non-seekable
-        RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True,
-                      b"0123456789")
-    ])
-@pytest.mark.parametrize('failure', [[MockSession.raise_timeout_exception, Timeout],
-                                     [MockSession.return_retryable_response, errors.TooManyRequests]])
-def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]):
-    failure_count = 2
-
-    data = test_case.get_data()
-
-    session = MockSession(failure_count, failure[0])
-    client = _BaseClient()
-    client._session = session
-
-    def do():
-        client.do('POST', f'test.com/foo', data=data)
-
-    if test_case._expected_failure:
-        expected_attempts_made = 1
-        exception_class = failure[1]
-        with pytest.raises(exception_class):
-            do()
-    else:
-        expected_attempts_made = failure_count + 1
-        do()
-
-    assert session._received_requests == [test_case._expected_result for _ in range(expected_attempts_made)]
diff --git a/tests/test_core.py b/tests/test_core.py
index 32431172b..1cca428cb 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -8,7 +8,7 @@
 
 import pytest
 
-from databricks.sdk import WorkspaceClient, errors, useragent
+from databricks.sdk import WorkspaceClient, errors
 from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
@@ -178,11 +178,6 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
-    # Clear all environment variables and cached CICD provider.
-    for k in os.environ:
-        monkeypatch.delenv(k, raising=False)
-    useragent._cicd_provider = None
-
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
index 1eac92382..a74658964 100644
--- a/tests/test_data_plane.py
+++ b/tests/test_data_plane.py
@@ -2,7 +2,7 @@
 
 from databricks.sdk.data_plane import DataPlaneService
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.serving import DataPlaneInfo
+from databricks.sdk.service.oauth2 import DataPlaneInfo
 
 info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
 
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index 49aed33a5..fa68b3a5c 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -1,10 +1,8 @@
-import threading
 import time
 
 import pytest
 
 from databricks.sdk.core import Config
-from databricks.sdk.credentials_provider import ModelServingUserCredentials
 
 from .conftest import raises
 
@@ -48,24 +46,15 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
 
 
-@pytest.mark.parametrize(
-    "env_values, oauth_file_name",
-    [
-        ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
-        ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')
-          ], "invalid_file_name"), # In Model Serving and Invalid File Name
-        ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
-          ], "invalid_file_name"), # In Model Serving and Invalid File Name
-        ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
-    ])
+@pytest.mark.parametrize("env_values, oauth_file_name", [
+    ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
+    ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name
+    ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
+      ], "invalid_file_name"), # In Model Serving and Invalid File Name
+    ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
+])
 @raises(default_auth_base_error_message)
 def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
-    # Guarantee that the tests defaults to env variables rather than config file.
-    #
-    # TODO: this is hacky and we should find a better way to tell the config
-    # that it should not read from the config file.
-    monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x')
-
     for (env_name, env_value) in env_values:
         monkeypatch.setenv(env_name, env_value)
     monkeypatch.setattr(
@@ -113,49 +102,3 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     # Read V2 now
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2'
-
-
-def test_agent_user_credentials(monkeypatch, mocker):
-    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
-    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
-    monkeypatch.setattr(
-        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
-        "tests/testdata/model-serving-test-token")
-
-    invokers_token_val = "databricks_invokers_token"
-    current_thread = threading.current_thread()
-    thread_data = current_thread.__dict__
-    thread_data["invokers_token"] = invokers_token_val
-
-    cfg = Config(credentials_strategy=ModelServingUserCredentials())
-    assert cfg.auth_type == 'model_serving_user_credentials'
-
-    headers = cfg.authenticate()
-
-    assert (cfg.host == 'x')
-    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
-
-    # Test updates of invokers token
-    invokers_token_val = "databricks_invokers_token_v2"
-    current_thread = threading.current_thread()
-    thread_data = current_thread.__dict__
-    thread_data["invokers_token"] = invokers_token_val
-
-    headers = cfg.authenticate()
-    assert (cfg.host == 'x')
-    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
-
-
-# If this credential strategy is being used in a non model serving environments then use default credential strategy instead
-def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch):
-
-    monkeypatch.setenv('DATABRICKS_HOST', 'x')
-    monkeypatch.setenv('DATABRICKS_TOKEN', 'token')
-
-    cfg = Config(credentials_strategy=ModelServingUserCredentials())
-    assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment
-
-    headers = cfg.authenticate()
-
-    assert (cfg.host == 'https://x')
-    assert headers.get("Authorization") == f'Bearer token'
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index e503da073..1858c66cb 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -1,10 +1,8 @@
 import sys
-from io import BytesIO
 
 import pytest
 
 from databricks.sdk.core import Config
-from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
 
 
 def test_open_ai_client(monkeypatch):
@@ -30,22 +28,3 @@ def test_langchain_open_ai_client(monkeypatch):
 
     assert client.openai_api_base == "https://test_host/serving-endpoints"
     assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
-
-
-def test_http_request(w, requests_mock):
-    headers = {"Accept": "text/plain", "Content-Type": "application/json", }
-    mocked_url = "http://localhost/api/2.0/external-function"
-    blob_response = BytesIO(b"The request was successful")
-
-    requests_mock.post(mocked_url,
-                       request_headers=headers,
-                       content=blob_response.getvalue(),
-                       status_code=200,
-                       )
-    response = w.serving_endpoints.http_request(conn="test_conn",
-                                                method=ExternalFunctionRequestHttpMethod.GET,
-                                                path="test_path")
-    assert requests_mock.call_count == 1
-    assert requests_mock.called
-    assert response.status_code == 200 # Verify the response status
-    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
\ No newline at end of file
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
index ba6f694f5..5083d9908 100644
--- a/tests/test_user_agent.py
+++ b/tests/test_user_agent.py
@@ -1,5 +1,3 @@
-import os
-
 import pytest
 
 from databricks.sdk.version import __version__
@@ -42,45 +40,3 @@ def test_user_agent_with_partner(user_agent):
     user_agent.with_partner('differenttest')
     assert 'partner/test' in user_agent.to_string()
     assert 'partner/differenttest' in user_agent.to_string()
-
-
-@pytest.fixture(scope="function")
-def clear_cicd():
-    # Save and clear env vars.
-    original_env = os.environ.copy()
-    os.environ.clear()
-
-    # Clear cached CICD provider.
-    from databricks.sdk import useragent
-    useragent._cicd_provider = None
-
-    yield
-
-    # Restore env vars.
-    os.environ = original_env
-
-
-def test_user_agent_cicd_no_provider(clear_cicd):
-    from databricks.sdk import useragent
-    user_agent = useragent.to_string()
-
-    assert 'cicd' not in user_agent
-
-
-def test_user_agent_cicd_one_provider(clear_cicd):
-    os.environ['GITHUB_ACTIONS'] = 'true'
-
-    from databricks.sdk import useragent
-    user_agent = useragent.to_string()
-
-    assert 'cicd/github' in user_agent
-
-
-def test_user_agent_cicd_two_provider(clear_cicd):
-    os.environ['GITHUB_ACTIONS'] = 'true'
-    os.environ['GITLAB_CI'] = 'true'
-
-    from databricks.sdk import useragent
-    user_agent = useragent.to_string()
-
-    assert 'cicd/github' in user_agent

From 145df70cfc1d890b842ff8b6bcc013d105502e82 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 16:11:00 -0700
Subject: [PATCH 122/136] merge upstream branch into fork

---
 .github/workflows/push.yml             |  31 ++--
 databricks/sdk/credentials_provider.py |  37 ++--
 setup.py                               |   6 +-
 tests/integration/test_auth.py         |  19 ++-
 tests/test_base_client.py              | 224 ++++++++++++++++++++++++-
 tests/test_core.py                     |   7 +-
 tests/test_data_plane.py               |   2 +-
 tests/test_model_serving_auth.py       |  71 +++++++-
 tests/test_open_ai_mixin.py            |  21 +++
 tests/test_user_agent.py               |  44 +++++
 10 files changed, 399 insertions(+), 63 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index f639f51eb..a839096c0 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -15,22 +15,21 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Unshallow
-        run: git fetch --prune --unshallow
-
-      - uses: actions/setup-python@v5
-        with:
-          python-version: ${{ matrix.pyVersion }}
-
-      - name: Run tests
-        run: make dev install test
-
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
+    with:
+      os: ubuntu-latest
+      pyVersion: ${{ matrix.pyVersion }}
+
+  tests-windows:
+      uses: ./.github/workflows/test.yml
+      strategy:
+        fail-fast: false
+        matrix:
+          pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
+      with:
+        os: windows-latest
+        pyVersion: ${{ matrix.pyVersion }}
+          
   fmt:
     runs-on: ubuntu-latest
 
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 07fb48c5a..c40d1e82f 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -199,8 +199,8 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist. Note that these are
-    # local to the Python SDK and not reused by other SDKs.
+    # Load cached credentials from disk if they exist.
+    # Note that these are local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -723,8 +723,6 @@ def inner() -> Dict[str, str]:
 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
 class ModelServingAuthProvider():
-    USER_CREDENTIALS = "user_credentials"
-
     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
 
     def __init__(self, credential_type: Optional[str]):
@@ -733,7 +731,7 @@ def __init__(self, credential_type: Optional[str]):
         self.refresh_duration = 300 # 300 Seconds
         self.credential_type = credential_type
 
-    def should_fetch_model_serving_environment_oauth() -> bool:
+    def should_fetch_model_serving_environment_oauth(self) -> bool:
         """
         Check whether this is the model serving environment
         Additionally check if the oauth token file path exists
@@ -742,15 +740,15 @@ def should_fetch_model_serving_environment_oauth() -> bool:
         is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
                                    or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
         return (is_in_model_serving_env == "true"
-                and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
 
-    def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
+    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
         # Use Cached value if it is valid
         if self.current_token is not None and self.expiry_time > time.time():
             return self.current_token
 
         try:
-            with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
+            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
                 oauth_dict = json.load(f)
                 self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
                 self.expiry_time = time.time() + self.refresh_duration
@@ -760,32 +758,21 @@ def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
                 logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
                                exc_info=e)
                 time.sleep(0.5)
-                return self._get_model_dependency_oauth_token(should_retry=False)
+                return self.get_model_dependency_oauth_token(should_retry=False)
             else:
                 raise RuntimeError(
                     "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
                 ) from e
         return self.current_token
 
-    def _get_invokers_token(self):
-        current_thread = threading.current_thread()
-        thread_data = current_thread.__dict__
-        invokers_token = None
-        if "invokers_token" in thread_data:
-            invokers_token = thread_data["invokers_token"]
-
-        if invokers_token is None:
-            raise RuntimeError("Unable to read Invokers Token in Databricks Model Serving")
-
-        return invokers_token
-
     def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
-        if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
+        if not self.should_fetch_model_serving_environment_oauth():
             return None
 
         # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR
         host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
             "DB_MODEL_SERVING_HOST_URL")
+        token = self.get_model_dependency_oauth_token()
 
         if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
             return (host, self._get_invokers_token())
@@ -796,7 +783,10 @@ def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
 def model_serving_auth_visitor(cfg: 'Config',
                                credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
     try:
-        model_serving_auth_provider = ModelServingAuthProvider(credential_type)
+        model_serving_auth_provider = ModelServingAuthProvider()
+        if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth():
+            logger.debug("model-serving: Not in Databricks Model Serving, skipping")
+            return None
         host, token = model_serving_auth_provider.get_databricks_host_token()
         if token is None:
             raise ValueError(
@@ -807,6 +797,7 @@ def model_serving_auth_visitor(cfg: 'Config',
     except Exception as e:
         logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
         return None
+
     logger.info("Using Databricks Model Serving Authentication")
 
     def inner() -> Dict[str, str]:
diff --git a/setup.py b/setup.py
index 812682684..bc7327e31 100644
--- a/setup.py
+++ b/setup.py
@@ -17,8 +17,10 @@
       extras_require={"dev": ["pytest", "pytest-cov", "pytest-xdist", "pytest-mock",
                               "yapf", "pycodestyle", "autoflake", "isort", "wheel",
                               "ipython", "ipywidgets", "requests-mock", "pyfakefs",
-                              "databricks-connect"],
-                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"]},
+                              "databricks-connect", "pytest-rerunfailures", "openai", 
+                              'langchain-openai; python_version > "3.7"', "httpx"],
+                      "notebook": ["ipython>=8,<9", "ipywidgets>=8,<9"],
+                      "openai": ["openai", 'langchain-openai; python_version > "3.7"', "httpx"]},
       author="Sync Computing",
       author_email="info@synccomputing.com",
       description="Sync Fork Databricks SDK for Python (Beta)",
diff --git a/tests/integration/test_auth.py b/tests/integration/test_auth.py
index 0bf7f951d..3ee271778 100644
--- a/tests/integration/test_auth.py
+++ b/tests/integration/test_auth.py
@@ -133,15 +133,16 @@ def _test_runtime_auth_from_jobs_inner(w, env_or_skip, random, dbr_versions, lib
 
     tasks = []
     for v in dbr_versions:
-        t = Task(task_key=f'test_{v.key.replace(".", "_")}',
-                 notebook_task=NotebookTask(notebook_path=notebook_path),
-                 new_cluster=ClusterSpec(
-                     spark_version=v.key,
-                     num_workers=1,
-                     instance_pool_id=instance_pool_id,
-                     # GCP uses "custom" data security mode by default, which does not support UC.
-                     data_security_mode=DataSecurityMode.SINGLE_USER),
-                 libraries=[library])
+        t = Task(
+            task_key=f'test_{v.key.replace(".", "_")}',
+            notebook_task=NotebookTask(notebook_path=notebook_path),
+            new_cluster=ClusterSpec(
+                spark_version=v.key,
+                num_workers=1,
+                instance_pool_id=instance_pool_id,
+                # GCP uses "custom" data security mode by default, which does not support UC.
+                data_security_mode=DataSecurityMode.SINGLE_USER),
+            libraries=[library])
         tasks.append(t)
 
     waiter = w.jobs.submit(run_name=f'Runtime Native Auth {random(10)}', tasks=tasks)
diff --git a/tests/test_base_client.py b/tests/test_base_client.py
index e9e7324a9..16a8ecfc4 100644
--- a/tests/test_base_client.py
+++ b/tests/test_base_client.py
@@ -1,18 +1,22 @@
+import io
+import random
 from http.server import BaseHTTPRequestHandler
-from typing import Iterator, List
+from typing import Callable, Iterator, List, Optional, Tuple, Type
+from unittest.mock import Mock
 
 import pytest
-import requests
+from requests import PreparedRequest, Response, Timeout
 
 from databricks.sdk import errors, useragent
-from databricks.sdk._base_client import _BaseClient, _StreamingResponse
+from databricks.sdk._base_client import (_BaseClient, _RawResponse,
+                                         _StreamingResponse)
 from databricks.sdk.core import DatabricksError
 
 from .clock import FakeClock
 from .fixture_server import http_fixture_server
 
 
-class DummyResponse(requests.Response):
+class DummyResponse(_RawResponse):
     _content: Iterator[bytes]
     _closed: bool = False
 
@@ -276,3 +280,215 @@ def inner(h: BaseHTTPRequestHandler):
         assert 'foo' in res
 
     assert len(requests) == 2
+
+
+@pytest.mark.parametrize(
+    'chunk_size,expected_chunks,data_size',
+    [
+        (5, 20, 100), # 100 / 5 bytes per chunk = 20 chunks
+        (10, 10, 100), # 100 / 10 bytes per chunk = 10 chunks
+        (200, 1, 100), # 100 / 200 bytes per chunk = 1 chunk
+    ])
+def test_streaming_response_chunk_size(chunk_size, expected_chunks, data_size):
+    rng = random.Random(42)
+    test_data = bytes(rng.getrandbits(8) for _ in range(data_size))
+
+    content_chunks = []
+    mock_response = Mock(spec=_RawResponse)
+
+    def mock_iter_content(chunk_size: int, decode_unicode: bool):
+        # Simulate how requests would chunk the data.
+        for i in range(0, len(test_data), chunk_size):
+            chunk = test_data[i:i + chunk_size]
+            content_chunks.append(chunk) # track chunks for verification
+            yield chunk
+
+    mock_response.iter_content = mock_iter_content
+    stream = _StreamingResponse(mock_response)
+    stream.set_chunk_size(chunk_size)
+
+    # Read all data one byte at a time.
+    received_data = b""
+    while True:
+        chunk = stream.read(1)
+        if not chunk:
+            break
+        received_data += chunk
+
+    assert received_data == test_data # all data was received correctly
+    assert len(content_chunks) == expected_chunks # correct number of chunks
+    assert all(len(c) <= chunk_size for c in content_chunks) # chunks don't exceed size
+
+
+def test_is_seekable_stream():
+    client = _BaseClient()
+
+    # Test various input types that are not streams.
+    assert not client._is_seekable_stream(None) # None
+    assert not client._is_seekable_stream("string data") # str
+    assert not client._is_seekable_stream(b"binary data") # bytes
+    assert not client._is_seekable_stream(["list", "data"]) # list
+    assert not client._is_seekable_stream(42) # int
+
+    # Test non-seekable stream.
+    non_seekable = io.BytesIO(b"test data")
+    non_seekable.seekable = lambda: False
+    assert not client._is_seekable_stream(non_seekable)
+
+    # Test seekable streams.
+    assert client._is_seekable_stream(io.BytesIO(b"test data")) # BytesIO
+    assert client._is_seekable_stream(io.StringIO("test data")) # StringIO
+
+    # Test file objects.
+    with open(__file__, 'rb') as f:
+        assert client._is_seekable_stream(f) # File object
+
+    # Test custom seekable stream.
+    class CustomSeekableStream(io.IOBase):
+
+        def seekable(self):
+            return True
+
+        def seek(self, offset, whence=0):
+            return 0
+
+        def tell(self):
+            return 0
+
+    assert client._is_seekable_stream(CustomSeekableStream())
+
+
+class RetryTestCase:
+
+    def __init__(self, data_provider: Callable, offset: Optional[int], expected_failure: bool,
+                 expected_result: bytes):
+        self._data_provider = data_provider
+        self._offset = offset
+        self._expected_result = expected_result
+        self._expected_failure = expected_failure
+
+    def get_data(self):
+        data = self._data_provider()
+        if self._offset is not None:
+            data.seek(self._offset)
+        return data
+
+    @classmethod
+    def create_non_seekable_stream(cls, data: bytes):
+        result = io.BytesIO(data)
+        result.seekable = lambda: False # makes the stream appear non-seekable
+        return result
+
+
+class MockSession:
+
+    def __init__(self, failure_count: int, failure_provider: Callable[[], Response]):
+        self._failure_count = failure_count
+        self._received_requests: List[bytes] = []
+        self._failure_provider = failure_provider
+
+    @classmethod
+    def raise_timeout_exception(cls):
+        raise Timeout("Fake timeout")
+
+    @classmethod
+    def return_retryable_response(cls):
+        # fill response fields so that logging does not fail
+        response = Response()
+        response._content = b''
+        response.status_code = 429
+        response.headers = {'Retry-After': '1'}
+        response.url = 'http://test.com/'
+
+        response.request = PreparedRequest()
+        response.request.url = response.url
+        response.request.method = 'POST'
+        response.request.headers = None
+        response.request.body = b''
+        return response
+
+    # following the signature of Session.request()
+    def request(self,
+                method,
+                url,
+                params=None,
+                data=None,
+                headers=None,
+                cookies=None,
+                files=None,
+                auth=None,
+                timeout=None,
+                allow_redirects=True,
+                proxies=None,
+                hooks=None,
+                stream=None,
+                verify=None,
+                cert=None,
+                json=None):
+        request_body = data.read()
+
+        if isinstance(request_body, str):
+            request_body = request_body.encode('utf-8') # to be able to compare with expected bytes
+
+        self._received_requests.append(request_body)
+        if self._failure_count > 0:
+            self._failure_count -= 1
+            return self._failure_provider()
+        else:
+            # fill response fields so that logging does not fail
+            response = Response()
+            response._content = b''
+            response.status_code = 200
+            response.reason = 'OK'
+            response.url = url
+
+            response.request = PreparedRequest()
+            response.request.url = url
+            response.request.method = method
+            response.request.headers = headers
+            response.request.body = data
+            return response
+
+
+@pytest.mark.parametrize(
+    'test_case',
+    [
+        # bytes -> BytesIO
+        RetryTestCase(lambda: b"0123456789", None, False, b"0123456789"),
+        # str -> BytesIO
+        RetryTestCase(lambda: "0123456789", None, False, b"0123456789"),
+        # BytesIO directly
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), None, False, b"0123456789"),
+        # BytesIO directly with offset
+        RetryTestCase(lambda: io.BytesIO(b"0123456789"), 4, False, b"456789"),
+        # StringIO
+        RetryTestCase(lambda: io.StringIO("0123456789"), None, False, b"0123456789"),
+        # Non-seekable
+        RetryTestCase(lambda: RetryTestCase.create_non_seekable_stream(b"0123456789"), None, True,
+                      b"0123456789")
+    ])
+@pytest.mark.parametrize('failure', [[MockSession.raise_timeout_exception, Timeout],
+                                     [MockSession.return_retryable_response, errors.TooManyRequests]])
+def test_rewind_seekable_stream(test_case: RetryTestCase, failure: Tuple[Callable[[], Response], Type]):
+    failure_count = 2
+
+    data = test_case.get_data()
+
+    session = MockSession(failure_count, failure[0])
+    client = _BaseClient()
+    client._session = session
+
+    def do():
+        client.do('POST', 'test.com/foo', data=data)
+
+    if test_case._expected_failure:
+        expected_attempts_made = 1
+        exception_class = failure[1]
+        with pytest.raises(exception_class):
+            do()
+    else:
+        expected_attempts_made = failure_count + 1
+        do()
+
+    assert session._received_requests == [test_case._expected_result for _ in range(expected_attempts_made)]
diff --git a/tests/test_core.py b/tests/test_core.py
index 1cca428cb..32431172b 100644
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -8,7 +8,7 @@
 
 import pytest
 
-from databricks.sdk import WorkspaceClient, errors
+from databricks.sdk import WorkspaceClient, errors, useragent
 from databricks.sdk.core import ApiClient, Config, DatabricksError
 from databricks.sdk.credentials_provider import (CliTokenSource,
                                                  CredentialsProvider,
@@ -178,6 +178,11 @@ class MockUname:
         def system(self):
             return 'TestOS'
 
+    # Clear all environment variables and cached CICD provider.
+    for k in os.environ:
+        monkeypatch.delenv(k, raising=False)
+    useragent._cicd_provider = None
+
     monkeypatch.setattr(platform, 'python_version', lambda: '3.0.0')
     monkeypatch.setattr(platform, 'uname', MockUname)
     monkeypatch.setenv('DATABRICKS_SDK_UPSTREAM', "upstream-product")
diff --git a/tests/test_data_plane.py b/tests/test_data_plane.py
index a74658964..1eac92382 100644
--- a/tests/test_data_plane.py
+++ b/tests/test_data_plane.py
@@ -2,7 +2,7 @@
 
 from databricks.sdk.data_plane import DataPlaneService
 from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo
+from databricks.sdk.service.serving import DataPlaneInfo
 
 info = DataPlaneInfo(authorization_details="authDetails", endpoint_url="url")
 
diff --git a/tests/test_model_serving_auth.py b/tests/test_model_serving_auth.py
index fa68b3a5c..49aed33a5 100644
--- a/tests/test_model_serving_auth.py
+++ b/tests/test_model_serving_auth.py
@@ -1,8 +1,10 @@
+import threading
 import time
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.credentials_provider import ModelServingUserCredentials
 
 from .conftest import raises
 
@@ -46,15 +48,24 @@ def test_model_serving_auth(env_values, del_env_values, oauth_file_name, monkeyp
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token'
 
 
-@pytest.mark.parametrize("env_values, oauth_file_name", [
-    ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
-    ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
-      ], "invalid_file_name"), # In Model Serving and Invalid File Name
-    ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
-])
+@pytest.mark.parametrize(
+    "env_values, oauth_file_name",
+    [
+        ([], "invalid_file_name"), # Not in Model Serving and Invalid File Name
+        ([('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([('IS_IN_DATABRICKS_MODEL_SERVING_ENV', 'true')
+          ], "invalid_file_name"), # In Model Serving and Invalid File Name
+        ([], "tests/testdata/model-serving-test-token") # Not in Model Serving and Valid File Name
+    ])
 @raises(default_auth_base_error_message)
 def test_model_serving_auth_errors(env_values, oauth_file_name, monkeypatch):
+    # Guarantee that the test defaults to env variables rather than the config file.
+    #
+    # TODO: this is hacky and we should find a better way to tell the config
+    # that it should not read from the config file.
+    monkeypatch.setenv('DATABRICKS_CONFIG_FILE', 'x')
+
     for (env_name, env_value) in env_values:
         monkeypatch.setenv(env_name, env_value)
     monkeypatch.setattr(
@@ -102,3 +113,49 @@ def test_model_serving_auth_refresh(monkeypatch, mocker):
     assert (cfg.host == 'x')
     # Read V2 now
     assert headers.get("Authorization") == 'Bearer databricks_sdk_unit_test_token_v2'
+
+
+def test_agent_user_credentials(monkeypatch, mocker):
+    monkeypatch.setenv('IS_IN_DB_MODEL_SERVING_ENV', 'true')
+    monkeypatch.setenv('DB_MODEL_SERVING_HOST_URL', 'x')
+    monkeypatch.setattr(
+        "databricks.sdk.credentials_provider.ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH",
+        "tests/testdata/model-serving-test-token")
+
+    invokers_token_val = "databricks_invokers_token"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'model_serving_user_credentials'
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+    # Test updates of invokers token
+    invokers_token_val = "databricks_invokers_token_v2"
+    current_thread = threading.current_thread()
+    thread_data = current_thread.__dict__
+    thread_data["invokers_token"] = invokers_token_val
+
+    headers = cfg.authenticate()
+    assert (cfg.host == 'x')
+    assert headers.get("Authorization") == f'Bearer {invokers_token_val}'
+
+
+# If this credential strategy is used in a non-model-serving environment, use the default credential strategy instead
+def test_agent_user_credentials_in_non_model_serving_environments(monkeypatch):
+
+    monkeypatch.setenv('DATABRICKS_HOST', 'x')
+    monkeypatch.setenv('DATABRICKS_TOKEN', 'token')
+
+    cfg = Config(credentials_strategy=ModelServingUserCredentials())
+    assert cfg.auth_type == 'pat' # Auth type is PAT as it is no longer in a model serving environment
+
+    headers = cfg.authenticate()
+
+    assert (cfg.host == 'https://x')
+    assert headers.get("Authorization") == f'Bearer token'
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index 1858c66cb..72e1e9a60 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -1,8 +1,10 @@
 import sys
+from io import BytesIO
 
 import pytest
 
 from databricks.sdk.core import Config
+from databricks.sdk.service.serving import ExternalFunctionRequestHttpMethod
 
 
 def test_open_ai_client(monkeypatch):
@@ -28,3 +30,22 @@ def test_langchain_open_ai_client(monkeypatch):
 
     assert client.openai_api_base == "https://test_host/serving-endpoints"
     assert client.model_name == "databricks-meta-llama-3-1-70b-instruct"
+
+
+def test_http_request(w, requests_mock):
+    headers = {"Accept": "text/plain", "Content-Type": "application/json", }
+    mocked_url = "http://localhost/api/2.0/external-function"
+    blob_response = BytesIO(b"The request was successful")
+
+    requests_mock.post(mocked_url,
+                       request_headers=headers,
+                       content=blob_response.getvalue(),
+                       status_code=200,
+                       )
+    response = w.serving_endpoints.http_request(conn="test_conn",
+                                                method=ExternalFunctionRequestHttpMethod.GET,
+                                                path="test_path")
+    assert requests_mock.call_count == 1
+    assert requests_mock.called
+    assert response.status_code == 200 # Verify the response status
+    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
index 5083d9908..ba6f694f5 100644
--- a/tests/test_user_agent.py
+++ b/tests/test_user_agent.py
@@ -1,3 +1,5 @@
+import os
+
 import pytest
 
 from databricks.sdk.version import __version__
@@ -40,3 +42,45 @@ def test_user_agent_with_partner(user_agent):
     user_agent.with_partner('differenttest')
     assert 'partner/test' in user_agent.to_string()
     assert 'partner/differenttest' in user_agent.to_string()
+
+
+@pytest.fixture(scope="function")
+def clear_cicd():
+    # Save and clear env vars.
+    original_env = os.environ.copy()
+    os.environ.clear()
+
+    # Clear cached CICD provider.
+    from databricks.sdk import useragent
+    useragent._cicd_provider = None
+
+    yield
+
+    # Restore env vars.
+    os.environ = original_env
+
+
+def test_user_agent_cicd_no_provider(clear_cicd):
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd' not in user_agent
+
+
+def test_user_agent_cicd_one_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent
+
+
+def test_user_agent_cicd_two_provider(clear_cicd):
+    os.environ['GITHUB_ACTIONS'] = 'true'
+    os.environ['GITLAB_CI'] = 'true'
+
+    from databricks.sdk import useragent
+    user_agent = useragent.to_string()
+
+    assert 'cicd/github' in user_agent
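
The `test_rewind_seekable_stream` cases added above pin down one retry behavior: when a request with a streaming body fails with a retryable error (a timeout or a 429), the client must rewind the body to its original offset before resending, and a non-seekable stream must fail after the first attempt. A minimal sketch of that idea, not the SDK's actual `_BaseClient` implementation (`session`, `url`, and `max_attempts` are illustrative):

```python
def post_with_retry(session, url, data, max_attempts=3):
    # Remember the starting offset so the body can be replayed on retry.
    can_rewind = hasattr(data, "seekable") and data.seekable()
    start = data.tell() if can_rewind else None
    response = None
    for attempt in range(max_attempts):
        if attempt > 0:
            if not can_rewind:
                # Mirrors the expected_failure cases: no safe way to resend the body.
                raise RuntimeError("cannot retry: request body is not seekable")
            data.seek(start)  # rewind so the full body is sent again
        response = session.request("POST", url, data=data)
        if response.status_code != 429:  # only 429 is treated as retryable here
            break
    return response
```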

From 5c9705fe253c01513e7fc97ee50ac33f07b2c883 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 16:11:58 -0700
Subject: [PATCH 123/136] fix credentials_provider.py after merge

---
 databricks/sdk/credentials_provider.py | 37 ++++++++++++++++----------
 1 file changed, 23 insertions(+), 14 deletions(-)

diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index c40d1e82f..07fb48c5a 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -199,8 +199,8 @@ def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if not client_id:
         client_id = 'databricks-cli'
 
-    # Load cached credentials from disk if they exist.
-    # Note that these are local to the Python SDK and not reused by other SDKs.
+    # Load cached credentials from disk if they exist. Note that these are
+    # local to the Python SDK and not reused by other SDKs.
     oidc_endpoints = cfg.oidc_endpoints
     redirect_url = 'http://localhost:8020'
     token_cache = TokenCache(host=cfg.host,
@@ -723,6 +723,8 @@ def inner() -> Dict[str, str]:
 # This Code is derived from Mlflow DatabricksModelServingConfigProvider
 # https://github.com/mlflow/mlflow/blob/1219e3ef1aac7d337a618a352cd859b336cf5c81/mlflow/legacy_databricks_cli/configure/provider.py#L332
 class ModelServingAuthProvider():
+    USER_CREDENTIALS = "user_credentials"
+
     _MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH = "/var/credentials-secret/model-dependencies-oauth-token"
 
     def __init__(self, credential_type: Optional[str]):
@@ -731,7 +733,7 @@ def __init__(self, credential_type: Optional[str]):
         self.refresh_duration = 300 # 300 Seconds
         self.credential_type = credential_type
 
-    def should_fetch_model_serving_environment_oauth(self) -> bool:
+    def should_fetch_model_serving_environment_oauth() -> bool:
         """
         Check whether this is the model serving environment
         Additionally check if the oauth token file path exists
@@ -740,15 +742,15 @@ def should_fetch_model_serving_environment_oauth(self) -> bool:
         is_in_model_serving_env = (os.environ.get("IS_IN_DB_MODEL_SERVING_ENV")
                                    or os.environ.get("IS_IN_DATABRICKS_MODEL_SERVING_ENV") or "false")
         return (is_in_model_serving_env == "true"
-                and os.path.isfile(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
+                and os.path.isfile(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH))
 
-    def get_model_dependency_oauth_token(self, should_retry=True) -> str:
+    def _get_model_dependency_oauth_token(self, should_retry=True) -> str:
         # Use Cached value if it is valid
         if self.current_token is not None and self.expiry_time > time.time():
             return self.current_token
 
         try:
-            with open(self._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
+            with open(ModelServingAuthProvider._MODEL_DEPENDENCY_OAUTH_TOKEN_FILE_PATH) as f:
                 oauth_dict = json.load(f)
                 self.current_token = oauth_dict["OAUTH_TOKEN"][0]["oauthTokenValue"]
                 self.expiry_time = time.time() + self.refresh_duration
@@ -758,21 +760,32 @@ def get_model_dependency_oauth_token(self, should_retry=True) -> str:
                 logger.warning("Unable to read oauth token on first attmept in Model Serving Environment",
                                exc_info=e)
                 time.sleep(0.5)
-                return self.get_model_dependency_oauth_token(should_retry=False)
+                return self._get_model_dependency_oauth_token(should_retry=False)
             else:
                 raise RuntimeError(
                     "Unable to read OAuth credentials from the file mounted in Databricks Model Serving"
                 ) from e
         return self.current_token
 
+    def _get_invokers_token(self):
+        current_thread = threading.current_thread()
+        thread_data = current_thread.__dict__
+        invokers_token = None
+        if "invokers_token" in thread_data:
+            invokers_token = thread_data["invokers_token"]
+
+        if invokers_token is None:
+            raise RuntimeError("Unable to read Invokers Token in Databricks Model Serving")
+
+        return invokers_token
+
     def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
-        if not self.should_fetch_model_serving_environment_oauth():
+        if not ModelServingAuthProvider.should_fetch_model_serving_environment_oauth():
             return None
 
         # read from DB_MODEL_SERVING_HOST_ENV_VAR if available otherwise MODEL_SERVING_HOST_ENV_VAR
         host = os.environ.get("DATABRICKS_MODEL_SERVING_HOST_URL") or os.environ.get(
             "DB_MODEL_SERVING_HOST_URL")
-        token = self.get_model_dependency_oauth_token()
 
         if self.credential_type == ModelServingAuthProvider.USER_CREDENTIALS:
             return (host, self._get_invokers_token())
@@ -783,10 +796,7 @@ def get_databricks_host_token(self) -> Optional[Tuple[str, str]]:
 def model_serving_auth_visitor(cfg: 'Config',
                                credential_type: Optional[str] = None) -> Optional[CredentialsProvider]:
     try:
-        model_serving_auth_provider = ModelServingAuthProvider()
-        if not model_serving_auth_provider.should_fetch_model_serving_environment_oauth():
-            logger.debug("model-serving: Not in Databricks Model Serving, skipping")
-            return None
+        model_serving_auth_provider = ModelServingAuthProvider(credential_type)
         host, token = model_serving_auth_provider.get_databricks_host_token()
         if token is None:
             raise ValueError(
@@ -797,7 +807,6 @@ def model_serving_auth_visitor(cfg: 'Config',
     except Exception as e:
         logger.warning("Unable to get auth from Databricks Model Serving Environment", exc_info=e)
         return None
-
     logger.info("Using Databricks Model Serving Authentication")
 
     def inner() -> Dict[str, str]:
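
The restored `_get_invokers_token` reads a per-thread value, so a caller inside Databricks Model Serving is expected to stash the invokers token on the current thread before authenticating. A hedged usage sketch based on the tests earlier in this series (the token value is a placeholder, and the platform is assumed to have set `IS_IN_DB_MODEL_SERVING_ENV`, `DB_MODEL_SERVING_HOST_URL`, and the mounted OAuth token file):

```python
import threading

from databricks.sdk.core import Config
from databricks.sdk.credentials_provider import ModelServingUserCredentials

# The provider looks up "invokers_token" in the current thread's __dict__.
threading.current_thread().__dict__["invokers_token"] = "example-invokers-token"

cfg = Config(credentials_strategy=ModelServingUserCredentials())
headers = cfg.authenticate()
# headers == {"Authorization": "Bearer example-invokers-token"}
```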

From 491043a240e9ac06679dc8c54d8d87c3ad7202df Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 17:08:14 -0700
Subject: [PATCH 124/136] add missing termination reason code

---
 databricks/sdk/service/compute.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/databricks/sdk/service/compute.py b/databricks/sdk/service/compute.py
index 63a971b73..d4596e63e 100755
--- a/databricks/sdk/service/compute.py
+++ b/databricks/sdk/service/compute.py
@@ -7616,6 +7616,8 @@ class TerminationReasonCode(Enum):
     INSTANCE_UNREACHABLE = 'INSTANCE_UNREACHABLE'
     INTERNAL_ERROR = 'INTERNAL_ERROR'
     INVALID_ARGUMENT = 'INVALID_ARGUMENT'
+    # [PROD-2800] Add missing termination reason code
+    INVALID_INSTANCE_PLACEMENT_PROTOCOL = 'INVALID_INSTANCE_PLACEMENT_PROTOCOL'
     INVALID_SPARK_IMAGE = 'INVALID_SPARK_IMAGE'
     IP_EXHAUSTION_FAILURE = 'IP_EXHAUSTION_FAILURE'
     JOB_FINISHED = 'JOB_FINISHED'
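
A short usage sketch for the new enum member (the cluster ID is hypothetical; `clusters.get` and `termination_reason` follow the SDK's compute service API shown elsewhere in this history):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.compute import TerminationReasonCode

w = WorkspaceClient()
cluster = w.clusters.get(cluster_id="0123-456789-example")  # hypothetical ID
reason = cluster.termination_reason
if reason is not None and reason.code == TerminationReasonCode.INVALID_INSTANCE_PLACEMENT_PROTOCOL:
    print("Terminated: invalid instance placement protocol.")
```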

From ecf01ffb2c20ca36024b4a3096c28e1039f10565 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 17:18:53 -0700
Subject: [PATCH 125/136] delete integration test and commit message workflows

---
 .github/workflows/integration-tests.yml | 90 -------------------------
 .github/workflows/message.yml           | 32 ---------
 2 files changed, 122 deletions(-)
 delete mode 100644 .github/workflows/integration-tests.yml
 delete mode 100644 .github/workflows/message.yml

diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml
deleted file mode 100644
index c308cc03c..000000000
--- a/.github/workflows/integration-tests.yml
+++ /dev/null
@@ -1,90 +0,0 @@
-name: Integration Tests
-
-on:
-
-  pull_request:
-    types: [opened, synchronize]
-
-  merge_group:
-
-
-jobs:
-  check-token:
-    name: Check secrets access
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    environment: "test-trigger-is"
-    outputs:
-      has_token: ${{ steps.set-token-status.outputs.has_token }}
-    steps:
-      - name: Check if DECO_WORKFLOW_TRIGGER_APP_ID is set
-        id: set-token-status
-        run: |
-            if [ -z "${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}" ]; then
-              echo "DECO_WORKFLOW_TRIGGER_APP_ID is empty. User has no access to secrets."
-              echo "::set-output name=has_token::false"
-            else
-              echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
-              echo "::set-output name=has_token::true"
-            fi
-
-  trigger-tests:
-    name: Trigger Tests
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    needs: check-token
-    if: github.event_name == 'pull_request'  && needs.check-token.outputs.has_token == 'true'
-    environment: "test-trigger-is"
-
-    steps:
-    - uses: actions/checkout@v3
-
-    - name: Generate GitHub App Token
-      id: generate-token
-      uses: actions/create-github-app-token@v1
-      with:
-        app-id: ${{ secrets.DECO_WORKFLOW_TRIGGER_APP_ID }}
-        private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
-        owner: ${{ secrets.ORG_NAME }}
-        repositories: ${{secrets.REPO_NAME}}
-
-    - name: Trigger Workflow in Another Repo
-      env:
-        GH_TOKEN: ${{ steps.generate-token.outputs.token }}
-      run: |
-        gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
-        --ref main \
-        -f pull_request_number=${{ github.event.pull_request.number }} \
-        -f commit_sha=${{ github.event.pull_request.head.sha }}
-
-  # Statuses and checks apply to specific commits (by hash).
-  # Enforcement of required checks is done both at the PR level and the merge queue level.
-  # In case of multiple commits in a single PR, the hash of the squashed commit
-  # will not match the one for the latest (approved) commit in the PR.
-  # We auto approve the check for the merge queue for two reasons:
-  # * Queue times out due to duration of tests.
-  # * Avoid running integration tests twice, since it was already run at the tip of the branch before squashing.
-  auto-approve:
-    if: github.event_name == 'merge_group'
-
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    steps:
-      - name: Mark Check
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        shell: bash
-        run: |
-            gh api -X POST -H "Accept: application/vnd.github+json" \
-              -H "X-GitHub-Api-Version: 2022-11-28" \
-              /repos/${{ github.repository }}/statuses/${{ github.sha }} \
-              -f 'state=success' \
-              -f 'context=Integration Tests Check'
diff --git a/.github/workflows/message.yml b/.github/workflows/message.yml
deleted file mode 100644
index 057556895..000000000
--- a/.github/workflows/message.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-name: Validate Commit Message
-
-on:
-  pull_request:
-    types: [opened, synchronize, edited]
-  merge_group:
-    types: [checks_requested]
-
-jobs:
-  validate:
-    runs-on: ubuntu-latest
-    # GitHub required checks are shared between PRs and the Merge Queue.
-    # Since there is no PR title on Merge Queue, we need to trigger and
-    # skip this test for Merge Queue to succeed.
-    if: github.event_name == 'pull_request'
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v3
-        with:
-          fetch-depth: 0
-
-      - name: Validate Tag
-        env:
-          TITLE: ${{ github.event.pull_request.title }}
-        run: |
-          TAG=$(echo "$TITLE" | sed -ne 's/\[\(.*\)\].*/\1/p')
-          if grep -q "tag: \"\[$TAG\]\"" .codegen/changelog_config.yml; then 
-            echo "Valid tag found: [$TAG]"
-          else 
-            echo "Invalid or missing tag in commit message: [$TAG]" 
-            exit 1
-          fi
\ No newline at end of file

From c9ca6b31c7c831b747500544868e002ac62f3fc8 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 18:13:18 -0700
Subject: [PATCH 126/136] fix merge issues

---
 databricks/sdk/_base_client.py               |   1 +
 databricks/sdk/credentials_provider.py       |   1 +
 databricks/sdk/service/dashboards.py         | 187 -------------------
 databricks/sdk/service/iam.py                |   4 +-
 databricks/sdk/service/jobs.py               |   5 -
 databricks/sdk/service/serving.py            |  21 ---
 databricks/sdk/service/sql.py                | 138 +++++++++-----
 docs/account/iam/workspace_assignment.rst    |   2 +-
 docs/workspace/dashboards/index.rst          |   2 +-
 docs/workspace/serving/serving_endpoints.rst |   3 -
 tests/test_open_ai_mixin.py                  |   2 +-
 11 files changed, 94 insertions(+), 272 deletions(-)

diff --git a/databricks/sdk/_base_client.py b/databricks/sdk/_base_client.py
index 58fcb10a5..f0950f656 100644
--- a/databricks/sdk/_base_client.py
+++ b/databricks/sdk/_base_client.py
@@ -276,6 +276,7 @@ def _perform(self,
         error = self._error_parser.get_api_error(response)
         if error is not None:
             raise error from None
+
         return response
 
     def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 07fb48c5a..24d01f678 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -189,6 +189,7 @@ def token() -> Token:
 def external_browser(cfg: 'Config') -> Optional[CredentialsProvider]:
     if cfg.auth_type != 'external-browser':
         return None
+
     client_id, client_secret = None, None
     if cfg.client_id:
         client_id = cfg.client_id
diff --git a/databricks/sdk/service/dashboards.py b/databricks/sdk/service/dashboards.py
index c81159cca..ba01ba41d 100755
--- a/databricks/sdk/service/dashboards.py
+++ b/databricks/sdk/service/dashboards.py
@@ -1804,193 +1804,6 @@ def start_conversation_and_wait(self, space_id: str, content: str,
         return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
 
 
-class GenieAPI:
-    """Genie provides a no-code experience for business users, powered by AI/BI. Analysts set up spaces that
-    business users can use to ask questions using natural language. Genie uses data registered to Unity
-    Catalog and requires at least CAN USE permission on a Pro or Serverless SQL warehouse. Also, Databricks
-    Assistant must be enabled."""
-
-    def __init__(self, api_client):
-        self._api = api_client
-
-    def wait_get_message_genie_completed(
-            self,
-            conversation_id: str,
-            message_id: str,
-            space_id: str,
-            timeout=timedelta(minutes=20),
-            callback: Optional[Callable[[GenieMessage], None]] = None) -> GenieMessage:
-        deadline = time.time() + timeout.total_seconds()
-        target_states = (MessageStatus.COMPLETED, )
-        failure_states = (MessageStatus.FAILED, )
-        status_message = 'polling...'
-        attempt = 1
-        while time.time() < deadline:
-            poll = self.get_message(conversation_id=conversation_id, message_id=message_id, space_id=space_id)
-            status = poll.status
-            status_message = f'current status: {status}'
-            if status in target_states:
-                return poll
-            if callback:
-                callback(poll)
-            if status in failure_states:
-                msg = f'failed to reach COMPLETED, got {status}: {status_message}'
-                raise OperationFailed(msg)
-            prefix = f"conversation_id={conversation_id}, message_id={message_id}, space_id={space_id}"
-            sleep = attempt
-            if sleep > 10:
-                # sleep 10s max per attempt
-                sleep = 10
-            _LOG.debug(f'{prefix}: ({status}) {status_message} (sleeping ~{sleep}s)')
-            time.sleep(sleep + random.random())
-            attempt += 1
-        raise TimeoutError(f'timed out after {timeout}: {status_message}')
-
-    def create_message(self, space_id: str, conversation_id: str, content: str) -> Wait[GenieMessage]:
-        """Create conversation message.
-        
-        Create new message in [conversation](:method:genie/startconversation). The AI response uses all
-        previously created messages in the conversation to respond.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the conversation is started.
-        :param conversation_id: str
-          The ID associated with the conversation.
-        :param content: str
-          User message content.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        """
-        body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages',
-            body=body,
-            headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieMessage.from_dict(op_response),
-                    conversation_id=conversation_id,
-                    message_id=op_response['id'],
-                    space_id=space_id)
-
-    def create_message_and_wait(self,
-                                space_id: str,
-                                conversation_id: str,
-                                content: str,
-                                timeout=timedelta(minutes=20)) -> GenieMessage:
-        return self.create_message(content=content, conversation_id=conversation_id,
-                                   space_id=space_id).result(timeout=timeout)
-
-    def execute_message_query(self, space_id: str, conversation_id: str,
-                              message_id: str) -> GenieGetMessageQueryResultResponse:
-        """Execute SQL query in a conversation message.
-        
-        Execute the SQL query in the message.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'POST',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/execute-query',
-            headers=headers)
-        return GenieGetMessageQueryResultResponse.from_dict(res)
-
-    def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
-        """Get conversation message.
-        
-        Get message from conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where the target conversation is located.
-        :param conversation_id: str
-          The ID associated with the target conversation.
-        :param message_id: str
-          The ID associated with the target message from the identified conversation.
-        
-        :returns: :class:`GenieMessage`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}',
-            headers=headers)
-        return GenieMessage.from_dict(res)
-
-    def get_message_query_result(self, space_id: str, conversation_id: str,
-                                 message_id: str) -> GenieGetMessageQueryResultResponse:
-        """Get conversation message SQL query result.
-        
-        Get the result of SQL query if the message has a query attachment. This is only available if a message
-        has a query attachment and the message status is `EXECUTING_QUERY`.
-        
-        :param space_id: str
-          Genie space ID
-        :param conversation_id: str
-          Conversation ID
-        :param message_id: str
-          Message ID
-        
-        :returns: :class:`GenieGetMessageQueryResultResponse`
-        """
-
-        headers = {'Accept': 'application/json', }
-
-        res = self._api.do(
-            'GET',
-            f'/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/query-result',
-            headers=headers)
-        return GenieGetMessageQueryResultResponse.from_dict(res)
-
-    def start_conversation(self, space_id: str, content: str) -> Wait[GenieMessage]:
-        """Start conversation.
-        
-        Start a new conversation.
-        
-        :param space_id: str
-          The ID associated with the Genie space where you want to start a conversation.
-        :param content: str
-          The text of the message that starts the conversation.
-        
-        :returns:
-          Long-running operation waiter for :class:`GenieMessage`.
-          See :method:wait_get_message_genie_completed for more details.
-        """
-        body = {}
-        if content is not None: body['content'] = content
-        headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
-
-        op_response = self._api.do('POST',
-                                   f'/api/2.0/genie/spaces/{space_id}/start-conversation',
-                                   body=body,
-                                   headers=headers)
-        return Wait(self.wait_get_message_genie_completed,
-                    response=GenieStartConversationResponse.from_dict(op_response),
-                    conversation_id=op_response['conversation_id'],
-                    message_id=op_response['message_id'],
-                    space_id=space_id)
-
-    def start_conversation_and_wait(self, space_id: str, content: str,
-                                    timeout=timedelta(minutes=20)) -> GenieMessage:
-        return self.start_conversation(content=content, space_id=space_id).result(timeout=timeout)
-
-
 class LakeviewAPI:
     """These APIs provide specific management operations for Lakeview dashboards. Generic resource management can
     be done with Workspace API (import, export, get-status, list, delete)."""
diff --git a/databricks/sdk/service/iam.py b/databricks/sdk/service/iam.py
index b841bec8b..2f752d06c 100755
--- a/databricks/sdk/service/iam.py
+++ b/databricks/sdk/service/iam.py
@@ -1588,7 +1588,7 @@ class UpdateWorkspaceAssignments:
     """The ID of the user, service principal, or group."""
 
     workspace_id: Optional[int] = None
-    """The workspace ID for the account."""
+    """The workspace ID."""
 
     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceAssignments into a dictionary suitable for use as a JSON request body."""
@@ -3894,7 +3894,7 @@ def update(self,
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/databricks/sdk/service/jobs.py b/databricks/sdk/service/jobs.py
index 8220a0715..c0d4240bf 100755
--- a/databricks/sdk/service/jobs.py
+++ b/databricks/sdk/service/jobs.py
@@ -3861,9 +3861,6 @@ class Run:
     overriding_parameters: Optional[RunParameters] = None
     """The parameters used for this run."""
 
-    prev_page_token: Optional[str] = None
-    """A token that can be used to list the previous page of sub-resources."""
-
     queue_duration: Optional[int] = None
     """The time in milliseconds that the run has spent in the queue."""
 
@@ -3956,7 +3953,6 @@ def as_dict(self) -> dict:
         if self.original_attempt_run_id is not None:
             body['original_attempt_run_id'] = self.original_attempt_run_id
         if self.overriding_parameters: body['overriding_parameters'] = self.overriding_parameters.as_dict()
-        if self.prev_page_token is not None: body['prev_page_token'] = self.prev_page_token
         if self.queue_duration is not None: body['queue_duration'] = self.queue_duration
         if self.repair_history: body['repair_history'] = [v.as_dict() for v in self.repair_history]
         if self.run_duration is not None: body['run_duration'] = self.run_duration
@@ -4039,7 +4035,6 @@ def from_dict(cls, d: Dict[str, any]) -> Run:
                    number_in_job=d.get('number_in_job', None),
                    original_attempt_run_id=d.get('original_attempt_run_id', None),
                    overriding_parameters=_from_dict(d, 'overriding_parameters', RunParameters),
-                   prev_page_token=d.get('prev_page_token', None),
                    queue_duration=d.get('queue_duration', None),
                    repair_history=_repeated_dict(d, 'repair_history', RepairHistoryItem),
                    run_duration=d.get('run_duration', None),
diff --git a/databricks/sdk/service/serving.py b/databricks/sdk/service/serving.py
index c10e43572..938445863 100755
--- a/databricks/sdk/service/serving.py
+++ b/databricks/sdk/service/serving.py
@@ -657,10 +657,6 @@ class CreateServingEndpoint:
     config: Optional[EndpointCoreConfigInput] = None
     """The core config of the serving endpoint."""
 
-    ai_gateway: Optional[AiGatewayConfig] = None
-    """The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-    supported as of now."""
-
     rate_limits: Optional[List[RateLimit]] = None
     """Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
     Gateway to manage rate limits."""
@@ -2554,12 +2550,6 @@ class ServedModelInput:
     min_provisioned_throughput: Optional[int] = None
     """The minimum tokens per second that the endpoint can scale down to."""
 
-    max_provisioned_throughput: Optional[int] = None
-    """The maximum tokens per second that the endpoint can scale up to."""
-
-    min_provisioned_throughput: Optional[int] = None
-    """The minimum tokens per second that the endpoint can scale down to."""
-
     name: Optional[str] = None
     """The name of a served entity. It must be unique across an endpoint. A served entity name can
     consist of alphanumeric characters, dashes, and underscores. If not specified for an external
@@ -2574,14 +2564,6 @@ class ServedModelInput:
     scale-to-zero is enabled, the lower bound of the provisioned concurrency for each workload size
     is 0."""
 
-    workload_size: Optional[ServedModelInputWorkloadSize] = None
-    """The workload size of the served model. The workload size corresponds to a range of provisioned
-    concurrency that the compute will autoscale between. A single unit of provisioned concurrency
-    can process one request at a time. Valid workload sizes are "Small" (4 - 4 provisioned
-    concurrency), "Medium" (8 - 16 provisioned concurrency), and "Large" (16 - 64 provisioned
-    concurrency). If scale-to-zero is enabled, the lower bound of the provisioned concurrency for
-    each workload size will be 0."""
-
     workload_type: Optional[ServedModelInputWorkloadType] = None
     """The workload type of the served entity. The workload type selects which type of compute to use
     in the endpoint. The default value for this parameter is "CPU". For deep learning workloads, GPU
@@ -3431,9 +3413,6 @@ def create(self,
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
-        :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
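
Since `ai_gateway` is dropped from `create()`, here is a hedged sketch of a call using only surviving fields, including the remaining `min_provisioned_throughput`; the endpoint and model names are hypothetical:

    from databricks.sdk import WorkspaceClient
    from databricks.sdk.service import serving

    w = WorkspaceClient()
    w.serving_endpoints.create(
        name='my-endpoint',  # hypothetical endpoint name
        config=serving.EndpointCoreConfigInput(served_models=[
            serving.ServedModelInput(model_name='main.default.my_model',
                                     model_version='1',
                                     min_provisioned_throughput=100,
                                     scale_to_zero_enabled=False)
        ]))
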
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index 059b744ef..bc3c03d31 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -3078,49 +3078,74 @@ class LegacyQuery:
     can_edit: Optional[bool] = None
     """Describes whether the authenticated user is allowed to edit the definition of this query."""
 
-    catalog: Optional[str] = None
-    """Name of the catalog where this query will be executed."""
+    created_at: Optional[str] = None
+    """The timestamp when this query was created."""
 
-    create_time: Optional[str] = None
-    """Timestamp when this query was created."""
+    data_source_id: Optional[str] = None
+    """Data source ID maps to the ID of the data source used by the resource and is distinct from the
+    warehouse ID. [Learn more]
+    
+    [Learn more]: https://docs.databricks.com/api/workspace/datasources/list"""
 
     description: Optional[str] = None
     """General description that conveys additional information about this query such as usage notes."""
 
-    display_name: Optional[str] = None
-    """Display name of the query that appears in list views, widget headings, and on the query page."""
-
     id: Optional[str] = None
-    """UUID identifying the query."""
+    """Query ID."""
 
-    last_modifier_user_name: Optional[str] = None
-    """Username of the user who last saved changes to this query."""
+    is_archived: Optional[bool] = None
+    """Indicates whether the query is trashed. Trashed queries can't be used in dashboards, or appear
+    in search results. If this boolean is `true`, the `options` property for this query includes a
+    `moved_to_trash_at` timestamp. Trashed queries are permanently deleted after 30 days."""
 
-    lifecycle_state: Optional[LifecycleState] = None
-    """Indicates whether the query is trashed."""
+    is_draft: Optional[bool] = None
+    """Whether the query is a draft. Draft queries only appear in list views for their owners.
+    Visualizations from draft queries cannot appear on dashboards."""
 
-    owner_user_name: Optional[str] = None
-    """Username of the user that owns the query."""
+    is_favorite: Optional[bool] = None
+    """Whether this query object appears in the current user's favorites list. This flag determines
+    whether the star icon for favorites is selected."""
 
-    parameters: Optional[List[QueryParameter]] = None
-    """List of query parameter definitions."""
+    is_safe: Optional[bool] = None
+    """Text parameter types are not safe from SQL injection for all types of data source. Set this
+    Boolean parameter to `true` if a query either does not use any text type parameters or uses a
+    data source type where text type parameters are handled safely."""
 
-    parent_path: Optional[str] = None
-    """Workspace path of the workspace folder containing the object."""
+    last_modified_by: Optional[User] = None
 
-    query_text: Optional[str] = None
-    """Text of the query to be run."""
+    last_modified_by_id: Optional[int] = None
+    """The ID of the user who last saved changes to this query."""
 
-    run_as_mode: Optional[RunAsMode] = None
-    """Sets the "Run as" role for the object."""
+    latest_query_data_id: Optional[str] = None
+    """If there is a cached result for this query and user, this field includes the query result ID. If
+    this query uses parameters, this field is always null."""
 
-    schema: Optional[str] = None
-    """Name of the schema where this query will be executed."""
+    name: Optional[str] = None
+    """The title of this query that appears in list views, widget headings, and on the query page."""
+
+    options: Optional[QueryOptions] = None
+
+    parent: Optional[str] = None
+    """The identifier of the workspace folder containing the object."""
+
+    permission_tier: Optional[PermissionLevel] = None
+    """* `CAN_VIEW`: Can view the query * `CAN_RUN`: Can run the query * `CAN_EDIT`: Can edit the query
+    * `CAN_MANAGE`: Can manage the query"""
+
+    query: Optional[str] = None
+    """The text of the query to be run."""
+
+    query_hash: Optional[str] = None
+    """A SHA-256 hash of the query text along with the authenticated user ID."""
+
+    run_as_role: Optional[RunAsRole] = None
+    """Sets the **Run as** role for the object. Must be set to one of `"viewer"` (signifying "run as
+    viewer" behavior) or `"owner"` (signifying "run as owner" behavior)"""
 
     tags: Optional[List[str]] = None
 
-    update_time: Optional[str] = None
-    """Timestamp when this query was last updated."""
+    updated_at: Optional[str] = None
+    """The timestamp at which this query was last updated."""
 
     user: Optional[User] = None
 
@@ -3132,24 +3157,30 @@ class LegacyQuery:
     def as_dict(self) -> dict:
         """Serializes the LegacyQuery into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.apply_auto_limit is not None: body['apply_auto_limit'] = self.apply_auto_limit
-        if self.catalog is not None: body['catalog'] = self.catalog
-        if self.create_time is not None: body['create_time'] = self.create_time
+        if self.can_edit is not None: body['can_edit'] = self.can_edit
+        if self.created_at is not None: body['created_at'] = self.created_at
+        if self.data_source_id is not None: body['data_source_id'] = self.data_source_id
         if self.description is not None: body['description'] = self.description
-        if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
-        if self.last_modifier_user_name is not None:
-            body['last_modifier_user_name'] = self.last_modifier_user_name
-        if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
-        if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
-        if self.parameters: body['parameters'] = [v.as_dict() for v in self.parameters]
-        if self.parent_path is not None: body['parent_path'] = self.parent_path
-        if self.query_text is not None: body['query_text'] = self.query_text
-        if self.run_as_mode is not None: body['run_as_mode'] = self.run_as_mode.value
-        if self.schema is not None: body['schema'] = self.schema
+        if self.is_archived is not None: body['is_archived'] = self.is_archived
+        if self.is_draft is not None: body['is_draft'] = self.is_draft
+        if self.is_favorite is not None: body['is_favorite'] = self.is_favorite
+        if self.is_safe is not None: body['is_safe'] = self.is_safe
+        if self.last_modified_by: body['last_modified_by'] = self.last_modified_by.as_dict()
+        if self.last_modified_by_id is not None: body['last_modified_by_id'] = self.last_modified_by_id
+        if self.latest_query_data_id is not None: body['latest_query_data_id'] = self.latest_query_data_id
+        if self.name is not None: body['name'] = self.name
+        if self.options: body['options'] = self.options.as_dict()
+        if self.parent is not None: body['parent'] = self.parent
+        if self.permission_tier is not None: body['permission_tier'] = self.permission_tier.value
+        if self.query is not None: body['query'] = self.query
+        if self.query_hash is not None: body['query_hash'] = self.query_hash
+        if self.run_as_role is not None: body['run_as_role'] = self.run_as_role.value
         if self.tags: body['tags'] = [v for v in self.tags]
-        if self.update_time is not None: body['update_time'] = self.update_time
-        if self.warehouse_id is not None: body['warehouse_id'] = self.warehouse_id
+        if self.updated_at is not None: body['updated_at'] = self.updated_at
+        if self.user: body['user'] = self.user.as_dict()
+        if self.user_id is not None: body['user_id'] = self.user_id
+        if self.visualizations: body['visualizations'] = [v.as_dict() for v in self.visualizations]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -3188,16 +3219,21 @@ def from_dict(cls, d: Dict[str, any]) -> LegacyQuery:
                    created_at=d.get('created_at', None),
                    data_source_id=d.get('data_source_id', None),
                    description=d.get('description', None),
-                   display_name=d.get('display_name', None),
                    id=d.get('id', None),
-                   last_modifier_user_name=d.get('last_modifier_user_name', None),
-                   lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
-                   owner_user_name=d.get('owner_user_name', None),
-                   parameters=_repeated_dict(d, 'parameters', QueryParameter),
-                   parent_path=d.get('parent_path', None),
-                   query_text=d.get('query_text', None),
-                   run_as_mode=_enum(d, 'run_as_mode', RunAsMode),
-                   schema=d.get('schema', None),
+                   is_archived=d.get('is_archived', None),
+                   is_draft=d.get('is_draft', None),
+                   is_favorite=d.get('is_favorite', None),
+                   is_safe=d.get('is_safe', None),
+                   last_modified_by=_from_dict(d, 'last_modified_by', User),
+                   last_modified_by_id=d.get('last_modified_by_id', None),
+                   latest_query_data_id=d.get('latest_query_data_id', None),
+                   name=d.get('name', None),
+                   options=_from_dict(d, 'options', QueryOptions),
+                   parent=d.get('parent', None),
+                   permission_tier=_enum(d, 'permission_tier', PermissionLevel),
+                   query=d.get('query', None),
+                   query_hash=d.get('query_hash', None),
+                   run_as_role=_enum(d, 'run_as_role', RunAsRole),
                    tags=d.get('tags', None),
                    updated_at=d.get('updated_at', None),
                    user=_from_dict(d, 'user', User),
@@ -8666,4 +8702,4 @@ def update_permissions(self,
                            f'/api/2.0/permissions/warehouses/{warehouse_id}',
                            body=body,
                            headers=headers)
-        return WarehousePermissions.from_dict(res)
+        return WarehousePermissions.from_dict(res)
\ No newline at end of file
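
To make the renamed `LegacyQuery` fields concrete, a minimal round-trip sketch using only keys visible in the diff above; the values are hypothetical:

    from databricks.sdk.service.sql import LegacyQuery

    payload = {'id': 'abc123', 'name': 'Daily revenue', 'query': 'SELECT 1',
               'is_draft': True, 'tags': ['finance']}
    q = LegacyQuery.from_dict(payload)
    # as_dict() re-emits the same snake_case keys, e.g. 'name' rather than
    # the removed 'display_name'.
    assert q.as_dict() == payload
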
diff --git a/docs/account/iam/workspace_assignment.rst b/docs/account/iam/workspace_assignment.rst
index 6230b8199..697f0a5da 100644
--- a/docs/account/iam/workspace_assignment.rst
+++ b/docs/account/iam/workspace_assignment.rst
@@ -92,7 +92,7 @@
         specified principal.
         
         :param workspace_id: int
-          The workspace ID for the account.
+          The workspace ID.
         :param principal_id: int
           The ID of the user, service principal, or group.
         :param permissions: List[:class:`WorkspacePermission`] (optional)
diff --git a/docs/workspace/dashboards/index.rst b/docs/workspace/dashboards/index.rst
index acea442bb..940efa5dd 100644
--- a/docs/workspace/dashboards/index.rst
+++ b/docs/workspace/dashboards/index.rst
@@ -10,4 +10,4 @@ Manage Lakeview dashboards
    genie
    lakeview
    lakeview_embedded
-   query_execution
+   query_execution
\ No newline at end of file
diff --git a/docs/workspace/serving/serving_endpoints.rst b/docs/workspace/serving/serving_endpoints.rst
index f6bfe82f4..687976f5d 100644
--- a/docs/workspace/serving/serving_endpoints.rst
+++ b/docs/workspace/serving/serving_endpoints.rst
@@ -41,9 +41,6 @@
           throughput endpoints are currently supported.
         :param config: :class:`EndpointCoreConfigInput` (optional)
           The core config of the serving endpoint.
-        :param ai_gateway: :class:`AiGatewayConfig` (optional)
-          The AI Gateway configuration for the serving endpoint. NOTE: only external model endpoints are
-          supported as of now.
         :param rate_limits: List[:class:`RateLimit`] (optional)
           Rate limits to be applied to the serving endpoint. NOTE: this field is deprecated, please use AI
           Gateway to manage rate limits.
diff --git a/tests/test_open_ai_mixin.py b/tests/test_open_ai_mixin.py
index 72e1e9a60..e503da073 100644
--- a/tests/test_open_ai_mixin.py
+++ b/tests/test_open_ai_mixin.py
@@ -48,4 +48,4 @@ def test_http_request(w, requests_mock):
     assert requests_mock.call_count == 1
     assert requests_mock.called
     assert response.status_code == 200 # Verify the response status
-    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
+    assert (response.text == "The request was successful") # Ensure the response body matches the mocked data
\ No newline at end of file

From b48dac9e2939d46160e5be448ed4d40af17a0952 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Fri, 14 Feb 2025 18:17:25 -0700
Subject: [PATCH 127/136] More diff: flip trailing newlines in _openapi_sha and sql.py

---
 .codegen/_openapi_sha         | 2 +-
 databricks/sdk/service/sql.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.codegen/_openapi_sha b/.codegen/_openapi_sha
index 2a9a021e0..562b72fcc 100644
--- a/.codegen/_openapi_sha
+++ b/.codegen/_openapi_sha
@@ -1 +1 @@
-99f644e72261ef5ecf8d74db20f4b7a1e09723cc
+99f644e72261ef5ecf8d74db20f4b7a1e09723cc
\ No newline at end of file
diff --git a/databricks/sdk/service/sql.py b/databricks/sdk/service/sql.py
index bc3c03d31..cfa94aaa7 100755
--- a/databricks/sdk/service/sql.py
+++ b/databricks/sdk/service/sql.py
@@ -8702,4 +8702,4 @@ def update_permissions(self,
                            f'/api/2.0/permissions/warehouses/{warehouse_id}',
                            body=body,
                            headers=headers)
-        return WarehousePermissions.from_dict(res)
\ No newline at end of file
+        return WarehousePermissions.from_dict(res)
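
The only substance in this patch is where the trailing newline lives; a small sketch to check that locally, using the two paths from the diff:

    from pathlib import Path

    for name in ['.codegen/_openapi_sha', 'databricks/sdk/service/sql.py']:
        text = Path(name).read_text()
        # A '\ No newline at end of file' marker in the diff means this is False.
        print(name, 'ends with newline:', text.endswith('\n'))
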

From 592d5bc645cc76f9e02bf078453dde970a2e945b Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:01:31 -0700
Subject: [PATCH 128/136] Drop the Windows test job from the push workflow

---
 .github/workflows/push.yml | 12 +-----------
 1 file changed, 1 insertion(+), 11 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index a839096c0..4f8881465 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -10,7 +10,7 @@ on:
       - main
 
 jobs:
-  tests-ubuntu:
+  tests:
     uses: ./.github/workflows/test.yml
     strategy:
       fail-fast: false
@@ -19,16 +19,6 @@ jobs:
     with:
       os: ubuntu-latest
       pyVersion: ${{ matrix.pyVersion }}
-
-  tests-windows:
-      uses: ./.github/workflows/test.yml
-      strategy:
-        fail-fast: false
-        matrix:
-          pyVersion: [ '3.9', '3.10', '3.11', '3.12' ]
-      with:
-        os: windows-latest
-        pyVersion: ${{ matrix.pyVersion }}
           
   fmt:
     runs-on: ubuntu-latest

From c7127b3f5239a41942befefa7fb5f98e5822a434 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:03:00 -0700
Subject: [PATCH 129/136] Inline checkout, Python setup, and test steps in the push workflow

---
 .github/workflows/push.yml | 17 ++++++++++++++---
 1 file changed, 14 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 4f8881465..80dc449a1 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -16,9 +16,20 @@ jobs:
       fail-fast: false
       matrix:
         pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    with:
-      os: ubuntu-latest
-      pyVersion: ${{ matrix.pyVersion }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Unshallow
+        run: git fetch --prune --unshallow
+
+      - uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.pyVersion }}
+
+      - name: Run tests
+        run: make dev install test
           
   fmt:
     runs-on: ubuntu-latest

From 7ffc5a0e41aaeabe5fc532ed15ab93a931f943b9 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:04:55 -0700
Subject: [PATCH 130/136] Add Python 3.7 to the test matrix and remove the fmt job

---
 .github/workflows/push.yml | 15 +--------------
 1 file changed, 1 insertion(+), 14 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 80dc449a1..f524bdcd3 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -15,7 +15,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
@@ -30,16 +30,3 @@ jobs:
 
       - name: Run tests
         run: make dev install test
-          
-  fmt:
-    runs-on: ubuntu-latest
-
-    steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-
-      - name: Format all files
-        run: make dev fmt
-
-      - name: Fail on differences
-        run: git diff --exit-code

From 6f893f2c40cfbd18e9faf1ea337d02342577b197 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:06:03 -0700
Subject: [PATCH 131/136] Remove leftover reusable-workflow reference from the tests job

---
 .github/workflows/push.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index f524bdcd3..1c71fcd9e 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -11,7 +11,6 @@ on:
 
 jobs:
   tests:
-    uses: ./.github/workflows/test.yml
     strategy:
       fail-fast: false
       matrix:

From eadbd47089c869ec23232cafb74bd97eced30af6 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:07:04 -0700
Subject: [PATCH 132/136] Remove Python 3.7 from the test matrix

---
 .github/workflows/push.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index 1c71fcd9e..f76e696f2 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -14,7 +14,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout

From e36a62f85d7ee84e3ad1efb1e8bed3c4067edbb8 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:11:09 -0700
Subject: [PATCH 133/136] Retry: re-add Python 3.7 and pin tests to ubuntu-22.04

---
 .github/workflows/push.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml
index f76e696f2..bef41718f 100644
--- a/.github/workflows/push.yml
+++ b/.github/workflows/push.yml
@@ -14,8 +14,8 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
-    runs-on: ubuntu-latest
+        pyVersion: [ '3.7', '3.8', '3.9', '3.10', '3.11', '3.12' ]
+    runs-on: ubuntu-22.04
     steps:
       - name: Checkout
         uses: actions/checkout@v4

From 0c5ed00d6f11b98ce79642e701575ca409283529 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 13:22:29 -0700
Subject: [PATCH 134/136] revert release action

---
 .github/workflows/release.yml | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ae242c1d8..892bbc5c6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,16 +7,11 @@ on:
 
 jobs:
   publish:
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
+    runs-on: ubuntu-latest
     environment: release
-
     permissions:
       contents: write
       id-token: write
-
     steps:
       - uses: actions/checkout@v3
 

From 4204ed0ee6ebe6dd495e0c1d6351b9a014b8360d Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 14:29:23 -0700
Subject: [PATCH 135/136] revert release changes

---
 .github/workflows/release.yml | 7 +------
 1 file changed, 1 insertion(+), 6 deletions(-)

diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index ae242c1d8..892bbc5c6 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -7,16 +7,11 @@ on:
 
 jobs:
   publish:
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
+    runs-on: ubuntu-latest
     environment: release
-
     permissions:
       contents: write
       id-token: write
-
     steps:
       - uses: actions/checkout@v3
 

From 877f5adbf12a72351f4fd902e09c0b9c141faaa9 Mon Sep 17 00:00:00 2001
From: CaymanWilliams 
Date: Sat, 15 Feb 2025 14:29:46 -0700
Subject: [PATCH 136/136] Delete the external PR comment workflow

---
 .github/workflows/external-message.yml | 59 --------------------------
 1 file changed, 59 deletions(-)
 delete mode 100644 .github/workflows/external-message.yml

diff --git a/.github/workflows/external-message.yml b/.github/workflows/external-message.yml
deleted file mode 100644
index 6771057c7..000000000
--- a/.github/workflows/external-message.yml
+++ /dev/null
@@ -1,59 +0,0 @@
-name: PR Comment
-
-# WARNING:
-# THIS WORKFLOW ALWAYS RUNS FOR EXTERNAL CONTRIBUTORS WITHOUT ANY APPROVAL.
-# THIS WORKFLOW RUNS FROM MAIN BRANCH, NOT FROM THE PR BRANCH.
-# DO NOT PULL THE PR OR EXECUTE ANY CODE FROM THE PR.
-
-on:
-  pull_request_target:
-    types: [opened, reopened, synchronize]
-    branches:
-      - main
-
-jobs:
-  comment-on-pr:
-    runs-on:
-      group: databricks-deco-testing-runner-group
-      labels: ubuntu-latest-deco
-
-    permissions:
-      pull-requests: write
-
-    steps:
-      - uses: actions/checkout@v4
-
-      - name: Delete old comments
-        env:
-           GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-        run: |
-            # Delete previous comment if it exists
-            previous_comment_ids=$(gh api "repos/${{ github.repository }}/issues/${{ github.event.pull_request.number }}/comments" \
-              --jq '.[] | select(.body | startswith("")) | .id')
-            echo "Previous comment IDs: $previous_comment_ids"
-            # Iterate over each comment ID and delete the comment
-            if [ ! -z "$previous_comment_ids" ]; then
-              echo "$previous_comment_ids" | while read -r comment_id; do
-                echo "Deleting comment with ID: $comment_id"
-                gh api "repos/${{ github.repository }}/issues/comments/$comment_id" -X DELETE
-              done
-            fi
-
-      - name: Comment on PR
-        env:
-          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          COMMIT_SHA: ${{ github.event.pull_request.head.sha }}
-        run: |
-          gh pr comment ${{ github.event.pull_request.number }} --body \
-          "
-          If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
-
-          Trigger:
-          [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
-
-          Inputs:
-          * PR number: ${{github.event.pull_request.number}}
-          * Commit SHA: \`${{ env.COMMIT_SHA }}\`
-
-          Checks will be approved automatically on success.
-          "
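
For reference, the comment-cleanup step removed above can be approximated in plain Python against the GitHub REST API; the repository name, PR number, and body-prefix marker (which is elided in the workflow text) are all assumptions:

    import os
    import requests

    repo = 'databricks/databricks-sdk-py'  # assumed repository
    pr_number = 123                        # hypothetical PR number
    marker = '<!-- bot-comment -->'        # stand-in for the elided prefix
    headers = {'Authorization': f'Bearer {os.environ["GH_TOKEN"]}'}

    comments = requests.get(
        f'https://api.github.com/repos/{repo}/issues/{pr_number}/comments',
        headers=headers).json()
    for c in comments:
        if c['body'].startswith(marker):
            # Same DELETE endpoint the workflow invoked via `gh api -X DELETE`.
            requests.delete(
                f'https://api.github.com/repos/{repo}/issues/comments/{c["id"]}',
                headers=headers)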